diff --git a/.buildkite/bootstrap.yml b/.buildkite/bootstrap.yml
index 1fef8b6cc0..b0b84616b3 100644
--- a/.buildkite/bootstrap.yml
+++ b/.buildkite/bootstrap.yml
@@ -10,9 +10,10 @@ steps:
       blocked_state: "running"
 
   - label: ":pipeline:"
-    command: "buildkite-agent pipeline upload .buildkite/ci.yml"
     agents:
-      queue: "build-linux"
+      queue: "build-darwin"
+    command:
+      - ".buildkite/scripts/prepare-build.sh"
 
   - if: "build.branch == 'main' && !build.pull_request.repository.fork"
     label: ":github:"
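Note: the `:pipeline:` step no longer uploads ci.yml directly. It now delegates to prepare-build.sh (added later in this diff), which records release/canary metadata before performing the same upload. Conceptually, the new step expands to something like the following sketch (the real logic, including the revision computation, lives in prepare-build.sh's assert_canary):

    # sketch only; "<commits ahead of latest release>" is computed via the
    # GitHub compare API in prepare-build.sh below
    buildkite-agent meta-data set canary "<commits ahead of latest release>"
    buildkite-agent pipeline upload .buildkite/ci.yml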
aarch64 - test-bun" if: "build.branch != 'main'" parallelism: 3 - soft_fail: - - exit_status: 2 - retry: - automatic: - - exit_status: 1 - limit: 1 - - exit_status: -1 - limit: 3 - - exit_status: 255 - limit: 3 - - signal_reason: agent_stop - limit: 3 - - signal: SIGTERM - limit: 3 - depends_on: - - "darwin-aarch64-build-bun-nolto" - agents: - queue: "test-darwin" - os: "darwin" - arch: "aarch64" - release: "13" - command: - - "./scripts/runner.node.mjs --step darwin-aarch64-build-bun-nolto" - - - key: "darwin-aarch64-test-macos-14-smoke" - label: ":darwin: 14 aarch64 - test-bun (smoke)" - if: "build.branch != 'main'" - parallelism: 1 - soft_fail: - - exit_status: 2 - retry: - automatic: - - exit_status: 1 - limit: 1 - - exit_status: -1 - limit: 3 - - exit_status: 255 - limit: 3 - - signal_reason: agent_stop - limit: 3 - - signal: SIGTERM - limit: 3 - depends_on: - - "darwin-aarch64-build-bun" - agents: - queue: "test-darwin" - os: "darwin" - arch: "aarch64" - release: "14" - command: - - "./scripts/runner.node.mjs --step darwin-aarch64-build-bun --smoke 0.05" - - - key: "darwin-aarch64-test-macos-13-smoke" - label: ":darwin: 13 aarch64 - test-bun (smoke)" - if: "build.branch != 'main'" - parallelism: 1 soft_fail: - exit_status: 2 retry: @@ -222,7 +113,7 @@ steps: arch: "aarch64" release: "13" command: - - "./scripts/runner.node.mjs --step darwin-aarch64-build-bun --smoke 0.05" + - "./scripts/runner.node.mjs --step darwin-aarch64-build-bun" # macOS x64 - key: "darwin-x64" @@ -234,16 +125,8 @@ steps: queue: "build-darwin" os: "darwin" arch: "x64" - artifact_paths: - - "build/bun-deps/**/*" - env: - CPU_TARGET: "haswell" - CCACHE_DIR: "$$HOME/.cache/ccache" - SCCACHE_DIR: "$$HOME/.cache/sccache" - ZIG_LOCAL_CACHE_DIR: "$$HOME/.cache/zig-cache" - BUN_DEPS_CACHE_DIR: "$$HOME/.cache/bun-deps" command: - - "./scripts/all-dependencies.sh" + - "./.buildkite/scripts/build-deps.sh" - key: "darwin-x64-build-zig" label: ":darwin: x64 - build-zig" @@ -251,15 +134,8 @@ steps: queue: "build-darwin" os: "darwin" arch: "aarch64" - artifact_paths: - - "build/bun-zig.o" - env: - CPU_TARGET: "haswell" - CCACHE_DIR: "$$HOME/.cache/ccache" - SCCACHE_DIR: "$$HOME/.cache/sccache" - ZIG_LOCAL_CACHE_DIR: "$$HOME/.cache/zig-cache" command: - - "./scripts/build-bun-zig.sh darwin x64" + - "./.buildkite/scripts/build-zig.sh darwin x64" - key: "darwin-x64-build-cpp" label: ":darwin: x64 - build-cpp" @@ -267,37 +143,8 @@ steps: queue: "build-darwin" os: "darwin" arch: "x64" - artifact_paths: - - "build/bun-cpp-objects.a" - env: - CPU_TARGET: "haswell" - CCACHE_DIR: "$$HOME/.cache/ccache" - SCCACHE_DIR: "$$HOME/.cache/sccache" - ZIG_LOCAL_CACHE_DIR: "$$HOME/.cache/zig-cache" command: - - "./scripts/build-bun-cpp.sh" - - - key: "darwin-x64-build-bun-nolto" - label: ":darwin: x64 - build-bun (no-lto)" - if: "build.branch != 'main'" - depends_on: - - "darwin-x64-build-deps" - - "darwin-x64-build-zig" - - "darwin-x64-build-cpp" - agents: - queue: "build-darwin" - os: "darwin" - arch: "x64" - artifact_paths: - - "bun-darwin-x64-nolto.zip" - - "bun-darwin-x64-nolto-profile.zip" - env: - CPU_TARGET: "haswell" - CCACHE_DIR: "$$HOME/.cache/ccache" - SCCACHE_DIR: "$$HOME/.cache/sccache" - ZIG_LOCAL_CACHE_DIR: "$$HOME/.cache/zig-cache" - command: - - "./scripts/buildkite-link-bun.sh --tag darwin-x64 --fast" + - "./.buildkite/scripts/build-cpp.sh" - key: "darwin-x64-build-bun" label: ":darwin: x64 - build-bun" @@ -309,77 +156,13 @@ steps: queue: "build-darwin" os: "darwin" arch: "x64" - artifact_paths: - - "bun-darwin-x64.zip" - - 
"bun-darwin-x64-profile.zip" - env: - CPU_TARGET: "haswell" - CCACHE_DIR: "$$HOME/.cache/ccache" - SCCACHE_DIR: "$$HOME/.cache/sccache" - ZIG_LOCAL_CACHE_DIR: "$$HOME/.cache/zig-cache" command: - - "./scripts/buildkite-link-bun.sh --tag darwin-x64" + - "./.buildkite/scripts/build-bun.sh" - key: "darwin-x64-test-macos-14" label: ":darwin: 14 x64 - test-bun" if: "build.branch != 'main'" - parallelism: 3 - soft_fail: - - exit_status: 2 - retry: - automatic: - - exit_status: 1 - limit: 1 - - exit_status: -1 - limit: 3 - - exit_status: 255 - limit: 3 - - signal_reason: agent_stop - limit: 3 - - signal: SIGTERM - limit: 3 - depends_on: - - "darwin-x64-build-bun-nolto" - agents: - queue: "test-darwin" - os: "darwin" - arch: "x64" - release: "14" - command: - - "./scripts/runner.node.mjs --step darwin-x64-build-bun-nolto" - - - key: "darwin-x64-test-macos-13" - label: ":darwin: 13 x64 - test-bun" - if: "build.branch != 'main'" - parallelism: 3 - soft_fail: - - exit_status: 2 - retry: - automatic: - - exit_status: 1 - limit: 1 - - exit_status: -1 - limit: 3 - - exit_status: 255 - limit: 3 - - signal_reason: agent_stop - limit: 3 - - signal: SIGTERM - limit: 3 - depends_on: - - "darwin-x64-build-bun-nolto" - agents: - queue: "test-darwin" - os: "darwin" - arch: "x64" - release: "13" - command: - - "./scripts/runner.node.mjs --step darwin-x64-build-bun-nolto" - - - key: "darwin-x64-test-macos-14-smoke" - label: ":darwin: 14 x64 - test-bun (smoke)" - if: "build.branch != 'main'" - parallelism: 1 + parallelism: 2 soft_fail: - exit_status: 2 retry: @@ -402,12 +185,12 @@ steps: arch: "x64" release: "14" command: - - "./scripts/runner.node.mjs --step darwin-x64-build-bun --smoke 0.05" + - "./scripts/runner.node.mjs --step darwin-x64-build-bun" - - key: "darwin-x64-test-macos-13-smoke" - label: ":darwin: 13 x64 - test-bun (smoke)" + - key: "darwin-x64-test-macos-13" + label: ":darwin: 13 x64 - test-bun" if: "build.branch != 'main'" - parallelism: 1 + parallelism: 2 soft_fail: - exit_status: 2 retry: @@ -423,14 +206,14 @@ steps: - signal: SIGTERM limit: 3 depends_on: - - "darwin-x64-build-bun-nolto" + - "darwin-x64-build-bun" agents: queue: "test-darwin" os: "darwin" arch: "x64" release: "13" command: - - "./scripts/runner.node.mjs --step darwin-x64-build-bun --smoke 0.05" + - "./scripts/runner.node.mjs --step darwin-x64-build-bun" # Linux aarch64 - key: "linux-aarch64" @@ -442,16 +225,8 @@ steps: queue: "build-linux" os: "linux" arch: "aarch64" - artifact_paths: - - "build/bun-deps/**/*" - env: - CPU_TARGET: "native" - CCACHE_DIR: "$$HOME/.cache/ccache" - SCCACHE_DIR: "$$HOME/.cache/sccache" - ZIG_LOCAL_CACHE_DIR: "$$HOME/.cache/zig-cache" - BUN_DEPS_CACHE_DIR: "$$HOME/.cache/bun-deps" command: - - "./scripts/all-dependencies.sh" + - "./.buildkite/scripts/build-deps.sh" - key: "linux-aarch64-build-zig" label: ":linux: aarch64 - build-zig" @@ -459,15 +234,8 @@ steps: queue: "build-darwin" os: "darwin" arch: "aarch64" - artifact_paths: - - "build/bun-zig.o" - env: - CPU_TARGET: "native" - CCACHE_DIR: "$$HOME/.cache/ccache" - SCCACHE_DIR: "$$HOME/.cache/sccache" - ZIG_LOCAL_CACHE_DIR: "$$HOME/.cache/zig-cache" command: - - "./scripts/build-bun-zig.sh linux aarch64" + - "./.buildkite/scripts/build-zig.sh linux aarch64" - key: "linux-aarch64-build-cpp" label: ":linux: aarch64 - build-cpp" @@ -475,37 +243,8 @@ steps: queue: "build-linux" os: "linux" arch: "aarch64" - artifact_paths: - - "build/bun-cpp-objects.a" - env: - CPU_TARGET: "native" - CCACHE_DIR: "$$HOME/.cache/ccache" - SCCACHE_DIR: 
"$$HOME/.cache/sccache" - ZIG_LOCAL_CACHE_DIR: "$$HOME/.cache/zig-cache" command: - - "./scripts/build-bun-cpp.sh" - - - key: "linux-aarch64-build-bun-nolto" - label: ":linux: aarch64 - build-bun (no-lto)" - if: "build.branch != 'main'" - depends_on: - - "linux-aarch64-build-deps" - - "linux-aarch64-build-zig" - - "linux-aarch64-build-cpp" - agents: - queue: "build-linux" - os: "linux" - arch: "aarch64" - artifact_paths: - - "bun-linux-aarch64-nolto.zip" - - "bun-linux-aarch64-nolto-profile.zip" - env: - CPU_TARGET: "native" - CCACHE_DIR: "$$HOME/.cache/ccache" - SCCACHE_DIR: "$$HOME/.cache/sccache" - ZIG_LOCAL_CACHE_DIR: "$$HOME/.cache/zig-cache" - command: - - "./scripts/buildkite-link-bun.sh --tag linux-aarch64 --fast" + - "./.buildkite/scripts/build-cpp.sh" - key: "linux-aarch64-build-bun" label: ":linux: aarch64 - build-bun" @@ -517,16 +256,8 @@ steps: queue: "build-linux" os: "linux" arch: "aarch64" - artifact_paths: - - "bun-linux-aarch64.zip" - - "bun-linux-aarch64-profile.zip" - env: - CPU_TARGET: "native" - CCACHE_DIR: "$$HOME/.cache/ccache" - SCCACHE_DIR: "$$HOME/.cache/sccache" - ZIG_LOCAL_CACHE_DIR: "$$HOME/.cache/zig-cache" command: - - "./scripts/buildkite-link-bun.sh --tag linux-aarch64" + - "./.buildkite/scripts/build-bun.sh" - key: "linux-aarch64-test-debian-12" label: ":debian: 12 aarch64 - test-bun" @@ -547,7 +278,7 @@ steps: - signal: SIGTERM limit: 3 depends_on: - - "linux-aarch64-build-bun-nolto" + - "linux-aarch64-build-bun" agents: robobun: "true" os: "linux" @@ -555,7 +286,7 @@ steps: distro: "debian" release: "12" command: - - "./scripts/runner.node.mjs --step linux-aarch64-build-bun-nolto" + - "./scripts/runner.node.mjs --step linux-aarch64-build-bun" - key: "linux-aarch64-test-ubuntu-2204" label: ":ubuntu: 22.04 aarch64 - test-bun" @@ -576,7 +307,7 @@ steps: - signal: SIGTERM limit: 3 depends_on: - - "linux-aarch64-build-bun-nolto" + - "linux-aarch64-build-bun" agents: robobun: "true" os: "linux" @@ -584,99 +315,12 @@ steps: distro: "ubuntu" release: "22.04" command: - - "./scripts/runner.node.mjs --step linux-aarch64-build-bun-nolto" + - "./scripts/runner.node.mjs --step linux-aarch64-build-bun" - key: "linux-aarch64-test-ubuntu-2004" label: ":ubuntu: 20.04 aarch64 - test-bun" if: "build.branch != 'main'" parallelism: 5 - soft_fail: - - exit_status: 2 - retry: - automatic: - - exit_status: 1 - limit: 1 - - exit_status: -1 - limit: 3 - - exit_status: 255 - limit: 3 - - signal_reason: agent_stop - limit: 3 - - signal: SIGTERM - limit: 3 - depends_on: - - "linux-aarch64-build-bun-nolto" - agents: - robobun: "true" - os: "linux" - arch: "aarch64" - distro: "ubuntu" - release: "20.04" - command: - - "./scripts/runner.node.mjs --step linux-aarch64-build-bun-nolto" - - - key: "linux-aarch64-test-debian-12-smoke" - label: ":debian: 12 aarch64 - test-bun-smoke" - if: "build.branch != 'main'" - parallelism: 1 - soft_fail: - - exit_status: 2 - retry: - automatic: - - exit_status: 1 - limit: 1 - - exit_status: -1 - limit: 3 - - exit_status: 255 - limit: 3 - - signal_reason: agent_stop - limit: 3 - - signal: SIGTERM - limit: 3 - depends_on: - - "linux-aarch64-build-bun" - agents: - robobun: "true" - os: "linux" - arch: "aarch64" - distro: "debian" - release: "12" - command: - - "./scripts/runner.node.mjs --step linux-aarch64-build-bun --smoke 0.05" - - - key: "linux-aarch64-test-ubuntu-2204-smoke" - label: ":ubuntu: 22.04 aarch64 - test-bun (smoke)" - if: "build.branch != 'main'" - parallelism: 1 - soft_fail: - - exit_status: 2 - retry: - automatic: - - exit_status: 1 - 
limit: 1 - - exit_status: -1 - limit: 3 - - exit_status: 255 - limit: 3 - - signal_reason: agent_stop - limit: 3 - - signal: SIGTERM - limit: 3 - depends_on: - - "linux-aarch64-build-bun" - agents: - robobun: "true" - os: "linux" - arch: "aarch64" - distro: "ubuntu" - release: "22.04" - command: - - "./scripts/runner.node.mjs --step linux-aarch64-build-bun --smoke 0.05" - - - key: "linux-aarch64-test-ubuntu-2004-smoke" - label: ":ubuntu: 20.04 aarch64 - test-bun (smoke)" - if: "build.branch != 'main'" - parallelism: 1 soft_fail: - exit_status: 2 retry: @@ -700,7 +344,7 @@ steps: distro: "ubuntu" release: "20.04" command: - - "./scripts/runner.node.mjs --step linux-aarch64-build-bun --smoke 0.05" + - "./scripts/runner.node.mjs --step linux-aarch64-build-bun" # Linux x64 - key: "linux-x64" @@ -712,16 +356,8 @@ steps: queue: "build-linux" os: "linux" arch: "x64" - artifact_paths: - - "build/bun-deps/**/*" - env: - CPU_TARGET: "haswell" - CCACHE_DIR: "$$HOME/.cache/ccache" - SCCACHE_DIR: "$$HOME/.cache/sccache" - ZIG_LOCAL_CACHE_DIR: "$$HOME/.cache/zig-cache" - BUN_DEPS_CACHE_DIR: "$$HOME/.cache/bun-deps" command: - - "./scripts/all-dependencies.sh" + - "./.buildkite/scripts/build-deps.sh" - key: "linux-x64-build-zig" label: ":linux: x64 - build-zig" @@ -729,15 +365,8 @@ steps: queue: "build-darwin" os: "darwin" arch: "aarch64" - artifact_paths: - - "build/bun-zig.o" - env: - CPU_TARGET: "haswell" - CCACHE_DIR: "$$HOME/.cache/ccache" - SCCACHE_DIR: "$$HOME/.cache/sccache" - ZIG_LOCAL_CACHE_DIR: "$$HOME/.cache/zig-cache" command: - - "./scripts/build-bun-zig.sh linux x64" + - "./.buildkite/scripts/build-zig.sh linux x64" - key: "linux-x64-build-cpp" label: ":linux: x64 - build-cpp" @@ -745,37 +374,8 @@ steps: queue: "build-linux" os: "linux" arch: "x64" - artifact_paths: - - "build/bun-cpp-objects.a" - env: - CPU_TARGET: "haswell" - CCACHE_DIR: "$$HOME/.cache/ccache" - SCCACHE_DIR: "$$HOME/.cache/sccache" - ZIG_LOCAL_CACHE_DIR: "$$HOME/.cache/zig-cache" command: - - "./scripts/build-bun-cpp.sh" - - - key: "linux-x64-build-bun-nolto" - label: ":linux: x64 - build-bun (no-lto)" - if: "build.branch != 'main'" - depends_on: - - "linux-x64-build-deps" - - "linux-x64-build-zig" - - "linux-x64-build-cpp" - agents: - queue: "build-linux" - os: "linux" - arch: "x64" - artifact_paths: - - "bun-linux-x64-nolto.zip" - - "bun-linux-x64-nolto-profile.zip" - env: - CPU_TARGET: "haswell" - CCACHE_DIR: "$$HOME/.cache/ccache" - SCCACHE_DIR: "$$HOME/.cache/sccache" - ZIG_LOCAL_CACHE_DIR: "$$HOME/.cache/zig-cache" - command: - - "./scripts/buildkite-link-bun.sh --tag linux-x64 --fast" + - "./.buildkite/scripts/build-cpp.sh" - key: "linux-x64-build-bun" label: ":linux: x64 - build-bun" @@ -787,16 +387,8 @@ steps: queue: "build-linux" os: "linux" arch: "x64" - artifact_paths: - - "bun-linux-x64.zip" - - "bun-linux-x64-profile.zip" - env: - CPU_TARGET: "haswell" - CCACHE_DIR: "$$HOME/.cache/ccache" - SCCACHE_DIR: "$$HOME/.cache/sccache" - ZIG_LOCAL_CACHE_DIR: "$$HOME/.cache/zig-cache" command: - - "./scripts/buildkite-link-bun.sh --tag linux-x64" + - "./.buildkite/scripts/build-bun.sh" - key: "linux-x64-test-debian-12" label: ":debian: 12 x64 - test-bun" @@ -817,7 +409,7 @@ steps: - signal: SIGTERM limit: 3 depends_on: - - "linux-x64-build-bun-nolto" + - "linux-x64-build-bun" agents: robobun: "true" os: "linux" @@ -825,7 +417,7 @@ steps: distro: "debian" release: "12" command: - - "./scripts/runner.node.mjs --step linux-x64-build-bun-nolto" + - "./scripts/runner.node.mjs --step linux-x64-build-bun" - key: 
"linux-x64-test-ubuntu-2204" label: ":ubuntu: 22.04 x64 - test-bun" @@ -846,7 +438,7 @@ steps: - signal: SIGTERM limit: 3 depends_on: - - "linux-x64-build-bun-nolto" + - "linux-x64-build-bun" agents: robobun: "true" os: "linux" @@ -854,99 +446,12 @@ steps: distro: "ubuntu" release: "22.04" command: - - "./scripts/runner.node.mjs --step linux-x64-build-bun-nolto" + - "./scripts/runner.node.mjs --step linux-x64-build-bun" - key: "linux-x64-test-ubuntu-2004" label: ":ubuntu: 20.04 x64 - test-bun" if: "build.branch != 'main'" parallelism: 5 - soft_fail: - - exit_status: 2 - retry: - automatic: - - exit_status: 1 - limit: 1 - - exit_status: -1 - limit: 3 - - exit_status: 255 - limit: 3 - - signal_reason: agent_stop - limit: 3 - - signal: SIGTERM - limit: 3 - depends_on: - - "linux-x64-build-bun-nolto" - agents: - robobun: "true" - os: "linux" - arch: "x64" - distro: "ubuntu" - release: "20.04" - command: - - "./scripts/runner.node.mjs --step linux-x64-build-bun-nolto" - - - key: "linux-x64-test-debian-12-smoke" - label: ":debian: 12 x64 - test-bun (smoke)" - if: "build.branch != 'main'" - parallelism: 1 - soft_fail: - - exit_status: 2 - retry: - automatic: - - exit_status: 1 - limit: 1 - - exit_status: -1 - limit: 3 - - exit_status: 255 - limit: 3 - - signal_reason: agent_stop - limit: 3 - - signal: SIGTERM - limit: 3 - depends_on: - - "linux-x64-build-bun" - agents: - robobun: "true" - os: "linux" - arch: "x64" - distro: "debian" - release: "12" - command: - - "./scripts/runner.node.mjs --step linux-x64-build-bun --smoke 0.05" - - - key: "linux-x64-test-ubuntu-2204-smoke" - label: ":ubuntu: 22.04 x64 - test-bun (smoke)" - if: "build.branch != 'main'" - parallelism: 1 - soft_fail: - - exit_status: 2 - retry: - automatic: - - exit_status: 1 - limit: 1 - - exit_status: -1 - limit: 3 - - exit_status: 255 - limit: 3 - - signal_reason: agent_stop - limit: 3 - - signal: SIGTERM - limit: 3 - depends_on: - - "linux-x64-build-bun" - agents: - robobun: "true" - os: "linux" - arch: "x64" - distro: "ubuntu" - release: "22.04" - command: - - "./scripts/runner.node.mjs --step linux-x64-build-bun --smoke 0.05" - - - key: "linux-x64-test-ubuntu-2004-smoke" - label: ":ubuntu: 20.04 x64 - test-bun (smoke)" - if: "build.branch != 'main'" - parallelism: 1 soft_fail: - exit_status: 2 retry: @@ -970,7 +475,7 @@ steps: distro: "ubuntu" release: "20.04" command: - - "./scripts/runner.node.mjs --step linux-x64-build-bun --smoke 0.05" + - "./scripts/runner.node.mjs --step linux-x64-build-bun" # Linux x64-baseline - key: "linux-x64-baseline" @@ -982,16 +487,8 @@ steps: queue: "build-linux" os: "linux" arch: "x64" - artifact_paths: - - "build/bun-deps/**/*" - env: - CPU_TARGET: "nehalem" - CCACHE_DIR: "$$HOME/.cache/ccache" - SCCACHE_DIR: "$$HOME/.cache/sccache" - ZIG_LOCAL_CACHE_DIR: "$$HOME/.cache/zig-cache" - BUN_DEPS_CACHE_DIR: "$$HOME/.cache/bun-deps" command: - - "./scripts/all-dependencies.sh" + - "./.buildkite/scripts/build-deps.sh" - key: "linux-x64-baseline-build-zig" label: ":linux: x64-baseline - build-zig" @@ -999,15 +496,8 @@ steps: queue: "build-darwin" os: "darwin" arch: "aarch64" - artifact_paths: - - "build/bun-zig.o" - env: - CPU_TARGET: "nehalem" - CCACHE_DIR: "$$HOME/.cache/ccache" - SCCACHE_DIR: "$$HOME/.cache/sccache" - ZIG_LOCAL_CACHE_DIR: "$$HOME/.cache/zig-cache" command: - - "./scripts/build-bun-zig.sh linux x64" + - "./.buildkite/scripts/build-zig.sh linux x64" - key: "linux-x64-baseline-build-cpp" label: ":linux: x64-baseline - build-cpp" @@ -1015,37 +505,8 @@ steps: queue: "build-linux" os: 
"linux" arch: "x64" - artifact_paths: - - "build/bun-cpp-objects.a" - env: - CPU_TARGET: "nehalem" - CCACHE_DIR: "$$HOME/.cache/ccache" - SCCACHE_DIR: "$$HOME/.cache/sccache" - ZIG_LOCAL_CACHE_DIR: "$$HOME/.cache/zig-cache" command: - - "./scripts/build-bun-cpp.sh" - - - key: "linux-x64-baseline-build-bun-nolto" - label: ":linux: x64-baseline - build-bun (no-lto)" - if: "build.branch != 'main'" - depends_on: - - "linux-x64-baseline-build-deps" - - "linux-x64-baseline-build-zig" - - "linux-x64-baseline-build-cpp" - agents: - queue: "build-linux" - os: "linux" - arch: "x64" - artifact_paths: - - "bun-linux-x64-baseline-nolto.zip" - - "bun-linux-x64-baseline-nolto-profile.zip" - env: - CPU_TARGET: "nehalem" - CCACHE_DIR: "$$HOME/.cache/ccache" - SCCACHE_DIR: "$$HOME/.cache/sccache" - ZIG_LOCAL_CACHE_DIR: "$$HOME/.cache/zig-cache" - command: - - "./scripts/buildkite-link-bun.sh --tag linux-x64-baseline --fast" + - "./.buildkite/scripts/build-cpp.sh" - key: "linux-x64-baseline-build-bun" label: ":linux: x64-baseline - build-bun" @@ -1057,16 +518,8 @@ steps: queue: "build-linux" os: "linux" arch: "x64" - artifact_paths: - - "bun-linux-x64-baseline.zip" - - "bun-linux-x64-baseline-profile.zip" - env: - CPU_TARGET: "nehalem" - CCACHE_DIR: "$$HOME/.cache/ccache" - SCCACHE_DIR: "$$HOME/.cache/sccache" - ZIG_LOCAL_CACHE_DIR: "$$HOME/.cache/zig-cache" command: - - "./scripts/buildkite-link-bun.sh --tag linux-x64-baseline" + - "./.buildkite/scripts/build-bun.sh" - key: "linux-x64-baseline-test-debian-12" label: ":debian: 12 x64-baseline - test-bun" @@ -1087,7 +540,7 @@ steps: - signal: SIGTERM limit: 3 depends_on: - - "linux-x64-baseline-build-bun-nolto" + - "linux-x64-baseline-build-bun" agents: robobun: "true" os: "linux" @@ -1095,7 +548,7 @@ steps: distro: "debian" release: "12" command: - - "./scripts/runner.node.mjs --step linux-x64-baseline-build-bun-nolto" + - "./scripts/runner.node.mjs --step linux-x64-baseline-build-bun" - key: "linux-x64-baseline-test-ubuntu-2204" label: ":ubuntu: 22.04 x64-baseline - test-bun" @@ -1116,7 +569,7 @@ steps: - signal: SIGTERM limit: 3 depends_on: - - "linux-x64-baseline-build-bun-nolto" + - "linux-x64-baseline-build-bun" agents: robobun: "true" os: "linux" @@ -1124,99 +577,12 @@ steps: distro: "ubuntu" release: "22.04" command: - - "./scripts/runner.node.mjs --step linux-x64-baseline-build-bun-nolto" + - "./scripts/runner.node.mjs --step linux-x64-baseline-build-bun" - key: "linux-x64-baseline-test-ubuntu-2004" label: ":ubuntu: 20.04 x64-baseline - test-bun" if: "build.branch != 'main'" parallelism: 5 - soft_fail: - - exit_status: 2 - retry: - automatic: - - exit_status: 1 - limit: 1 - - exit_status: -1 - limit: 3 - - exit_status: 255 - limit: 3 - - signal_reason: agent_stop - limit: 3 - - signal: SIGTERM - limit: 3 - depends_on: - - "linux-x64-baseline-build-bun-nolto" - agents: - robobun: "true" - os: "linux" - arch: "x64" - distro: "ubuntu" - release: "20.04" - command: - - "./scripts/runner.node.mjs --step linux-x64-baseline-build-bun-nolto" - - - key: "linux-x64-baseline-test-debian-12-smoke" - label: ":debian: 12 x64-baseline - test-bun (smoke)" - if: "build.branch != 'main'" - parallelism: 1 - soft_fail: - - exit_status: 2 - retry: - automatic: - - exit_status: 1 - limit: 1 - - exit_status: -1 - limit: 3 - - exit_status: 255 - limit: 3 - - signal_reason: agent_stop - limit: 3 - - signal: SIGTERM - limit: 3 - depends_on: - - "linux-x64-baseline-build-bun" - agents: - robobun: "true" - os: "linux" - arch: "x64" - distro: "debian" - release: "12" - command: 
- - "./scripts/runner.node.mjs --step linux-x64-baseline-build-bun --smoke 0.05" - - - key: "linux-x64-baseline-test-ubuntu-2204-smoke" - label: ":ubuntu: 22.04 x64-baseline - test-bun (smoke)" - if: "build.branch != 'main'" - parallelism: 1 - soft_fail: - - exit_status: 2 - retry: - automatic: - - exit_status: 1 - limit: 1 - - exit_status: -1 - limit: 3 - - exit_status: 255 - limit: 3 - - signal_reason: agent_stop - limit: 3 - - signal: SIGTERM - limit: 3 - depends_on: - - "linux-x64-baseline-build-bun" - agents: - robobun: "true" - os: "linux" - arch: "x64" - distro: "ubuntu" - release: "22.04" - command: - - "./scripts/runner.node.mjs --step linux-x64-baseline-build-bun --smoke 0.05" - - - key: "linux-x64-baseline-test-ubuntu-2004-smoke" - label: ":ubuntu: 20.04 x64-baseline - test-bun (smoke)" - if: "build.branch != 'main'" - parallelism: 1 soft_fail: - exit_status: 2 retry: @@ -1240,7 +606,7 @@ steps: distro: "ubuntu" release: "20.04" command: - - "./scripts/runner.node.mjs --step linux-x64-baseline-build-bun --smoke 0.05" + - "./scripts/runner.node.mjs --step linux-x64-baseline-build-bun" # Windows x64 - key: "windows-x64" @@ -1255,9 +621,9 @@ steps: artifact_paths: - "build\\bun-deps\\*.lib" env: - CPU_TARGET: "haswell" SCCACHE_DIR: "$$HOME\\.cache\\sccache" ZIG_LOCAL_CACHE_DIR: "$$HOME\\.cache\\zig-cache" + SCCACHE_IGNORE_SERVER_IO_ERROR: "1" command: - ".\\scripts\\all-dependencies.ps1" @@ -1267,14 +633,8 @@ steps: queue: "build-darwin" os: "darwin" # cross-compile on Linux or Darwin arch: "aarch64" - artifact_paths: - - "build/bun-zig.o" - env: - CPU_TARGET: "haswell" - SCCACHE_DIR: "$$HOME\\.cache\\sccache" - ZIG_LOCAL_CACHE_DIR: "$$HOME\\.cache\\zig-cache" command: - - "./scripts/build-bun-zig.sh windows x64" + - "./.buildkite/scripts/build-zig.sh windows x64" - key: "windows-x64-build-cpp" label: ":windows: x64 - build-cpp" @@ -1283,31 +643,11 @@ steps: os: "windows" arch: "x64" artifact_paths: - - "build\\bun-cpp-objects.a" + # HACK: See scripts/build-bun-cpp.ps1 + # - "build\\bun-cpp-objects.a" + - "build\\bun-cpp-objects.a.*" command: - - ".\\scripts\\build-bun-cpp.ps1 -Fast $$True" - - - key: "windows-x64-build-bun-nolto" - label: ":windows: x64 - build-bun (no-lto)" - if: "build.branch != 'main'" - depends_on: - - "windows-x64-build-deps" - - "windows-x64-build-zig" - - "windows-x64-build-cpp" - agents: - queue: "build-windows" - os: "windows" - arch: "x64" - artifact_paths: - - "bun-windows-x64-nolto.zip" - - "bun-windows-x64-nolto-profile.zip" - - "features.json" - env: - CPU_TARGET: "haswell" - SCCACHE_DIR: "$$HOME\\.cache\\sccache" - ZIG_LOCAL_CACHE_DIR: "$$HOME\\.cache\\zig-cache" - command: - - ".\\scripts\\buildkite-link-bun.ps1 -Fast $$True" + - ".\\scripts\\build-bun-cpp.ps1" - key: "windows-x64-build-bun" label: ":windows: x64 - build-bun" @@ -1324,9 +664,9 @@ steps: - "bun-windows-x64-profile.zip" - "features.json" env: - CPU_TARGET: "haswell" SCCACHE_DIR: "$$HOME\\.cache\\sccache" ZIG_LOCAL_CACHE_DIR: "$$HOME\\.cache\\zig-cache" + SCCACHE_IGNORE_SERVER_IO_ERROR: "1" command: - ".\\scripts\\buildkite-link-bun.ps1" @@ -1334,31 +674,6 @@ steps: label: ":windows: x64 - test-bun" if: "build.branch != 'main'" parallelism: 10 - soft_fail: - - exit_status: 1 - retry: - automatic: - - exit_status: -1 - limit: 3 - - exit_status: 255 - limit: 3 - - signal_reason: agent_stop - limit: 3 - - signal: SIGTERM - limit: 3 - depends_on: - - "windows-x64-build-bun-nolto" - agents: - robobun: "true" - os: "windows" - arch: "x64" - command: - - "node .\\scripts\\runner.node.mjs 
--step windows-x64-build-bun-nolto" - - - key: "windows-x64-test-bun-smoke" - label: ":windows: x64 - test-bun (smoke)" - if: "build.branch != 'main'" - parallelism: 1 soft_fail: - exit_status: 1 retry: @@ -1378,7 +693,7 @@ steps: os: "windows" arch: "x64" command: - - "node .\\scripts\\runner.node.mjs --step windows-x64-build-bun --smoke 0.05" + - "node .\\scripts\\runner.node.mjs --step windows-x64-build-bun" # Windows x64-baseline - key: "windows-x64-baseline" @@ -1393,9 +708,10 @@ steps: artifact_paths: - "build\\bun-deps\\*.lib" env: - CPU_TARGET: "nehalem" SCCACHE_DIR: "$$HOME\\.cache\\sccache" ZIG_LOCAL_CACHE_DIR: "$$HOME\\.cache\\zig-cache" + SCCACHE_IGNORE_SERVER_IO_ERROR: "1" + USE_BASELINE_BUILD: "1" command: - ".\\scripts\\all-dependencies.ps1" @@ -1405,14 +721,8 @@ steps: queue: "build-darwin" os: "darwin" # cross-compile on Linux or Darwin arch: "aarch64" - artifact_paths: - - "build/bun-zig.o" - env: - CPU_TARGET: "nehalem" - SCCACHE_DIR: "$$HOME\\.cache\\sccache" - ZIG_LOCAL_CACHE_DIR: "$$HOME\\.cache\\zig-cache" command: - - "./scripts/build-bun-zig.sh windows x64" + - "./.buildkite/scripts/build-zig.sh windows x64" - key: "windows-x64-baseline-build-cpp" label: ":windows: x64-baseline - build-cpp" @@ -1421,35 +731,16 @@ steps: os: "windows" arch: "x64" artifact_paths: - - "build\\bun-cpp-objects.a" + # HACK: See scripts/build-bun-cpp.ps1 + # - "build\\bun-cpp-objects.a" + - "build\\bun-cpp-objects.a.*" env: - CPU_TARGET: "nehalem" SCCACHE_DIR: "$$HOME\\.cache\\sccache" ZIG_LOCAL_CACHE_DIR: "$$HOME\\.cache\\zig-cache" + SCCACHE_IGNORE_SERVER_IO_ERROR: "1" + USE_BASELINE_BUILD: "1" command: - - ".\\scripts\\build-bun-cpp.ps1 -Baseline $$True" - - - key: "windows-x64-baseline-build-bun-nolto" - label: ":windows: x64-baseline - build-bun (no-lto)" - if: "build.branch != 'main'" - depends_on: - - "windows-x64-baseline-build-deps" - - "windows-x64-baseline-build-zig" - - "windows-x64-baseline-build-cpp" - agents: - queue: "build-windows" - os: "windows" - arch: "x64" - artifact_paths: - - "bun-windows-x64-baseline-nolto.zip" - - "bun-windows-x64-baseline-nolto-profile.zip" - - "features.json" - env: - CPU_TARGET: "nehalem" - SCCACHE_DIR: "$$HOME\\.cache\\sccache" - ZIG_LOCAL_CACHE_DIR: "$$HOME\\.cache\\zig-cache" - command: - - ".\\scripts\\buildkite-link-bun.ps1 -Baseline $$True -Fast $$True" + - ".\\scripts\\build-bun-cpp.ps1" - key: "windows-x64-baseline-build-bun" label: ":windows: x64-baseline - build-bun" @@ -1466,9 +757,10 @@ steps: - "bun-windows-x64-baseline-profile.zip" - "features.json" env: - CPU_TARGET: "nehalem" SCCACHE_DIR: "$$HOME\\.cache\\sccache" ZIG_LOCAL_CACHE_DIR: "$$HOME\\.cache\\zig-cache" + SCCACHE_IGNORE_SERVER_IO_ERROR: "1" + USE_BASELINE_BUILD: "1" command: - ".\\scripts\\buildkite-link-bun.ps1 -Baseline $$True" @@ -1476,31 +768,6 @@ steps: label: ":windows: x64-baseline - test-bun" if: "build.branch != 'main'" parallelism: 10 - soft_fail: - - exit_status: 1 - retry: - automatic: - - exit_status: -1 - limit: 3 - - exit_status: 255 - limit: 3 - - signal_reason: agent_stop - limit: 3 - - signal: SIGTERM - limit: 3 - depends_on: - - "windows-x64-baseline-build-bun-nolto" - agents: - robobun: "true" - os: "windows" - arch: "x64" - command: - - "node .\\scripts\\runner.node.mjs --step windows-x64-baseline-build-bun-nolto" - - - key: "windows-x64-baseline-test-bun-smoke" - label: ":windows: x64-baseline - test-bun (smoke)" - if: "build.branch != 'main'" - parallelism: 1 soft_fail: - exit_status: 1 retry: @@ -1520,4 +787,4 @@ steps: os: "windows" arch: "x64" 
command: - - "node .\\scripts\\runner.node.mjs --step windows-x64-baseline-build-bun --smoke 0.05" + - "node .\\scripts\\runner.node.mjs --step windows-x64-baseline-build-bun" diff --git a/.buildkite/scripts/build-bun.sh b/.buildkite/scripts/build-bun.sh new file mode 100755 index 0000000000..59363a39fd --- /dev/null +++ b/.buildkite/scripts/build-bun.sh @@ -0,0 +1,55 @@ +#!/bin/bash + +set -eo pipefail +source "$(dirname "$0")/env.sh" + +function run_command() { + set -x + "$@" + { set +x; } 2>/dev/null +} + +cwd="$(pwd)" + +mkdir -p build +source "$(dirname "$0")/download-artifact.sh" "build/bun-deps/**" --step "$BUILDKITE_GROUP_KEY-build-deps" +source "$(dirname "$0")/download-artifact.sh" "build/bun-zig.o" --step "$BUILDKITE_GROUP_KEY-build-zig" +source "$(dirname "$0")/download-artifact.sh" "build/bun-cpp-objects.a" --step "$BUILDKITE_GROUP_KEY-build-cpp" --split +cd build + +run_command cmake .. "${CMAKE_FLAGS[@]}" \ + -GNinja \ + -DBUN_LINK_ONLY="1" \ + -DNO_CONFIGURE_DEPENDS="1" \ + -DBUN_ZIG_OBJ_DIR="$cwd/build" \ + -DBUN_CPP_ARCHIVE="$cwd/build/bun-cpp-objects.a" \ + -DBUN_DEPS_OUT_DIR="$cwd/build/bun-deps" \ + -DCMAKE_BUILD_TYPE="$CMAKE_BUILD_TYPE" \ + -DCPU_TARGET="$CPU_TARGET" \ + -DUSE_LTO="$USE_LTO" \ + -DUSE_DEBUG_JSC="$USE_DEBUG_JSC" \ + -DCANARY="$CANARY" \ + -DGIT_SHA="$GIT_SHA" +run_command ninja -v -j "$CPUS" +run_command ls + +tag="bun-$BUILDKITE_GROUP_KEY" +if [ "$USE_LTO" == "OFF" ]; then + # Remove OS check when LTO is enabled on macOS again + if [[ "$tag" == *"darwin"* ]]; then + tag="$tag-nolto" + fi +fi + +for name in bun bun-profile; do + dir="$tag" + if [ "$name" == "bun-profile" ]; then + dir="$tag-profile" + fi + run_command chmod +x "$name" + run_command "./$name" --revision + run_command mkdir -p "$dir" + run_command mv "$name" "$dir/$name" + run_command zip -r "$dir.zip" "$dir" + source "$cwd/.buildkite/scripts/upload-artifact.sh" "$dir.zip" +done diff --git a/.buildkite/scripts/build-cpp.sh b/.buildkite/scripts/build-cpp.sh new file mode 100755 index 0000000000..f307918189 --- /dev/null +++ b/.buildkite/scripts/build-cpp.sh @@ -0,0 +1,34 @@ +#!/bin/bash + +set -eo pipefail +source "$(dirname "$0")/env.sh" +source "$(realpath $(dirname "$0")/../../scripts/update-submodules.sh)" +{ set +x; } 2>/dev/null + +function run_command() { + set -x + "$@" + { set +x; } 2>/dev/null +} + +mkdir -p build +cd build +mkdir -p tmp_modules tmp_functions js codegen + +run_command cmake .. "${CMAKE_FLAGS[@]}" \ + -GNinja \ + -DBUN_CPP_ONLY="1" \ + -DNO_CONFIGURE_DEPENDS="1" \ + -DCMAKE_BUILD_TYPE="$CMAKE_BUILD_TYPE" \ + -DCPU_TARGET="$CPU_TARGET" \ + -DUSE_LTO="$USE_LTO" \ + -DUSE_DEBUG_JSC="$USE_DEBUG_JSC" \ + -DCANARY="$CANARY" \ + -DGIT_SHA="$GIT_SHA" + +chmod +x compile-cpp-only.sh +source compile-cpp-only.sh -v -j "$CPUS" +{ set +x; } 2>/dev/null + +cd .. 
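Note: the per-step `env` blocks (CPU_TARGET and the cache directories) could be deleted above because every build step now sources .buildkite/scripts/env.sh, which derives them from the Buildkite step key instead. Condensed from env.sh as it appears later in this diff:

    # env.sh derives CPU_TARGET from the host arch and the step key:
    if [ "$(assert_arch)" == "aarch64" ]; then
      export CPU_TARGET="native"
    elif [[ "$BUILDKITE_STEP_KEY" == *"baseline"* ]]; then
      export CPU_TARGET="nehalem"
    else
      export CPU_TARGET="haswell"
    fi

The cache directories are likewise keyed per step (e.g. `$HOME/.cache/ccache/$BUILDKITE_STEP_KEY`), so two steps on the same agent no longer share a compiler cache.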
+source "$(dirname "$0")/upload-artifact.sh" "build/bun-cpp-objects.a" --split diff --git a/.buildkite/scripts/build-deps.ps1 b/.buildkite/scripts/build-deps.ps1 new file mode 100644 index 0000000000..e69de29bb2 diff --git a/.buildkite/scripts/build-deps.sh b/.buildkite/scripts/build-deps.sh new file mode 100755 index 0000000000..e736fb43ff --- /dev/null +++ b/.buildkite/scripts/build-deps.sh @@ -0,0 +1,22 @@ +#!/bin/bash + +set -eo pipefail +source "$(dirname "$0")/env.sh" +source "$(realpath $(dirname "$0")/../../scripts/all-dependencies.sh)" + +artifacts=( + libcrypto.a libssl.a libdecrepit.a + libcares.a + libarchive.a + liblolhtml.a + libmimalloc.a libmimalloc.o + libtcc.a + libz.a + libzstd.a + libdeflate.a + liblshpack.a +) + +for artifact in "${artifacts[@]}"; do + source "$(dirname "$0")/upload-artifact.sh" "build/bun-deps/$artifact" +done diff --git a/.buildkite/scripts/build-old-js.sh b/.buildkite/scripts/build-old-js.sh new file mode 100755 index 0000000000..92484aebe1 --- /dev/null +++ b/.buildkite/scripts/build-old-js.sh @@ -0,0 +1,40 @@ +#!/bin/bash + +set -eo pipefail +source "$(dirname "$0")/env.sh" + +function assert_bun() { + if ! command -v bun &>/dev/null; then + echo "error: bun is not installed" 1>&2 + exit 1 + fi +} + +function assert_make() { + if ! command -v make &>/dev/null; then + echo "error: make is not installed" 1>&2 + exit 1 + fi +} + +function run_command() { + set -x + "$@" + { set +x; } 2>/dev/null +} + +function build_node_fallbacks() { + local cwd="src/node-fallbacks" + run_command bun install --cwd "$cwd" --frozen-lockfile + run_command bun run --cwd "$cwd" build +} + +function build_old_js() { + run_command bun install --frozen-lockfile + run_command make runtime_js fallback_decoder bun_error +} + +assert_bun +assert_make +build_node_fallbacks +build_old_js diff --git a/.buildkite/scripts/build-zig.sh b/.buildkite/scripts/build-zig.sh new file mode 100755 index 0000000000..e7a2614556 --- /dev/null +++ b/.buildkite/scripts/build-zig.sh @@ -0,0 +1,80 @@ +#!/bin/bash + +set -eo pipefail +source "$(dirname "$0")/env.sh" + +function assert_target() { + local arch="${2-$(uname -m)}" + case "$(echo "$arch" | tr '[:upper:]' '[:lower:]')" in + x64 | x86_64 | amd64) + export ZIG_ARCH="x86_64" + if [[ "$BUILDKITE_STEP_KEY" == *"baseline"* ]]; then + export ZIG_CPU_TARGET="nehalem" + else + export ZIG_CPU_TARGET="haswell" + fi + ;; + aarch64 | arm64) + export ZIG_ARCH="aarch64" + export ZIG_CPU_TARGET="native" + ;; + *) + echo "error: Unsupported architecture: $arch" 1>&2 + exit 1 + ;; + esac + local os="${1-$(uname -s)}" + case "$(echo "$os" | tr '[:upper:]' '[:lower:]')" in + linux) + export ZIG_TARGET="$ZIG_ARCH-linux-gnu" ;; + darwin) + export ZIG_TARGET="$ZIG_ARCH-macos-none" ;; + windows) + export ZIG_TARGET="$ZIG_ARCH-windows-msvc" ;; + *) + echo "error: Unsupported operating system: $os" 1>&2 + exit 1 + ;; + esac +} + +function run_command() { + set -x + "$@" + { set +x; } 2>/dev/null +} + +assert_target "$@" + +# Since the zig build depends on files from the zig submodule, +# make sure to update the submodule before building. +run_command git submodule update --init --recursive --progress --depth=1 --checkout src/deps/zig + +# TODO: Move these to be part of the CMake build +source "$(dirname "$0")/build-old-js.sh" + +cwd="$(pwd)" +mkdir -p build +cd build + +run_command cmake .. 
"${CMAKE_FLAGS[@]}" \ + -GNinja \ + -DNO_CONFIGURE_DEPENDS="1" \ + -DNO_CODEGEN="0" \ + -DWEBKIT_DIR="omit" \ + -DBUN_ZIG_OBJ_DIR="$cwd/build" \ + -DZIG_LIB_DIR="$cwd/src/deps/zig/lib" \ + -DCMAKE_BUILD_TYPE="$CMAKE_BUILD_TYPE" \ + -DARCH="$ZIG_ARCH" \ + -DCPU_TARGET="$ZIG_CPU_TARGET" \ + -DZIG_TARGET="$ZIG_TARGET" \ + -DUSE_LTO="$USE_LTO" \ + -DUSE_DEBUG_JSC="$USE_DEBUG_JSC" \ + -DCANARY="$CANARY" \ + -DGIT_SHA="$GIT_SHA" + +export ONLY_ZIG="1" +run_command ninja "$cwd/build/bun-zig.o" -v -j "$CPUS" + +cd .. +source "$(dirname "$0")/upload-artifact.sh" "build/bun-zig.o" diff --git a/.buildkite/scripts/download-artifact.ps1 b/.buildkite/scripts/download-artifact.ps1 new file mode 100755 index 0000000000..0504474077 --- /dev/null +++ b/.buildkite/scripts/download-artifact.ps1 @@ -0,0 +1,47 @@ +param ( + [Parameter(Mandatory=$true)] + [string[]] $Paths, + [switch] $Split +) + +$ErrorActionPreference = "Stop" + +function Assert-Buildkite-Agent() { + if (-not (Get-Command "buildkite-agent" -ErrorAction SilentlyContinue)) { + Write-Error "Cannot find buildkite-agent, please install it: https://buildkite.com/docs/agent/v3/install" + exit 1 + } +} + +function Assert-Join-File() { + if (-not (Get-Command "Join-File" -ErrorAction SilentlyContinue)) { + Write-Error "Cannot find Join-File, please install it: https://www.powershellgallery.com/packages/FileSplitter/1.3" + exit 1 + } +} + +function Download-Buildkite-Artifact() { + param ( + [Parameter(Mandatory=$true)] + [string] $Path, + ) + if ($Split) { + & buildkite-agent artifact download "$Path.*" --debug --debug-http + Join-File -Path "$(Resolve-Path .)\$Path" -Verbose -DeletePartFiles + } else { + & buildkite-agent artifact download "$Path" --debug --debug-http + } + if (-not (Test-Path $Path)) { + Write-Error "Could not find artifact: $Path" + exit 1 + } +} + +Assert-Buildkite-Agent +if ($Split) { + Assert-Join-File +} + +foreach ($Path in $Paths) { + Download-Buildkite-Artifact $Path +} diff --git a/.buildkite/scripts/download-artifact.sh b/.buildkite/scripts/download-artifact.sh new file mode 100755 index 0000000000..5907561853 --- /dev/null +++ b/.buildkite/scripts/download-artifact.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +set -eo pipefail + +function assert_buildkite_agent() { + if ! command -v buildkite-agent &> /dev/null; then + echo "error: Cannot find buildkite-agent, please install it:" + echo "https://buildkite.com/docs/agent/v3/install" + exit 1 + fi +} + +function download_buildkite_artifact() { + local path="$1"; shift + local split="0" + local args=() + while true; do + if [ -z "$1" ]; then + break + fi + case "$1" in + --split) split="1"; shift ;; + *) args+=("$1"); shift ;; + esac + done + if [ "$split" == "1" ]; then + run_command buildkite-agent artifact download "$path.*" . "${args[@]}" + run_command cat $path.?? > "$path" + run_command rm -f $path.?? + else + run_command buildkite-agent artifact download "$path" . "${args[@]}" + fi + if [[ "$path" != *"*"* ]] && [ ! 
-f "$path" ]; then + echo "error: Could not find artifact: $path" + exit 1 + fi +} + +function run_command() { + set -x + "$@" + { set +x; } 2>/dev/null +} + +assert_buildkite_agent +download_buildkite_artifact "$@" diff --git a/.buildkite/scripts/env.sh b/.buildkite/scripts/env.sh new file mode 100755 index 0000000000..b09f799bf6 --- /dev/null +++ b/.buildkite/scripts/env.sh @@ -0,0 +1,119 @@ +#!/bin/bash + +set -eo pipefail + +function assert_os() { + local os="$(uname -s)" + case "$os" in + Linux) + echo "linux" ;; + Darwin) + echo "darwin" ;; + *) + echo "error: Unsupported operating system: $os" 1>&2 + exit 1 + ;; + esac +} + +function assert_arch() { + local arch="$(uname -m)" + case "$arch" in + aarch64 | arm64) + echo "aarch64" ;; + x86_64 | amd64) + echo "x64" ;; + *) + echo "error: Unknown architecture: $arch" 1>&2 + exit 1 + ;; + esac +} + +function assert_build() { + if [ -z "$BUILDKITE_REPO" ]; then + echo "error: Cannot find repository for this build" + exit 1 + fi + if [ -z "$BUILDKITE_COMMIT" ]; then + echo "error: Cannot find commit for this build" + exit 1 + fi + if [ -z "$BUILDKITE_STEP_KEY" ]; then + echo "error: Cannot find step key for this build" + exit 1 + fi + if [ -n "$BUILDKITE_GROUP_KEY" ] && [[ "$BUILDKITE_STEP_KEY" != "$BUILDKITE_GROUP_KEY"* ]]; then + echo "error: Build step '$BUILDKITE_STEP_KEY' does not start with group key '$BUILDKITE_GROUP_KEY'" + exit 1 + fi + # Skip os and arch checks for Zig, since it's cross-compiled on macOS + if [[ "$BUILDKITE_STEP_KEY" != *"zig"* ]]; then + local os="$(assert_os)" + if [[ "$BUILDKITE_STEP_KEY" != *"$os"* ]]; then + echo "error: Build step '$BUILDKITE_STEP_KEY' does not match operating system '$os'" + exit 1 + fi + local arch="$(assert_arch)" + if [[ "$BUILDKITE_STEP_KEY" != *"$arch"* ]]; then + echo "error: Build step '$BUILDKITE_STEP_KEY' does not match architecture '$arch'" + exit 1 + fi + fi +} + +function assert_buildkite_agent() { + if ! 
diff --git a/.buildkite/scripts/download-artifact.ps1 b/.buildkite/scripts/download-artifact.ps1
new file mode 100755
index 0000000000..0504474077
--- /dev/null
+++ b/.buildkite/scripts/download-artifact.ps1
@@ -0,0 +1,47 @@
+param (
+  [Parameter(Mandatory=$true)]
+  [string[]] $Paths,
+  [switch] $Split
+)
+
+$ErrorActionPreference = "Stop"
+
+function Assert-Buildkite-Agent() {
+  if (-not (Get-Command "buildkite-agent" -ErrorAction SilentlyContinue)) {
+    Write-Error "Cannot find buildkite-agent, please install it: https://buildkite.com/docs/agent/v3/install"
+    exit 1
+  }
+}
+
+function Assert-Join-File() {
+  if (-not (Get-Command "Join-File" -ErrorAction SilentlyContinue)) {
+    Write-Error "Cannot find Join-File, please install it: https://www.powershellgallery.com/packages/FileSplitter/1.3"
+    exit 1
+  }
+}
+
+function Download-Buildkite-Artifact() {
+  param (
+    [Parameter(Mandatory=$true)]
+    [string] $Path
+  )
+  if ($Split) {
+    & buildkite-agent artifact download "$Path.*" . --debug --debug-http
+    Join-File -Path "$(Resolve-Path .)\$Path" -Verbose -DeletePartFiles
+  } else {
+    & buildkite-agent artifact download "$Path" . --debug --debug-http
+  }
+  if (-not (Test-Path $Path)) {
+    Write-Error "Could not find artifact: $Path"
+    exit 1
+  }
+}
+
+Assert-Buildkite-Agent
+if ($Split) {
+  Assert-Join-File
+}
+
+foreach ($Path in $Paths) {
+  Download-Buildkite-Artifact $Path
+}
diff --git a/.buildkite/scripts/download-artifact.sh b/.buildkite/scripts/download-artifact.sh
new file mode 100755
index 0000000000..5907561853
--- /dev/null
+++ b/.buildkite/scripts/download-artifact.sh
@@ -0,0 +1,46 @@
+#!/bin/bash
+
+set -eo pipefail
+
+function assert_buildkite_agent() {
+  if ! command -v buildkite-agent &> /dev/null; then
+    echo "error: Cannot find buildkite-agent, please install it:"
+    echo "https://buildkite.com/docs/agent/v3/install"
+    exit 1
+  fi
+}
+
+function download_buildkite_artifact() {
+  local path="$1"; shift
+  local split="0"
+  local args=()
+  while true; do
+    if [ -z "$1" ]; then
+      break
+    fi
+    case "$1" in
+    --split) split="1"; shift ;;
+    *) args+=("$1"); shift ;;
+    esac
+  done
+  if [ "$split" == "1" ]; then
+    run_command buildkite-agent artifact download "$path.*" . "${args[@]}"
+    run_command cat $path.?? > "$path"
+    run_command rm -f $path.??
+  else
+    run_command buildkite-agent artifact download "$path" . "${args[@]}"
+  fi
+  if [[ "$path" != *"*"* ]] && [ ! -f "$path" ]; then
+    echo "error: Could not find artifact: $path"
+    exit 1
+  fi
+}
+
+function run_command() {
+  set -x
+  "$@"
+  { set +x; } 2>/dev/null
+}
+
+assert_buildkite_agent
+download_buildkite_artifact "$@"
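Note: any flag other than `--split` is forwarded verbatim to `buildkite-agent artifact download`, which is how the `--step` filter in the build scripts works. A typical invocation (this mirrors the calls in build-bun.sh above; the step key is one of the ci.yml keys):

    # pull the Zig object produced by a sibling step in the same group:
    source .buildkite/scripts/download-artifact.sh "build/bun-zig.o" \
      --step "darwin-aarch64-build-zig"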
diff --git a/.buildkite/scripts/env.sh b/.buildkite/scripts/env.sh
new file mode 100755
index 0000000000..b09f799bf6
--- /dev/null
+++ b/.buildkite/scripts/env.sh
@@ -0,0 +1,119 @@
+#!/bin/bash
+
+set -eo pipefail
+
+function assert_os() {
+  local os="$(uname -s)"
+  case "$os" in
+  Linux)
+    echo "linux" ;;
+  Darwin)
+    echo "darwin" ;;
+  *)
+    echo "error: Unsupported operating system: $os" 1>&2
+    exit 1
+    ;;
+  esac
+}
+
+function assert_arch() {
+  local arch="$(uname -m)"
+  case "$arch" in
+  aarch64 | arm64)
+    echo "aarch64" ;;
+  x86_64 | amd64)
+    echo "x64" ;;
+  *)
+    echo "error: Unknown architecture: $arch" 1>&2
+    exit 1
+    ;;
+  esac
+}
+
+function assert_build() {
+  if [ -z "$BUILDKITE_REPO" ]; then
+    echo "error: Cannot find repository for this build"
+    exit 1
+  fi
+  if [ -z "$BUILDKITE_COMMIT" ]; then
+    echo "error: Cannot find commit for this build"
+    exit 1
+  fi
+  if [ -z "$BUILDKITE_STEP_KEY" ]; then
+    echo "error: Cannot find step key for this build"
+    exit 1
+  fi
+  if [ -n "$BUILDKITE_GROUP_KEY" ] && [[ "$BUILDKITE_STEP_KEY" != "$BUILDKITE_GROUP_KEY"* ]]; then
+    echo "error: Build step '$BUILDKITE_STEP_KEY' does not start with group key '$BUILDKITE_GROUP_KEY'"
+    exit 1
+  fi
+  # Skip os and arch checks for Zig, since it's cross-compiled on macOS
+  if [[ "$BUILDKITE_STEP_KEY" != *"zig"* ]]; then
+    local os="$(assert_os)"
+    if [[ "$BUILDKITE_STEP_KEY" != *"$os"* ]]; then
+      echo "error: Build step '$BUILDKITE_STEP_KEY' does not match operating system '$os'"
+      exit 1
+    fi
+    local arch="$(assert_arch)"
+    if [[ "$BUILDKITE_STEP_KEY" != *"$arch"* ]]; then
+      echo "error: Build step '$BUILDKITE_STEP_KEY' does not match architecture '$arch'"
+      exit 1
+    fi
+  fi
+}
+
+function assert_buildkite_agent() {
+  if ! command -v buildkite-agent &> /dev/null; then
+    echo "error: Cannot find buildkite-agent, please install it:"
+    echo "https://buildkite.com/docs/agent/v3/install"
+    exit 1
+  fi
+}
+
+function export_environment() {
+  source "$(realpath $(dirname "$0")/../../scripts/env.sh)"
+  source "$(realpath $(dirname "$0")/../../scripts/update-submodules.sh)"
+  { set +x; } 2>/dev/null
+  export GIT_SHA="$BUILDKITE_COMMIT"
+  export CCACHE_DIR="$HOME/.cache/ccache/$BUILDKITE_STEP_KEY"
+  export SCCACHE_DIR="$HOME/.cache/sccache/$BUILDKITE_STEP_KEY"
+  export ZIG_LOCAL_CACHE_DIR="$HOME/.cache/zig-cache/$BUILDKITE_STEP_KEY"
+  export BUN_DEPS_CACHE_DIR="$HOME/.cache/bun-deps/$BUILDKITE_STEP_KEY"
+  if [ "$(assert_arch)" == "aarch64" ]; then
+    export CPU_TARGET="native"
+  elif [[ "$BUILDKITE_STEP_KEY" == *"baseline"* ]]; then
+    export CPU_TARGET="nehalem"
+  else
+    export CPU_TARGET="haswell"
+  fi
+  if [[ "$BUILDKITE_STEP_KEY" == *"nolto"* ]]; then
+    export USE_LTO="OFF"
+  else
+    export USE_LTO="ON"
+  fi
+  if $(buildkite-agent meta-data exists release &> /dev/null); then
+    export CMAKE_BUILD_TYPE="$(buildkite-agent meta-data get release)"
+  else
+    export CMAKE_BUILD_TYPE="Release"
+  fi
+  if $(buildkite-agent meta-data exists canary &> /dev/null); then
+    export CANARY="$(buildkite-agent meta-data get canary)"
+  else
+    export CANARY="1"
+  fi
+  if $(buildkite-agent meta-data exists assertions &> /dev/null); then
+    export USE_DEBUG_JSC="$(buildkite-agent meta-data get assertions)"
+  else
+    export USE_DEBUG_JSC="OFF"
+  fi
+  if [ "$BUILDKITE_CLEAN_CHECKOUT" == "true" ]; then
+    rm -rf "$CCACHE_DIR"
+    rm -rf "$SCCACHE_DIR"
+    rm -rf "$ZIG_LOCAL_CACHE_DIR"
+    rm -rf "$BUN_DEPS_CACHE_DIR"
+  fi
+}
+
+assert_build
+assert_buildkite_agent
+export_environment
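Note: build type, canary revision, and JSC assertions are no longer hard-coded per step; env.sh reads them from build-level meta-data with sane defaults (Release, 1, OFF). One way to opt a whole build into a debug configuration, assuming the meta-data keys shown in export_environment above:

    # set before the build steps run, e.g. from a bootstrap step:
    buildkite-agent meta-data set release "Debug"
    buildkite-agent meta-data set assertions "ON"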
diff --git a/.buildkite/scripts/prepare-build.sh b/.buildkite/scripts/prepare-build.sh
new file mode 100755
index 0000000000..1c245d9618
--- /dev/null
+++ b/.buildkite/scripts/prepare-build.sh
@@ -0,0 +1,97 @@
+#!/bin/bash
+
+set -eo pipefail
+
+function assert_build() {
+  if [ -z "$BUILDKITE_REPO" ]; then
+    echo "error: Cannot find repository for this build"
+    exit 1
+  fi
+  if [ -z "$BUILDKITE_COMMIT" ]; then
+    echo "error: Cannot find commit for this build"
+    exit 1
+  fi
+}
+
+function assert_buildkite_agent() {
+  if ! command -v buildkite-agent &> /dev/null; then
+    echo "error: Cannot find buildkite-agent, please install it:"
+    echo "https://buildkite.com/docs/agent/v3/install"
+    exit 1
+  fi
+}
+
+function assert_jq() {
+  assert_command "jq" "jq" "https://stedolan.github.io/jq/"
+}
+
+function assert_curl() {
+  assert_command "curl" "curl" "https://curl.se/download.html"
+}
+
+function assert_command() {
+  local command="$1"
+  local package="$2"
+  local help_url="$3"
+  if ! command -v "$command" &> /dev/null; then
+    echo "warning: $command is not installed, installing..."
+    if command -v brew &> /dev/null; then
+      HOMEBREW_NO_AUTO_UPDATE=1 brew install "$package"
+    else
+      echo "error: Cannot install $command, please install it"
+      if [ -n "$help_url" ]; then
+        echo ""
+        echo "hint: See $help_url for help"
+      fi
+      exit 1
+    fi
+  fi
+}
+
+function assert_release() {
+  if [ "$RELEASE" == "1" ]; then
+    run_command buildkite-agent meta-data set canary "0"
+  fi
+}
+
+function assert_canary() {
+  local canary="$(buildkite-agent meta-data get canary 2>/dev/null)"
+  if [ -z "$canary" ]; then
+    local repo=$(echo "$BUILDKITE_REPO" | sed -E 's#https://github.com/([^/]+)/([^/]+).git#\1/\2#g')
+    local tag="$(curl -sL "https://api.github.com/repos/$repo/releases/latest" | jq -r ".tag_name")"
+    if [ "$tag" == "null" ]; then
+      canary="1"
+    else
+      local revision=$(curl -sL "https://api.github.com/repos/$repo/compare/$tag...$BUILDKITE_COMMIT" | jq -r ".ahead_by")
+      if [ "$revision" == "null" ]; then
+        canary="1"
+      else
+        canary="$revision"
+      fi
+    fi
+    run_command buildkite-agent meta-data set canary "$canary"
+  fi
+}
+
+function upload_buildkite_pipeline() {
+  local path="$1"
+  if [ ! -f "$path" ]; then
+    echo "error: Cannot find pipeline: $path"
+    exit 1
+  fi
+  run_command buildkite-agent pipeline upload "$path"
+}
+
+function run_command() {
+  set -x
+  "$@"
+  { set +x; } 2>/dev/null
+}
+
+assert_build
+assert_buildkite_agent
+assert_jq
+assert_curl
+assert_release
+assert_canary
+upload_buildkite_pipeline ".buildkite/ci.yml"
diff --git a/.buildkite/scripts/upload-artifact.ps1 b/.buildkite/scripts/upload-artifact.ps1
new file mode 100755
index 0000000000..b7d79a410b
--- /dev/null
+++ b/.buildkite/scripts/upload-artifact.ps1
@@ -0,0 +1,47 @@
+param (
+  [Parameter(Mandatory=$true)]
+  [string[]] $Paths,
+  [switch] $Split
+)
+
+$ErrorActionPreference = "Stop"
+
+function Assert-Buildkite-Agent() {
+  if (-not (Get-Command "buildkite-agent" -ErrorAction SilentlyContinue)) {
+    Write-Error "Cannot find buildkite-agent, please install it: https://buildkite.com/docs/agent/v3/install"
+    exit 1
+  }
+}
+
+function Assert-Split-File() {
+  if (-not (Get-Command "Split-File" -ErrorAction SilentlyContinue)) {
+    Write-Error "Cannot find Split-File, please install it: https://www.powershellgallery.com/packages/FileSplitter/1.3"
+    exit 1
+  }
+}
+
+function Upload-Buildkite-Artifact() {
+  param (
+    [Parameter(Mandatory=$true)]
+    [string] $Path
+  )
+  if (-not (Test-Path $Path)) {
+    Write-Error "Could not find artifact: $Path"
+    exit 1
+  }
+  if ($Split) {
+    Remove-Item -Path "$Path.*" -Force
+    Split-File -Path (Resolve-Path $Path) -PartSizeBytes 50MB -Verbose
+    $Path = "$Path.*"
+  }
+  & buildkite-agent artifact upload "$Path" --debug --debug-http
+}
+
+Assert-Buildkite-Agent
+if ($Split) {
+  Assert-Split-File
+}
+
+foreach ($Path in $Paths) {
+  Upload-Buildkite-Artifact $Path
+}
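Note: assert_canary in prepare-build.sh numbers canary builds by how far the commit is ahead of the latest GitHub release. Shown standalone, under the assumption that `$repo` resolves to `oven-sh/bun` (it is derived from `$BUILDKITE_REPO`; requires curl and jq):

    tag="$(curl -sL "https://api.github.com/repos/oven-sh/bun/releases/latest" \
      | jq -r '.tag_name')"
    # "ahead_by" becomes the canary revision; null falls back to 1
    curl -sL "https://api.github.com/repos/oven-sh/bun/compare/$tag...$BUILDKITE_COMMIT" \
      | jq -r '.ahead_by'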
diff --git a/.buildkite/scripts/upload-artifact.sh b/.buildkite/scripts/upload-artifact.sh
new file mode 100755
index 0000000000..0284a93c79
--- /dev/null
+++ b/.buildkite/scripts/upload-artifact.sh
@@ -0,0 +1,54 @@
+#!/bin/bash
+
+set -eo pipefail
+
+function assert_buildkite_agent() {
+  if ! command -v buildkite-agent &> /dev/null; then
+    echo "error: Cannot find buildkite-agent, please install it:"
+    echo "https://buildkite.com/docs/agent/v3/install"
+    exit 1
+  fi
+}
+
+function assert_split() {
+  if ! command -v split &> /dev/null; then
+    echo "error: Cannot find split, please install it:"
+    echo "https://www.gnu.org/software/coreutils/split"
+    exit 1
+  fi
+}
+
+function upload_buildkite_artifact() {
+  local path="$1"; shift
+  local split="0"
+  local args=()
+  while true; do
+    if [ -z "$1" ]; then
+      break
+    fi
+    case "$1" in
+    --split) split="1"; shift ;;
+    *) args+=("$1"); shift ;;
+    esac
+  done
+  if [ ! -f "$path" ]; then
+    echo "error: Could not find artifact: $path"
+    exit 1
+  fi
+  if [ "$split" == "1" ]; then
+    run_command rm -f "$path."*
+    run_command split -b 50MB -d "$path" "$path."
+    run_command buildkite-agent artifact upload "$path.*" "${args[@]}"
+  else
+    run_command buildkite-agent artifact upload "$path" "${args[@]}"
+  fi
+}
+
+function run_command() {
+  set -x
+  "$@"
+  { set +x; } 2>/dev/null
+}
+
+assert_buildkite_agent
+upload_buildkite_artifact "$@"
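Note: `split -b 50MB -d` produces parts with two-digit numeric suffixes, which is exactly what the `cat $path.??` glob in download-artifact.sh expects (and what the `bun-cpp-objects.a.*` artifact glob in ci.yml matches). The round trip, using the archive name from build-cpp.sh:

    # upload side:
    split -b 50MB -d build/bun-cpp-objects.a build/bun-cpp-objects.a.
    # download side:
    cat build/bun-cpp-objects.a.?? > build/bun-cpp-objects.a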
-f "$dir/$name" ]; then echo "error: Cannot find Buildkite artifact: $name" exit 1 fi } -function upload_assets() { - local tag=$1 - local files=${@:2} - gh release upload "$tag" $files --clobber --repo "$BUILDKITE_REPO" +function upload_github_asset() { + local version="$1" + local tag="$(release_tag "$version")" + local file="$2" + run_command gh release upload "$tag" "$file" --clobber --repo "$BUILDKITE_REPO" + if [ "$tag" == "canary" ]; then + run_command gh release edit "$tag" --repo "$BUILDKITE_REPO" \ + --notes "This canary release of Bun corresponds to the commit: $BUILDKITE_COMMIT" + fi } -assert_main -assert_buildkite_agent -assert_gh -assert_gh_token +function update_github_release() { + local version="$1" + local tag="$(release_tag "$version")" + if [ "$tag" == "canary" ]; then + run_command gh release edit "$tag" --repo "$BUILDKITE_REPO" \ + --notes "This release of Bun corresponds to the commit: $BUILDKITE_COMMIT" + fi +} -declare artifacts=( - bun-darwin-aarch64.zip - bun-darwin-aarch64-profile.zip - bun-darwin-x64.zip - bun-darwin-x64-profile.zip - bun-linux-aarch64.zip - bun-linux-aarch64-profile.zip - bun-linux-x64.zip - bun-linux-x64-profile.zip - bun-linux-x64-baseline.zip - bun-linux-x64-baseline-profile.zip - bun-windows-x64.zip - bun-windows-x64-profile.zip - bun-windows-x64-baseline.zip - bun-windows-x64-baseline-profile.zip -) +function upload_s3_file() { + local folder="$1" + local file="$2" + run_command aws --endpoint-url="$AWS_ENDPOINT" s3 cp "$file" "s3://$AWS_BUCKET/$folder/$file" +} -for artifact in "${artifacts[@]}"; do - download_artifact $artifact -done +function create_release() { + assert_main + assert_buildkite_agent + assert_github + assert_aws + assert_sentry -upload_assets "canary" "${artifacts[@]}" + local tag="$1" # 'canary' or 'x.y.z' + local artifacts=( + bun-darwin-aarch64.zip + bun-darwin-aarch64-profile.zip + bun-darwin-x64.zip + bun-darwin-x64-profile.zip + bun-linux-aarch64.zip + bun-linux-aarch64-profile.zip + bun-linux-x64.zip + bun-linux-x64-profile.zip + bun-linux-x64-baseline.zip + bun-linux-x64-baseline-profile.zip + bun-windows-x64.zip + bun-windows-x64-profile.zip + bun-windows-x64-baseline.zip + bun-windows-x64-baseline-profile.zip + ) + + for name in "${artifacts[@]}"; do + download_buildkite_artifact "$name" + upload_s3_file "releases/$BUILDKITE_COMMIT" "$name" + upload_s3_file "releases/$tag" "$name" + upload_github_asset "$tag" "$name" + done + update_github_release "$tag" + create_sentry_release "$tag" +} + +function assert_canary() { + local canary="$(buildkite-agent meta-data get canary 2>/dev/null)" + if [ -z "$canary" ] || [ "$canary" == "0" ]; then + echo "warn: Skipping release because this is not a canary build" + exit 0 + fi +} + +assert_canary +create_release "canary" diff --git a/.github/workflows/build-darwin.yml b/.github/workflows/build-darwin.yml deleted file mode 100644 index c59226d131..0000000000 --- a/.github/workflows/build-darwin.yml +++ /dev/null @@ -1,316 +0,0 @@ -name: Build Darwin - -permissions: - contents: read - actions: write - -on: - workflow_call: - inputs: - runs-on: - type: string - default: macos-12-large - tag: - type: string - required: true - arch: - type: string - required: true - cpu: - type: string - required: true - assertions: - type: boolean - canary: - type: boolean - no-cache: - type: boolean - -env: - LLVM_VERSION: 16 - BUN_VERSION: 1.1.8 - LC_CTYPE: "en_US.UTF-8" - LC_ALL: "en_US.UTF-8" - BUN_ENABLE_LTO: "1" - -jobs: - build-submodules: - name: Build Submodules - runs-on: ${{ 
inputs.runs-on }} - steps: - - name: Checkout - uses: actions/checkout@v4 - with: - sparse-checkout: | - .gitmodules - src/deps - scripts - - name: Hash Submodules - id: hash - run: | - print_versions() { - git submodule | grep -v WebKit - echo "LLVM_VERSION=${{ env.LLVM_VERSION }}" - cat $(echo scripts/build*.sh scripts/all-dependencies.sh | tr " " "\n" | sort) - } - echo "hash=$(print_versions | shasum)" >> $GITHUB_OUTPUT - - if: ${{ !inputs.no-cache }} - name: Restore Cache - id: cache - uses: actions/cache/restore@v4 - with: - path: ${{ runner.temp }}/bun-deps - key: bun-${{ inputs.tag }}-deps-${{ steps.hash.outputs.hash }} - # TODO: Figure out how to cache homebrew dependencies - - if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }} - name: Install Dependencies - env: - HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1 - HOMEBREW_NO_AUTO_UPDATE: 1 - HOMEBREW_NO_INSTALL_CLEANUP: 1 - run: | - brew install \ - llvm@${{ env.LLVM_VERSION }} \ - ccache \ - rust \ - pkg-config \ - coreutils \ - libtool \ - cmake \ - libiconv \ - automake \ - openssl@1.1 \ - ninja \ - golang \ - gnu-sed --force --overwrite - echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH - echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH - echo "$(brew --prefix llvm@$LLVM_VERSION)/bin" >> $GITHUB_PATH - brew link --overwrite llvm@$LLVM_VERSION - - if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }} - name: Clone Submodules - run: | - ./scripts/update-submodules.sh - - name: Build Submodules - if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }} - env: - CPU_TARGET: ${{ inputs.cpu }} - BUN_DEPS_OUT_DIR: ${{ runner.temp }}/bun-deps - run: | - mkdir -p $BUN_DEPS_OUT_DIR - ./scripts/all-dependencies.sh - - name: Save Cache - if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }} - uses: actions/cache/save@v4 - with: - path: ${{ runner.temp }}/bun-deps - key: ${{ steps.cache.outputs.cache-primary-key }} - - name: Upload bun-${{ inputs.tag }}-deps - uses: actions/upload-artifact@v4 - with: - name: bun-${{ inputs.tag }}-deps - path: ${{ runner.temp }}/bun-deps - if-no-files-found: error - build-cpp: - name: Build C++ - runs-on: ${{ inputs.runs-on }} - steps: - - name: Checkout - uses: actions/checkout@v4 - with: - submodules: recursive - # TODO: Figure out how to cache homebrew dependencies - - name: Install Dependencies - env: - HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1 - HOMEBREW_NO_AUTO_UPDATE: 1 - HOMEBREW_NO_INSTALL_CLEANUP: 1 - run: | - brew install \ - llvm@${{ env.LLVM_VERSION }} \ - ccache \ - rust \ - pkg-config \ - coreutils \ - libtool \ - cmake \ - libiconv \ - automake \ - openssl@1.1 \ - ninja \ - golang \ - gnu-sed --force --overwrite - echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH - echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH - echo "$(brew --prefix llvm@$LLVM_VERSION)/bin" >> $GITHUB_PATH - brew link --overwrite llvm@$LLVM_VERSION - - name: Setup Bun - uses: ./.github/actions/setup-bun - with: - bun-version: ${{ env.BUN_VERSION }} - - if: ${{ !inputs.no-cache }} - name: Restore Cache - uses: actions/cache@v4 - with: - path: ${{ runner.temp }}/ccache - key: bun-${{ inputs.tag }}-cpp-${{ hashFiles('Dockerfile', 'Makefile', 'CMakeLists.txt', 'build.zig', 'scripts/**', 'src/**', 'packages/bun-usockets/src/**', 'packages/bun-uws/src/**') }} - restore-keys: | - bun-${{ inputs.tag }}-cpp- - - name: Compile - env: - CPU_TARGET: ${{ inputs.cpu }} - SOURCE_DIR: ${{ github.workspace }} - OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj - BUN_DEPS_OUT_DIR: ${{ 
runner.temp }}/bun-deps - CCACHE_DIR: ${{ runner.temp }}/ccache - run: | - mkdir -p $OBJ_DIR - cd $OBJ_DIR - cmake -S $SOURCE_DIR -B $OBJ_DIR \ - -G Ninja \ - -DCMAKE_BUILD_TYPE=Release \ - -DUSE_LTO=ON \ - -DBUN_CPP_ONLY=1 \ - -DNO_CONFIGURE_DEPENDS=1 - chmod +x compile-cpp-only.sh - ./compile-cpp-only.sh -v - - name: Upload bun-${{ inputs.tag }}-cpp - uses: actions/upload-artifact@v4 - with: - name: bun-${{ inputs.tag }}-cpp - path: ${{ runner.temp }}/bun-cpp-obj/bun-cpp-objects.a - if-no-files-found: error - build-zig: - name: Build Zig - uses: ./.github/workflows/build-zig.yml - with: - os: darwin - only-zig: true - tag: ${{ inputs.tag }} - arch: ${{ inputs.arch }} - cpu: ${{ inputs.cpu }} - assertions: ${{ inputs.assertions }} - canary: ${{ inputs.canary }} - no-cache: ${{ inputs.no-cache }} - link: - name: Link - runs-on: ${{ inputs.runs-on }} - needs: - - build-submodules - - build-cpp - - build-zig - steps: - - uses: actions/checkout@v4 - # TODO: Figure out how to cache homebrew dependencies - - name: Install Dependencies - env: - HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1 - HOMEBREW_NO_AUTO_UPDATE: 1 - HOMEBREW_NO_INSTALL_CLEANUP: 1 - run: | - brew install \ - llvm@${{ env.LLVM_VERSION }} \ - ccache \ - rust \ - pkg-config \ - coreutils \ - libtool \ - cmake \ - libiconv \ - automake \ - openssl@1.1 \ - ninja \ - golang \ - gnu-sed --force --overwrite - echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH - echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH - echo "$(brew --prefix llvm@$LLVM_VERSION)/bin" >> $GITHUB_PATH - brew link --overwrite llvm@$LLVM_VERSION - - name: Setup Bun - uses: ./.github/actions/setup-bun - with: - bun-version: ${{ env.BUN_VERSION }} - - name: Download bun-${{ inputs.tag }}-deps - uses: actions/download-artifact@v4 - with: - name: bun-${{ inputs.tag }}-deps - path: ${{ runner.temp }}/bun-deps - - name: Download bun-${{ inputs.tag }}-cpp - uses: actions/download-artifact@v4 - with: - name: bun-${{ inputs.tag }}-cpp - path: ${{ runner.temp }}/bun-cpp-obj - - name: Download bun-${{ inputs.tag }}-zig - uses: actions/download-artifact@v4 - with: - name: bun-${{ inputs.tag }}-zig - path: ${{ runner.temp }}/release - - if: ${{ !inputs.no-cache }} - name: Restore Cache - uses: actions/cache@v4 - with: - path: ${{ runner.temp }}/ccache - key: bun-${{ inputs.tag }}-cpp-${{ hashFiles('Dockerfile', 'Makefile', 'CMakeLists.txt', 'build.zig', 'scripts/**', 'src/**', 'packages/bun-usockets/src/**', 'packages/bun-uws/src/**') }} - restore-keys: | - bun-${{ inputs.tag }}-cpp- - - name: Link - env: - CPU_TARGET: ${{ inputs.cpu }} - CCACHE_DIR: ${{ runner.temp }}/ccache - run: | - SRC_DIR=$PWD - mkdir ${{ runner.temp }}/link-build - cd ${{ runner.temp }}/link-build - cmake $SRC_DIR \ - -G Ninja \ - -DCMAKE_BUILD_TYPE=Release \ - -DUSE_LTO=ON \ - -DBUN_LINK_ONLY=1 \ - -DBUN_ZIG_OBJ_DIR="${{ runner.temp }}/release" \ - -DBUN_CPP_ARCHIVE="${{ runner.temp }}/bun-cpp-obj/bun-cpp-objects.a" \ - -DBUN_DEPS_OUT_DIR="${{ runner.temp }}/bun-deps" \ - -DNO_CONFIGURE_DEPENDS=1 - ninja -v - - name: Prepare - run: | - cd ${{ runner.temp }}/link-build - chmod +x bun-profile bun - mkdir -p bun-${{ inputs.tag }}-profile/ bun-${{ inputs.tag }}/ - mv bun-profile bun-${{ inputs.tag }}-profile/bun-profile - if [ -f bun.dSYM ]; then - mv bun.dSYM bun-${{ inputs.tag }}-profile/bun.dSYM - fi - mv bun bun-${{ inputs.tag }}/bun - zip -r bun-${{ inputs.tag }}-profile.zip bun-${{ inputs.tag }}-profile - zip -r bun-${{ inputs.tag }}.zip bun-${{ inputs.tag }} - - name: Upload bun-${{ 
inputs.tag }} - uses: actions/upload-artifact@v4 - with: - name: bun-${{ inputs.tag }} - path: ${{ runner.temp }}/link-build/bun-${{ inputs.tag }}.zip - if-no-files-found: error - - name: Upload bun-${{ inputs.tag }}-profile - uses: actions/upload-artifact@v4 - with: - name: bun-${{ inputs.tag }}-profile - path: ${{ runner.temp }}/link-build/bun-${{ inputs.tag }}-profile.zip - if-no-files-found: error - on-failure: - if: ${{ github.repository_owner == 'oven-sh' && failure() }} - name: On Failure - needs: link - runs-on: ubuntu-latest - steps: - - name: Send Message - uses: sarisia/actions-status-discord@v1 - with: - webhook: ${{ secrets.DISCORD_WEBHOOK }} - nodetail: true - color: "#FF0000" - title: "" - description: | - ### ❌ [${{ github.event.pull_request.title }}](${{ github.event.pull_request.html_url }}) - - @${{ github.actor }}, the build for bun-${{ inputs.tag }} failed. - - **[View logs](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})** diff --git a/.github/workflows/build-linux.yml b/.github/workflows/build-linux.yml deleted file mode 100644 index c1bde9271c..0000000000 --- a/.github/workflows/build-linux.yml +++ /dev/null @@ -1,64 +0,0 @@ -name: Build Linux - -permissions: - contents: read - actions: write - -on: - workflow_call: - inputs: - runs-on: - type: string - required: true - tag: - type: string - required: true - arch: - type: string - required: true - cpu: - type: string - required: true - assertions: - type: boolean - zig-optimize: - type: string - canary: - type: boolean - no-cache: - type: boolean - -jobs: - build: - name: Build Linux - uses: ./.github/workflows/build-zig.yml - with: - os: linux - only-zig: false - runs-on: ${{ inputs.runs-on }} - tag: ${{ inputs.tag }} - arch: ${{ inputs.arch }} - cpu: ${{ inputs.cpu }} - assertions: ${{ inputs.assertions }} - zig-optimize: ${{ inputs.zig-optimize }} - canary: ${{ inputs.canary }} - no-cache: ${{ inputs.no-cache }} - on-failure: - if: ${{ github.repository_owner == 'oven-sh' && failure() }} - name: On Failure - needs: build - runs-on: ubuntu-latest - steps: - - name: Send Message - uses: sarisia/actions-status-discord@v1 - with: - webhook: ${{ secrets.DISCORD_WEBHOOK }} - nodetail: true - color: "#FF0000" - title: "" - description: | - ### ❌ [${{ github.event.pull_request.title }}](${{ github.event.pull_request.html_url }}) - - @${{ github.actor }}, the build for bun-${{ inputs.tag }} failed. 
- - **[View logs](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})** diff --git a/.github/workflows/build-windows.yml b/.github/workflows/build-windows.yml deleted file mode 100644 index 3585a77f6b..0000000000 --- a/.github/workflows/build-windows.yml +++ /dev/null @@ -1,348 +0,0 @@ -name: Build Windows - -permissions: - contents: read - actions: write - -on: - workflow_call: - inputs: - runs-on: - type: string - default: windows - tag: - type: string - required: true - arch: - type: string - required: true - cpu: - type: string - required: true - assertions: - type: boolean - canary: - type: boolean - no-cache: - type: boolean - bun-version: - type: string - default: 1.1.7 - -env: - # Must specify exact version of LLVM for Windows - LLVM_VERSION: 18.1.8 - BUN_VERSION: ${{ inputs.bun-version }} - BUN_GARBAGE_COLLECTOR_LEVEL: 1 - BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING: 1 - CI: true - USE_LTO: 1 - -jobs: - build-submodules: - name: Build Submodules - runs-on: ${{ inputs.runs-on }} - steps: - - name: Install Scoop - run: | - Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser - Invoke-RestMethod -Uri https://get.scoop.sh | Invoke-Expression - Join-Path (Resolve-Path ~).Path "scoop\shims" >> $Env:GITHUB_PATH - - name: Setup Git - run: | - git config --global core.autocrlf false - git config --global core.eol lf - - name: Checkout - uses: actions/checkout@v4 - with: - sparse-checkout: | - .gitmodules - src/deps - scripts - - name: Hash Submodules - id: hash - run: | - $data = "$(& { - git submodule | Where-Object { $_ -notmatch 'WebKit' } - echo "LLVM_VERSION=${{ env.LLVM_VERSION }}" - Get-Content -Path (Get-ChildItem -Path 'scripts/build*.ps1', 'scripts/all-dependencies.ps1', 'scripts/env.ps1' | Sort-Object -Property Name).FullName | Out-String - echo 1 - })" - $hash = ( -join ((New-Object -TypeName System.Security.Cryptography.SHA1CryptoServiceProvider).ComputeHash([System.Text.Encoding]::UTF8.GetBytes($data)) | ForEach-Object { $_.ToString("x2") } )).Substring(0, 10) - echo "hash=${hash}" >> $env:GITHUB_OUTPUT - - if: ${{ !inputs.no-cache }} - name: Restore Cache - id: cache - uses: actions/cache/restore@v4 - with: - path: bun-deps - key: bun-${{ inputs.tag }}-deps-${{ steps.hash.outputs.hash }} - - if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }} - name: Install LLVM and Ninja - run: | - scoop install ninja - scoop install llvm@${{ env.LLVM_VERSION }} - scoop install nasm@2.16.01 - - if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }} - name: Clone Submodules - run: | - .\scripts\update-submodules.ps1 - - if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }} - name: Build Dependencies - env: - CPU_TARGET: ${{ inputs.cpu }} - CCACHE_DIR: ccache - USE_LTO: 1 - run: | - .\scripts\env.ps1 ${{ contains(inputs.tag, '-baseline') && '-Baseline' || '' }} - $env:BUN_DEPS_OUT_DIR = (mkdir -Force "./bun-deps") - .\scripts\all-dependencies.ps1 - - name: Save Cache - if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }} - uses: actions/cache/save@v4 - with: - path: bun-deps - key: ${{ steps.cache.outputs.cache-primary-key }} - - name: Upload bun-${{ inputs.tag }}-deps - uses: actions/upload-artifact@v4 - with: - name: bun-${{ inputs.tag }}-deps - path: bun-deps - if-no-files-found: error - codegen: - name: Codegen - runs-on: ubuntu-latest - steps: - - name: Setup Git - run: | - git config --global core.autocrlf false - git config --global core.eol lf - - name: Checkout - uses: actions/checkout@v4 - - name: Setup Bun - uses: 
./.github/actions/setup-bun - with: - bun-version: ${{ inputs.bun-version }} - - name: Codegen - run: | - ./scripts/cross-compile-codegen.sh win32 x64 - - if: ${{ inputs.canary }} - name: Calculate Revision - run: | - echo "canary_revision=$(GITHUB_TOKEN="${{ github.token }}" - bash ./scripts/calculate-canary-revision.sh --raw)" > build-codegen-win32-x64/.canary_revision - - name: Upload bun-${{ inputs.tag }}-codegen - uses: actions/upload-artifact@v4 - with: - name: bun-${{ inputs.tag }}-codegen - path: build-codegen-win32-x64 - if-no-files-found: error - build-cpp: - name: Build C++ - needs: codegen - runs-on: ${{ inputs.runs-on }} - steps: - - name: Install Scoop - run: | - Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser - Invoke-RestMethod -Uri https://get.scoop.sh | Invoke-Expression - Join-Path (Resolve-Path ~).Path "scoop\shims" >> $Env:GITHUB_PATH - - name: Setup Git - run: | - git config --global core.autocrlf false - git config --global core.eol lf - - name: Checkout - uses: actions/checkout@v4 - with: - submodules: recursive - - name: Install LLVM and Ninja - run: | - scoop install ninja - scoop install llvm@${{ env.LLVM_VERSION }} - - name: Setup Bun - uses: ./.github/actions/setup-bun - with: - bun-version: ${{ inputs.bun-version }} - - if: ${{ !inputs.no-cache }} - name: Restore Cache - uses: actions/cache@v4 - with: - path: ccache - key: bun-${{ inputs.tag }}-cpp-${{ hashFiles('Dockerfile', 'Makefile', 'CMakeLists.txt', 'build.zig', 'scripts/**', 'src/**', 'packages/bun-usockets/src/**', 'packages/bun-uws/src/**') }} - restore-keys: | - bun-${{ inputs.tag }}-cpp- - - name: Download bun-${{ inputs.tag }}-codegen - uses: actions/download-artifact@v4 - with: - name: bun-${{ inputs.tag }}-codegen - path: build - - name: Compile - env: - CPU_TARGET: ${{ inputs.cpu }} - CCACHE_DIR: ccache - USE_LTO: 1 - run: | - # $CANARY_REVISION = if (Test-Path build/.canary_revision) { Get-Content build/.canary_revision } else { "0" } - $CANARY_REVISION = 0 - .\scripts\env.ps1 ${{ contains(inputs.tag, '-baseline') && '-Baseline' || '' }} - .\scripts\update-submodules.ps1 - .\scripts\build-libuv.ps1 -CloneOnly $True - cd build - cmake .. 
-G Ninja -DCMAKE_BUILD_TYPE=Release ` - -DNO_CODEGEN=1 ` - -DUSE_LTO=1 ` - -DNO_CONFIGURE_DEPENDS=1 ` - "-DCANARY=${CANARY_REVISION}" ` - -DBUN_CPP_ONLY=1 ${{ contains(inputs.tag, '-baseline') && '-DUSE_BASELINE_BUILD=1' || '' }} - if ($LASTEXITCODE -ne 0) { throw "CMake configuration failed" } - .\compile-cpp-only.ps1 -v - if ($LASTEXITCODE -ne 0) { throw "C++ compilation failed" } - - name: Upload bun-${{ inputs.tag }}-cpp - uses: actions/upload-artifact@v4 - with: - name: bun-${{ inputs.tag }}-cpp - path: build/bun-cpp-objects.a - if-no-files-found: error - build-zig: - name: Build Zig - uses: ./.github/workflows/build-zig.yml - with: - os: windows - zig-optimize: ReleaseSafe - only-zig: true - tag: ${{ inputs.tag }} - arch: ${{ inputs.arch }} - cpu: ${{ inputs.cpu }} - assertions: ${{ inputs.assertions }} - canary: ${{ inputs.canary }} - no-cache: ${{ inputs.no-cache }} - link: - name: Link - runs-on: ${{ inputs.runs-on }} - needs: - - build-submodules - - build-cpp - - build-zig - - codegen - steps: - - name: Install Scoop - run: | - Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser - Invoke-RestMethod -Uri https://get.scoop.sh | Invoke-Expression - Join-Path (Resolve-Path ~).Path "scoop\shims" >> $Env:GITHUB_PATH - - name: Setup Git - run: | - git config --global core.autocrlf false - git config --global core.eol lf - - name: Checkout - uses: actions/checkout@v4 - with: - submodules: recursive - - name: Install Ninja - run: | - scoop install ninja - scoop install llvm@${{ env.LLVM_VERSION }} - - name: Setup Bun - uses: ./.github/actions/setup-bun - with: - bun-version: ${{ inputs.bun-version }} - - name: Download bun-${{ inputs.tag }}-deps - uses: actions/download-artifact@v4 - with: - name: bun-${{ inputs.tag }}-deps - path: bun-deps - - name: Download bun-${{ inputs.tag }}-cpp - uses: actions/download-artifact@v4 - with: - name: bun-${{ inputs.tag }}-cpp - path: bun-cpp - - name: Download bun-${{ inputs.tag }}-zig - uses: actions/download-artifact@v4 - with: - name: bun-${{ inputs.tag }}-zig - path: bun-zig - - name: Download bun-${{ inputs.tag }}-codegen - uses: actions/download-artifact@v4 - with: - name: bun-${{ inputs.tag }}-codegen - path: build - - if: ${{ !inputs.no-cache }} - name: Restore Cache - uses: actions/cache@v4 - with: - path: ccache - key: bun-${{ inputs.tag }}-cpp-${{ hashFiles('Dockerfile', 'Makefile', 'CMakeLists.txt', 'build.zig', 'scripts/**', 'src/**', 'packages/bun-usockets/src/**', 'packages/bun-uws/src/**') }} - restore-keys: | - bun-${{ inputs.tag }}-cpp- - - name: Link - env: - CPU_TARGET: ${{ inputs.cpu }} - CCACHE_DIR: ccache - run: | - .\scripts\update-submodules.ps1 - .\scripts\env.ps1 ${{ contains(inputs.tag, '-baseline') && '-Baseline' || '' }} - Set-Location build - # $CANARY_REVISION = if (Test-Path build/.canary_revision) { Get-Content build/.canary_revision } else { "0" } - $CANARY_REVISION = 0 - cmake .. -G Ninja -DCMAKE_BUILD_TYPE=Release ` - -DNO_CODEGEN=1 ` - -DNO_CONFIGURE_DEPENDS=1 ` - "-DCANARY=${CANARY_REVISION}" ` - -DBUN_LINK_ONLY=1 ` - -DUSE_LTO=1 ` - "-DBUN_DEPS_OUT_DIR=$(Resolve-Path ../bun-deps)" ` - "-DBUN_CPP_ARCHIVE=$(Resolve-Path ../bun-cpp/bun-cpp-objects.a)" ` - "-DBUN_ZIG_OBJ_DIR=$(Resolve-Path ../bun-zig)" ` - ${{ contains(inputs.tag, '-baseline') && '-DUSE_BASELINE_BUILD=1' || '' }} - if ($LASTEXITCODE -ne 0) { throw "CMake configuration failed" } - ninja -v - if ($LASTEXITCODE -ne 0) { throw "Link failed!" 
} - - name: Prepare - run: | - $Dist = mkdir -Force "bun-${{ inputs.tag }}" - cp -r build\bun.exe "$Dist\bun.exe" - Compress-Archive -Force "$Dist" "${Dist}.zip" - $Dist = "$Dist-profile" - MkDir -Force "$Dist" - cp -r build\bun.exe "$Dist\bun.exe" - cp -r build\bun.pdb "$Dist\bun.pdb" - Compress-Archive -Force "$Dist" "$Dist.zip" - .\build\bun.exe --print "JSON.stringify(require('bun:internal-for-testing').crash_handler.getFeatureData())" > .\features.json - - name: Upload bun-${{ inputs.tag }} - uses: actions/upload-artifact@v4 - with: - name: bun-${{ inputs.tag }} - path: bun-${{ inputs.tag }}.zip - if-no-files-found: error - - name: Upload bun-${{ inputs.tag }}-profile - uses: actions/upload-artifact@v4 - with: - name: bun-${{ inputs.tag }}-profile - path: bun-${{ inputs.tag }}-profile.zip - if-no-files-found: error - - name: Upload bun-feature-data - uses: actions/upload-artifact@v4 - with: - name: bun-feature-data - path: features.json - if-no-files-found: error - overwrite: true - on-failure: - if: ${{ github.repository_owner == 'oven-sh' && failure() }} - name: On Failure - needs: link - runs-on: ubuntu-latest - steps: - - name: Send Message - uses: sarisia/actions-status-discord@v1 - with: - webhook: ${{ secrets.DISCORD_WEBHOOK }} - nodetail: true - color: "#FF0000" - title: "" - description: | - ### ❌ [${{ github.event.pull_request.title }}](${{ github.event.pull_request.html_url }}) - - @${{ github.actor }}, the build for bun-${{ inputs.tag }} failed. - - **[View logs](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})** diff --git a/.github/workflows/build-zig.yml b/.github/workflows/build-zig.yml deleted file mode 100644 index 097fe082e7..0000000000 --- a/.github/workflows/build-zig.yml +++ /dev/null @@ -1,122 +0,0 @@ -name: Build Zig - -permissions: - contents: read - actions: write - -on: - workflow_call: - inputs: - runs-on: - type: string - default: ${{ github.repository_owner != 'oven-sh' && 'ubuntu-latest' || inputs.only-zig && 'namespace-profile-bun-ci-linux-x64' || inputs.arch == 'x64' && 'namespace-profile-bun-ci-linux-x64' || 'namespace-profile-bun-ci-linux-aarch64' }} - tag: - type: string - required: true - os: - type: string - required: true - arch: - type: string - required: true - cpu: - type: string - required: true - assertions: - type: boolean - default: false - zig-optimize: - type: string # 'ReleaseSafe' or 'ReleaseFast' - default: ReleaseFast - canary: - type: boolean - default: ${{ github.ref == 'refs/heads/main' }} - only-zig: - type: boolean - default: true - no-cache: - type: boolean - default: false - -jobs: - build-zig: - name: ${{ inputs.only-zig && 'Build Zig' || 'Build & Link' }} - runs-on: ${{ inputs.runs-on }} - steps: - - name: Checkout - uses: actions/checkout@v4 - - name: Calculate Cache Key - id: cache - run: | - echo "key=${{ hashFiles('Dockerfile', 'Makefile', 'CMakeLists.txt', 'build.zig', 'scripts/**', 'src/**', 'packages/bun-usockets/src/**', 'packages/bun-uws/src/**') }}" >> $GITHUB_OUTPUT - - if: ${{ !inputs.no-cache }} - name: Restore Cache - uses: actions/cache@v4 - with: - key: bun-${{ inputs.tag }}-docker-${{ steps.cache.outputs.key }} - restore-keys: | - bun-${{ inputs.tag }}-docker- - path: | - ${{ runner.temp }}/dockercache - - name: Setup Docker - uses: docker/setup-buildx-action@v3 - with: - install: true - platforms: | - linux/${{ runner.arch == 'X64' && 'amd64' || 'arm64' }} - - name: Build - uses: docker/build-push-action@v5 - with: - push: false - target: ${{ inputs.only-zig && 
'build_release_obj' || 'artifact' }} - cache-from: | - type=local,src=${{ runner.temp }}/dockercache - cache-to: | - type=local,dest=${{ runner.temp }}/dockercache,mode=max - outputs: | - type=local,dest=${{ runner.temp }}/release - platforms: | - linux/${{ runner.arch == 'X64' && 'amd64' || 'arm64' }} - build-args: | - GIT_SHA=${{ github.event.workflow_run.head_sha || github.sha }} - TRIPLET=${{ inputs.os == 'darwin' && format('{0}-macos-none', inputs.arch == 'x64' && 'x86_64' || 'aarch64') || inputs.os == 'windows' && format('{0}-windows-msvc', inputs.arch == 'x64' && 'x86_64' || 'aarch64') || format('{0}-linux-gnu', inputs.arch == 'x64' && 'x86_64' || 'aarch64') }} - ARCH=${{ inputs.arch == 'x64' && 'x86_64' || 'aarch64' }} - BUILDARCH=${{ inputs.arch == 'x64' && 'amd64' || 'arm64' }} - BUILD_MACHINE_ARCH=${{ inputs.arch == 'x64' && 'x86_64' || 'aarch64' }} - CPU_TARGET=${{ inputs.arch == 'x64' && inputs.cpu || 'native' }} - ASSERTIONS=${{ inputs.assertions && 'ON' || 'OFF' }} - ZIG_OPTIMIZE=${{ inputs.zig-optimize }} - CANARY=${{ inputs.canary && '1' || '0' }} - - if: ${{ inputs.only-zig }} - name: Upload bun-${{ inputs.tag }}-zig - uses: actions/upload-artifact@v4 - with: - name: bun-${{ inputs.tag }}-zig - path: ${{ runner.temp }}/release/bun-zig.o - if-no-files-found: error - - if: ${{ !inputs.only-zig }} - name: Prepare - run: | - cd ${{ runner.temp }}/release - chmod +x bun-profile bun - mkdir bun-${{ inputs.tag }}-profile - mkdir bun-${{ inputs.tag }} - strip bun - mv bun-profile bun-${{ inputs.tag }}-profile/bun-profile - mv bun bun-${{ inputs.tag }}/bun - zip -r bun-${{ inputs.tag }}-profile.zip bun-${{ inputs.tag }}-profile - zip -r bun-${{ inputs.tag }}.zip bun-${{ inputs.tag }} - - if: ${{ !inputs.only-zig }} - name: Upload bun-${{ inputs.tag }} - uses: actions/upload-artifact@v4 - with: - name: bun-${{ inputs.tag }} - path: ${{ runner.temp }}/release/bun-${{ inputs.tag }}.zip - if-no-files-found: error - - if: ${{ !inputs.only-zig }} - name: Upload bun-${{ inputs.tag }}-profile - uses: actions/upload-artifact@v4 - with: - name: bun-${{ inputs.tag }}-profile - path: ${{ runner.temp }}/release/bun-${{ inputs.tag }}-profile.zip - if-no-files-found: error diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml deleted file mode 100644 index e7acf37682..0000000000 --- a/.github/workflows/ci.yml +++ /dev/null @@ -1,245 +0,0 @@ -name: CI - -permissions: - contents: read - actions: write - -concurrency: - group: ${{ github.workflow }}-${{ github.event_name == 'workflow_dispatch' && inputs.run-id || github.ref }} - cancel-in-progress: true - -on: - workflow_dispatch: - inputs: - run-id: - type: string - description: The workflow ID to download artifacts (skips the build step) - pull_request: - paths-ignore: - - .vscode/**/* - - docs/**/* - - examples/**/* - push: - branches: - - main - paths-ignore: - - .vscode/**/* - - docs/**/* - - examples/**/* - -jobs: - format: - if: ${{ !inputs.run-id }} - name: Format - uses: ./.github/workflows/run-format.yml - secrets: inherit - with: - zig-version: 0.13.0 - permissions: - contents: write - lint: - if: ${{ !inputs.run-id }} - name: Lint - uses: ./.github/workflows/run-lint.yml - secrets: inherit - linux-x64: - if: ${{ !inputs.run-id }} - name: Build linux-x64 - uses: ./.github/workflows/build-linux.yml - secrets: inherit - with: - runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-linux-x64' || 'ubuntu-latest' }} - tag: linux-x64 - arch: x64 - cpu: haswell - canary: true - no-cache: true - 
linux-x64-baseline: - if: ${{ !inputs.run-id }} - name: Build linux-x64-baseline - uses: ./.github/workflows/build-linux.yml - secrets: inherit - with: - runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-linux-x64' || 'ubuntu-latest' }} - tag: linux-x64-baseline - arch: x64 - cpu: nehalem - canary: true - no-cache: true - linux-aarch64: - if: ${{ !inputs.run-id && github.repository_owner == 'oven-sh' }} - name: Build linux-aarch64 - uses: ./.github/workflows/build-linux.yml - secrets: inherit - with: - runs-on: namespace-profile-bun-ci-linux-aarch64 - tag: linux-aarch64 - arch: aarch64 - cpu: native - canary: true - no-cache: true - darwin-x64: - if: ${{ !inputs.run-id }} - name: Build darwin-x64 - uses: ./.github/workflows/build-darwin.yml - secrets: inherit - with: - runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-12-large' || 'macos-12' }} - tag: darwin-x64 - arch: x64 - cpu: haswell - canary: true - darwin-x64-baseline: - if: ${{ !inputs.run-id }} - name: Build darwin-x64-baseline - uses: ./.github/workflows/build-darwin.yml - secrets: inherit - with: - runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-12-large' || 'macos-12' }} - tag: darwin-x64-baseline - arch: x64 - cpu: nehalem - canary: true - darwin-aarch64: - if: ${{ !inputs.run-id }} - name: Build darwin-aarch64 - uses: ./.github/workflows/build-darwin.yml - secrets: inherit - with: - runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-darwin-aarch64' || 'macos-12' }} - tag: darwin-aarch64 - arch: aarch64 - cpu: native - canary: true - windows-x64: - if: ${{ !inputs.run-id }} - name: Build windows-x64 - uses: ./.github/workflows/build-windows.yml - secrets: inherit - with: - runs-on: windows - tag: windows-x64 - arch: x64 - cpu: haswell - canary: true - windows-x64-baseline: - if: ${{ !inputs.run-id }} - name: Build windows-x64-baseline - uses: ./.github/workflows/build-windows.yml - secrets: inherit - with: - runs-on: windows - tag: windows-x64-baseline - arch: x64 - cpu: nehalem - canary: true - linux-x64-test: - if: ${{ inputs.run-id || github.event_name == 'pull_request' }} - name: Test linux-x64 - needs: linux-x64 - uses: ./.github/workflows/run-test.yml - secrets: inherit - with: - run-id: ${{ inputs.run-id }} - pr-number: ${{ github.event.number }} - runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-linux-x64' || 'ubuntu-latest' }} - tag: linux-x64 - linux-x64-baseline-test: - if: ${{ inputs.run-id || github.event_name == 'pull_request' }} - name: Test linux-x64-baseline - needs: linux-x64-baseline - uses: ./.github/workflows/run-test.yml - secrets: inherit - with: - run-id: ${{ inputs.run-id }} - pr-number: ${{ github.event.number }} - runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-linux-x64' || 'ubuntu-latest' }} - tag: linux-x64-baseline - linux-aarch64-test: - if: ${{ inputs.run-id || github.event_name == 'pull_request' && github.repository_owner == 'oven-sh'}} - name: Test linux-aarch64 - needs: linux-aarch64 - uses: ./.github/workflows/run-test.yml - secrets: inherit - with: - run-id: ${{ inputs.run-id }} - pr-number: ${{ github.event.number }} - runs-on: namespace-profile-bun-ci-linux-aarch64 - tag: linux-aarch64 - darwin-x64-test: - if: ${{ inputs.run-id || github.event_name == 'pull_request' }} - name: Test darwin-x64 - needs: darwin-x64 - uses: ./.github/workflows/run-test.yml - secrets: inherit - with: - run-id: ${{ inputs.run-id }} - pr-number: ${{ github.event.number }} - 
runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-12-large' || 'macos-12' }} - tag: darwin-x64 - darwin-x64-baseline-test: - if: ${{ inputs.run-id || github.event_name == 'pull_request' }} - name: Test darwin-x64-baseline - needs: darwin-x64-baseline - uses: ./.github/workflows/run-test.yml - secrets: inherit - with: - run-id: ${{ inputs.run-id }} - pr-number: ${{ github.event.number }} - runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-12-large' || 'macos-12' }} - tag: darwin-x64-baseline - darwin-aarch64-test: - if: ${{ inputs.run-id || github.event_name == 'pull_request' }} - name: Test darwin-aarch64 - needs: darwin-aarch64 - uses: ./.github/workflows/run-test.yml - secrets: inherit - with: - run-id: ${{ inputs.run-id }} - pr-number: ${{ github.event.number }} - runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-darwin-aarch64' || 'macos-12' }} - tag: darwin-aarch64 - windows-x64-test: - if: ${{ inputs.run-id || github.event_name == 'pull_request' }} - name: Test windows-x64 - needs: windows-x64 - uses: ./.github/workflows/run-test.yml - secrets: inherit - with: - run-id: ${{ inputs.run-id }} - pr-number: ${{ github.event.number }} - runs-on: windows - tag: windows-x64 - windows-x64-baseline-test: - if: ${{ inputs.run-id || github.event_name == 'pull_request' }} - name: Test windows-x64-baseline - needs: windows-x64-baseline - uses: ./.github/workflows/run-test.yml - secrets: inherit - with: - run-id: ${{ inputs.run-id }} - pr-number: ${{ github.event.number }} - runs-on: windows - tag: windows-x64-baseline - cleanup: - if: ${{ always() }} - name: Cleanup - needs: - - linux-x64 - - linux-x64-baseline - - linux-aarch64 - - darwin-x64 - - darwin-x64-baseline - - darwin-aarch64 - - windows-x64 - - windows-x64-baseline - runs-on: ubuntu-latest - steps: - - name: Cleanup Artifacts - uses: geekyeggo/delete-artifact@v5 - with: - name: | - bun-*-cpp - bun-*-zig - bun-*-deps - bun-*-codegen diff --git a/.github/workflows/comment.yml b/.github/workflows/comment.yml deleted file mode 100644 index 3c798e8fcc..0000000000 --- a/.github/workflows/comment.yml +++ /dev/null @@ -1,55 +0,0 @@ -name: Comment - -permissions: - actions: read - pull-requests: write - -on: - workflow_run: - workflows: - - CI - types: - - completed - -jobs: - comment: - if: ${{ github.repository_owner == 'oven-sh' }} - name: Comment - runs-on: ubuntu-latest - steps: - - name: Download Tests - uses: actions/download-artifact@v4 - with: - path: bun - pattern: bun-*-tests - github-token: ${{ github.token }} - run-id: ${{ github.event.workflow_run.id }} - - name: Setup Environment - id: env - shell: bash - run: | - echo "pr-number=$(<bun/bun-*-tests/pr-number.txt)" >> $GITHUB_OUTPUT - - name: Generate Comment - run: | - cat bun/bun-*-tests/comment.md > comment.md - if [ -s comment.md ]; then - echo -e "❌ @${{ github.actor }}, your commit has failing tests :(\n\n$(cat comment.md)" > comment.md - else - echo -e "✅ @${{ github.actor }}, all tests passed!" 
> comment.md - fi - echo -e "\n**[View logs](https://github.com/${{ github.repository }}/actions/runs/${{ github.event.workflow_run.id }})**" >> comment.md - echo -e "" >> comment.md - - name: Find Comment - id: comment - uses: peter-evans/find-comment@v3 - with: - issue-number: ${{ steps.env.outputs.pr-number }} - comment-author: github-actions[bot] - body-includes: - - name: Write Comment - uses: peter-evans/create-or-update-comment@v4 - with: - comment-id: ${{ steps.comment.outputs.comment-id }} - issue-number: ${{ steps.env.outputs.pr-number }} - body-path: comment.md - edit-mode: replace diff --git a/.github/workflows/create-release-build.yml b/.github/workflows/create-release-build.yml deleted file mode 100644 index e9aa5796fe..0000000000 --- a/.github/workflows/create-release-build.yml +++ /dev/null @@ -1,183 +0,0 @@ -name: Create Release Build -run-name: Compile Bun v${{ inputs.version }} by ${{ github.actor }} - -concurrency: - group: release - cancel-in-progress: true - -permissions: - contents: write - actions: write - -on: - workflow_dispatch: - inputs: - version: - type: string - required: true - description: "Release version. Example: 1.1.4. Exclude the 'v' prefix." - tag: - type: string - required: true - description: "GitHub tag to use" - clobber: - type: boolean - required: false - default: false - description: "Overwrite existing release artifacts?" - release: - types: - - created - -jobs: - notify-start: - if: ${{ github.repository_owner == 'oven-sh' }} - name: Notify Start - runs-on: ubuntu-latest - steps: - - name: Send Message - uses: sarisia/actions-status-discord@v1 - with: - webhook: ${{ secrets.DISCORD_WEBHOOK_PUBLIC }} - nodetail: true - color: "#1F6FEB" - title: "Bun v${{ inputs.version }} is compiling" - description: | - ### @${{ github.actor }} started compiling Bun v${{inputs.version}} - - name: Send Message - uses: sarisia/actions-status-discord@v1 - with: - webhook: ${{ secrets.BUN_DISCORD_GITHUB_CHANNEL_WEBHOOK }} - nodetail: true - color: "#1F6FEB" - title: "Bun v${{ inputs.version }} is compiling" - description: | - ### @${{ github.actor }} started compiling Bun v${{inputs.version}} - - **[View logs](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})** - linux-x64: - name: Build linux-x64 - uses: ./.github/workflows/build-linux.yml - secrets: inherit - with: - runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-linux-x64' || 'ubuntu-latest' }} - tag: linux-x64 - arch: x64 - cpu: haswell - canary: false - linux-x64-baseline: - name: Build linux-x64-baseline - uses: ./.github/workflows/build-linux.yml - secrets: inherit - with: - runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-linux-x64' || 'ubuntu-latest' }} - tag: linux-x64-baseline - arch: x64 - cpu: nehalem - canary: false - linux-aarch64: - name: Build linux-aarch64 - uses: ./.github/workflows/build-linux.yml - secrets: inherit - with: - runs-on: namespace-profile-bun-ci-linux-aarch64 - tag: linux-aarch64 - arch: aarch64 - cpu: native - canary: false - darwin-x64: - name: Build darwin-x64 - uses: ./.github/workflows/build-darwin.yml - secrets: inherit - with: - runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-12-large' || 'macos-12' }} - tag: darwin-x64 - arch: x64 - cpu: haswell - canary: false - darwin-x64-baseline: - name: Build darwin-x64-baseline - uses: ./.github/workflows/build-darwin.yml - secrets: inherit - with: - runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-12-large' || 'macos-12' 
}} - tag: darwin-x64-baseline - arch: x64 - cpu: nehalem - canary: false - darwin-aarch64: - name: Build darwin-aarch64 - uses: ./.github/workflows/build-darwin.yml - secrets: inherit - with: - runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-darwin-aarch64' || 'macos-12' }} - tag: darwin-aarch64 - arch: aarch64 - cpu: native - canary: false - windows-x64: - name: Build windows-x64 - uses: ./.github/workflows/build-windows.yml - secrets: inherit - with: - runs-on: windows - tag: windows-x64 - arch: x64 - cpu: haswell - canary: false - windows-x64-baseline: - name: Build windows-x64-baseline - uses: ./.github/workflows/build-windows.yml - secrets: inherit - with: - runs-on: windows - tag: windows-x64-baseline - arch: x64 - cpu: nehalem - canary: false - - upload-artifacts: - needs: - - linux-x64 - - linux-x64-baseline - - linux-aarch64 - - darwin-x64 - - darwin-x64-baseline - - darwin-aarch64 - - windows-x64 - - windows-x64-baseline - runs-on: ubuntu-latest - steps: - - name: Download Artifacts - uses: actions/download-artifact@v4 - with: - path: bun-releases - pattern: bun-* - merge-multiple: true - github-token: ${{ github.token }} - - name: Check for Artifacts - run: | - if [ ! -d "bun-releases" ] || [ -z "$(ls -A bun-releases)" ]; then - echo "Error: No artifacts were downloaded or 'bun-releases' directory does not exist." - exit 1 # Fail the job if the condition is met - else - echo "Artifacts downloaded successfully." - fi - - name: Send Message - uses: sarisia/actions-status-discord@v1 - with: - webhook: ${{ secrets.DISCORD_WEBHOOK }} - nodetail: true - color: "#FF0000" - title: "Bun v${{ inputs.version }} release artifacts uploaded" - - name: "Upload Artifacts" - env: - GH_TOKEN: ${{ github.token }} - run: | - # Unzip one level deep each artifact - cd bun-releases - for f in *.zip; do - unzip -o $f - done - cd .. 
- gh release upload --repo=${{ github.repository }} ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.tag || github.event.release.id }} ${{ inputs.clobber && '--clobber' || '' }} bun-releases/*.zip diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 2e0e90fac1..cbe6b3e93a 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -1,3 +1,6 @@ +# TODO: Move this to bash scripts instead of GitHub Actions +# so it can be run from Buildkite, see: .buildkite/scripts/release.sh + name: Release concurrency: release @@ -63,7 +66,7 @@ jobs: - name: Setup Bun uses: ./.github/actions/setup-bun with: - bun-version: "1.0.21" + bun-version: "1.1.20" - name: Install Dependencies run: bun install - name: Sign Release @@ -88,7 +91,7 @@ - name: Setup Bun uses: ./.github/actions/setup-bun with: - bun-version: "1.0.21" + bun-version: "1.1.20" - name: Install Dependencies run: bun install - name: Release @@ -117,7 +120,7 @@ if: ${{ env.BUN_VERSION != 'canary' }} uses: ./.github/actions/setup-bun with: - bun-version: "1.0.21" + bun-version: "1.1.20" - name: Setup Bun if: ${{ env.BUN_VERSION == 'canary' }} uses: ./.github/actions/setup-bun @@ -259,7 +262,7 @@ - name: Setup Bun uses: ./.github/actions/setup-bun with: - bun-version: "1.0.21" + bun-version: "1.1.20" - name: Install Dependencies run: bun install - name: Release @@ -270,6 +273,24 @@ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY}} AWS_ENDPOINT: ${{ secrets.AWS_ENDPOINT }} AWS_BUCKET: bun + + notify-sentry: + name: Notify Sentry + runs-on: ubuntu-latest + needs: s3 + steps: + - name: Notify Sentry + uses: getsentry/action-release@v1.7.0 + env: + SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }} + SENTRY_ORG: ${{ secrets.SENTRY_ORG }} + SENTRY_PROJECT: ${{ secrets.SENTRY_PROJECT }} + with: + ignore_missing: true + ignore_empty: true + version: ${{ env.BUN_VERSION }} + environment: production + bump: name: "Bump version" runs-on: ubuntu-latest diff --git a/.github/workflows/run-format.yml b/.github/workflows/run-format.yml index 2a947cc5a2..4d03ce22e8 100644 --- a/.github/workflows/run-format.yml +++ b/.github/workflows/run-format.yml @@ -29,9 +29,9 @@ jobs: - name: Setup Bun uses: ./.github/actions/setup-bun with: - bun-version: "1.1.18" + bun-version: "1.1.20" - name: Setup Zig - uses: goto-bus-stop/setup-zig@c7b6cdd3adba8f8b96984640ff172c37c93f73ee + uses: mlugg/setup-zig@v1 with: version: ${{ inputs.zig-version }} - name: Install Dependencies diff --git a/.github/workflows/run-lint-cpp.yml b/.github/workflows/run-lint-cpp.yml index 12abea144b..7aae7eaacb 100644 --- a/.github/workflows/run-lint-cpp.yml +++ b/.github/workflows/run-lint-cpp.yml @@ -17,7 +17,7 @@ on: jobs: lint-cpp: name: Lint C++ - runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-13-xlarge' || 'macos-12' }} + runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-13-xlarge' || 'macos-13' }} steps: - name: Checkout uses: actions/checkout@v4 diff --git a/.github/workflows/run-test.yml b/.github/workflows/run-test.yml deleted file mode 100644 index 6efe322a54..0000000000 --- a/.github/workflows/run-test.yml +++ /dev/null @@ -1,224 +0,0 @@ -name: Test - -permissions: - contents: read - actions: read - -on: - workflow_call: - inputs: - runs-on: - type: string - required: true - tag: - type: string - required: true - pr-number: - type: string - required: true - run-id: - type: string - default: ${{ github.run_id }} - -jobs: - test: - name: Tests - runs-on: ${{ 
inputs.runs-on }} - steps: - - if: ${{ runner.os == 'Windows' }} - name: Setup Git - run: | - git config --global core.autocrlf false - git config --global core.eol lf - - name: Checkout - uses: actions/checkout@v4 - with: - sparse-checkout: | - package.json - bun.lockb - test - packages/bun-internal-test - packages/bun-types - - name: Setup Environment - shell: bash - run: | - echo "${{ inputs.pr-number }}" > pr-number.txt - - name: Download Bun - uses: actions/download-artifact@v4 - with: - name: bun-${{ inputs.tag }} - path: bun - github-token: ${{ github.token }} - run-id: ${{ inputs.run-id || github.run_id }} - - name: Download pnpm - uses: pnpm/action-setup@v4 - with: - version: 8 - - if: ${{ runner.os != 'Windows' }} - name: Setup Bun - shell: bash - run: | - unzip bun/bun-*.zip - cd bun-* - pwd >> $GITHUB_PATH - - if: ${{ runner.os == 'Windows' }} - name: Setup Cygwin - uses: secondlife/setup-cygwin@v3 - with: - packages: bash - - if: ${{ runner.os == 'Windows' }} - name: Setup Bun (Windows) - run: | - unzip bun/bun-*.zip - cd bun-* - pwd >> $env:GITHUB_PATH - - name: Setup Node.js - uses: actions/setup-node@v4 - with: - node-version: 20 - - name: Install Dependencies - timeout-minutes: 5 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: | - bun install - - name: Install Dependencies (test) - timeout-minutes: 5 - run: | - bun install --cwd test - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - name: Install Dependencies (runner) - timeout-minutes: 5 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: | - bun install --cwd packages/bun-internal-test - - name: Run Tests - id: test - timeout-minutes: 90 - shell: bash - env: - IS_BUN_CI: 1 - TMPDIR: ${{ runner.temp }} - BUN_TAG: ${{ inputs.tag }} - BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING: "true" - SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }} - TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }} - TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }} - TEST_INFO_STRIPE: ${{ secrets.TEST_INFO_STRIPE }} - TEST_INFO_AZURE_SERVICE_BUS: ${{ secrets.TEST_INFO_AZURE_SERVICE_BUS }} - SHELLOPTS: igncr - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: | - node packages/bun-internal-test/src/runner.node.mjs $(which bun) - - if: ${{ always() }} - name: Upload Results - uses: actions/upload-artifact@v4 - with: - name: bun-${{ inputs.tag }}-tests - path: | - test-report.* - comment.md - pr-number.txt - if-no-files-found: error - overwrite: true - - if: ${{ always() && steps.test.outputs.failing_tests != '' && github.event.pull_request && github.repository_owner == 'oven-sh' }} - name: Send Message - uses: sarisia/actions-status-discord@v1 - with: - webhook: ${{ secrets.DISCORD_WEBHOOK }} - nodetail: true - color: "#FF0000" - title: "" - description: | - ### ❌ [${{ github.event.pull_request.title }}](${{ github.event.pull_request.html_url }}) - - @${{ github.actor }}, there are ${{ steps.test.outputs.failing_tests_count || 'some' }} failing tests on bun-${{ inputs.tag }}. - - ${{ steps.test.outputs.failing_tests }} - - **[View logs](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})** - - name: Fail - if: ${{ failure() || always() && steps.test.outputs.failing_tests != '' }} - run: | - echo "There are ${{ steps.test.outputs.failing_tests_count || 'some' }} failing tests on bun-${{ inputs.tag }}." - exit 1 - test-node: - name: Node.js Tests - # TODO: enable when we start paying attention to the results. 
In the meantime, this causes CI to queue jobs wasting developer time. - if: 0 - runs-on: ${{ inputs.runs-on }} - steps: - - if: ${{ runner.os == 'Windows' }} - name: Setup Git - run: | - git config --global core.autocrlf false - git config --global core.eol lf - - name: Checkout - uses: actions/checkout@v4 - with: - sparse-checkout: | - test/node.js - - name: Setup Environment - shell: bash - run: | - echo "${{ inputs.pr-number }}" > pr-number.txt - - name: Download Bun - uses: actions/download-artifact@v4 - with: - name: bun-${{ inputs.tag }} - path: bun - github-token: ${{ github.token }} - run-id: ${{ inputs.run-id || github.run_id }} - - if: ${{ runner.os != 'Windows' }} - name: Setup Bun - shell: bash - run: | - unzip bun/bun-*.zip - cd bun-* - pwd >> $GITHUB_PATH - - if: ${{ runner.os == 'Windows' }} - name: Setup Cygwin - uses: secondlife/setup-cygwin@v3 - with: - packages: bash - - if: ${{ runner.os == 'Windows' }} - name: Setup Bun (Windows) - run: | - unzip bun/bun-*.zip - cd bun-* - pwd >> $env:GITHUB_PATH - - name: Setup Node.js - uses: actions/setup-node@v4 - with: - node-version: 20 - - name: Checkout Tests - shell: bash - working-directory: test/node.js - run: | - node runner.mjs --pull - - name: Install Dependencies - timeout-minutes: 5 - shell: bash - working-directory: test/node.js - run: | - bun install - - name: Run Tests - timeout-minutes: 10 # Increase when more tests are added - shell: bash - working-directory: test/node.js - env: - TMPDIR: ${{ runner.temp }} - BUN_GARBAGE_COLLECTOR_LEVEL: "0" - BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING: "true" - run: | - node runner.mjs - - name: Upload Results - uses: actions/upload-artifact@v4 - with: - name: bun-${{ inputs.tag }}-node-tests - path: | - test/node.js/summary/*.json - if-no-files-found: error - overwrite: true diff --git a/.github/workflows/upload.yml b/.github/workflows/upload.yml deleted file mode 100644 index 232c449e1d..0000000000 --- a/.github/workflows/upload.yml +++ /dev/null @@ -1,82 +0,0 @@ -name: Upload Artifacts -run-name: Canary release ${{github.sha}} upload - -permissions: - contents: write - -on: - workflow_run: - workflows: - - CI - types: - - completed - branches: - - main - -jobs: - upload: - if: ${{ github.repository_owner == 'oven-sh' }} - name: Upload Artifacts - runs-on: ubuntu-latest - steps: - - name: Download Artifacts - uses: actions/download-artifact@v4 - with: - path: bun - pattern: bun-* - merge-multiple: true - github-token: ${{ github.token }} - run-id: ${{ github.event.workflow_run.id }} - - name: Check for Artifacts - run: | - if [ ! -d "bun" ] || [ -z "$(ls -A bun)" ]; then - echo "Error: No artifacts were downloaded or 'bun' directory does not exist." - exit 1 # Fail the job if the condition is met - else - echo "Artifacts downloaded successfully." 
- fi - - name: Upload to GitHub Releases - uses: ncipollo/release-action@v1 - with: - tag: canary - name: Canary (${{ github.sha }}) - prerelease: true - body: This canary release of Bun corresponds to the commit [${{ github.sha }}] - allowUpdates: true - replacesArtifacts: true - generateReleaseNotes: true - artifactErrorsFailBuild: true - artifacts: bun/**/bun-*.zip - token: ${{ github.token }} - - name: Upload to S3 (using SHA) - uses: shallwefootball/s3-upload-action@4350529f410221787ccf424e50133cbc1b52704e - with: - endpoint: ${{ secrets.AWS_ENDPOINT }} - aws_key_id: ${{ secrets.AWS_ACCESS_KEY_ID }} - aws_secret_access_key: ${{ secrets.AWS_SECRET_ACCESS_KEY}} - aws_bucket: ${{ secrets.AWS_BUCKET }} - source_dir: bun - destination_dir: releases/${{ github.event.workflow_run.head_sha || github.sha }}-canary - - name: Upload to S3 (using tag) - uses: shallwefootball/s3-upload-action@4350529f410221787ccf424e50133cbc1b52704e - with: - endpoint: ${{ secrets.AWS_ENDPOINT }} - aws_key_id: ${{ secrets.AWS_ACCESS_KEY_ID }} - aws_secret_access_key: ${{ secrets.AWS_SECRET_ACCESS_KEY}} - aws_bucket: ${{ secrets.AWS_BUCKET }} - source_dir: bun - destination_dir: releases/canary - - name: Announce on Discord - uses: sarisia/actions-status-discord@v1 - with: - webhook: ${{ secrets.BUN_DISCORD_GITHUB_CHANNEL_WEBHOOK }} - nodetail: true - color: "#1F6FEB" - title: "New Bun Canary available" - url: https://github.com/oven-sh/bun/commit/${{ github.sha }} - description: | - A new canary build of Bun has been automatically uploaded. To upgrade, run: - ```sh - bun upgrade --canary - # bun upgrade --stable <- to downgrade - ``` diff --git a/.gitignore b/.gitignore index 849c532b2b..a0c4419668 100644 --- a/.gitignore +++ b/.gitignore @@ -145,3 +145,4 @@ zig-cache zig-out test/node.js/upstream .zig-cache +scripts/env.local diff --git a/.gitmodules b/.gitmodules index 98845d5097..c5069240a4 100644 --- a/.gitmodules +++ b/.gitmodules @@ -82,3 +82,7 @@ url = https://github.com/oven-sh/zig depth = 1 shallow = true fetchRecurseSubmodules = false +[submodule "src/deps/libdeflate"] +path = src/deps/libdeflate +url = https://github.com/ebiggers/libdeflate +ignore = "dirty" diff --git a/.lldbinit b/.lldbinit new file mode 100644 index 0000000000..2d527f4e63 --- /dev/null +++ b/.lldbinit @@ -0,0 +1,2 @@ +command script import src/deps/zig/tools/lldb_pretty_printers.py +command script import src/bun.js/WebKit/Tools/lldb/lldb_webkit.py diff --git a/.vscode/launch.json b/.vscode/launch.json index 625ed0cf8d..5326a6f15f 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -17,6 +17,7 @@ "cwd": "${workspaceFolder}/test", "env": { "FORCE_COLOR": "1", + "BUN_DEBUG_QUIET_LOGS": "1", "BUN_GARBAGE_COLLECTOR_LEVEL": "1", }, "console": "internalConsole", @@ -145,14 +146,13 @@ "request": "launch", "name": "bun run [file]", "program": "${workspaceFolder}/build/bun-debug", - "args": ["run", "${file}"], + "args": ["run", "${fileBasename}"], "cwd": "${fileDirname}", "env": { "FORCE_COLOR": "0", "BUN_DEBUG_QUIET_LOGS": "1", "BUN_DEBUG_EventLoop": "1", "BUN_GARBAGE_COLLECTOR_LEVEL": "2", - "BUN_DEBUG_ALL": "1", }, "console": "internalConsole", }, diff --git a/.vscode/settings.json b/.vscode/settings.json index 91939e9e58..1701cb55df 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -42,8 +42,11 @@ "editor.defaultFormatter": "ziglang.vscode-zig", }, - // C++ + // lldb + "lldb.launch.initCommands": ["command source ${workspaceFolder}/.lldbinit"], "lldb.verboseLogging": false, + + // C++ "cmake.configureOnOpen": 
false, "C_Cpp.errorSquiggles": "enabled", "[cpp]": { diff --git a/CMakeLists.txt b/CMakeLists.txt index 35d7367489..f85c2c72a3 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -3,8 +3,8 @@ cmake_policy(SET CMP0091 NEW) cmake_policy(SET CMP0067 NEW) set(CMAKE_POLICY_DEFAULT_CMP0069 NEW) -set(Bun_VERSION "1.1.20") -set(WEBKIT_TAG b49be549da59347762aa83f849a65158d2a0d724) +set(Bun_VERSION "1.1.22") +set(WEBKIT_TAG f9a0fda2d2b2fd001a00bfcf8e7917a56b382516) set(BUN_WORKDIR "${CMAKE_CURRENT_BINARY_DIR}") message(STATUS "Configuring Bun ${Bun_VERSION} in ${BUN_WORKDIR}") @@ -22,6 +22,7 @@ set(REPORTED_NODEJS_VERSION "22.3.0") # If we do not set this, it will crash at startup on the first memory allocation. if(NOT WIN32 AND NOT APPLE) set(CMAKE_CXX_EXTENSIONS ON) + set(CMAKE_POSITION_INDEPENDENT_CODE FALSE) endif() # --- Build Type --- @@ -71,7 +72,7 @@ endif() # --- MacOS SDK --- if(APPLE AND DEFINED ENV{CI}) - set(CMAKE_OSX_DEPLOYMENT_TARGET "12.0") + set(CMAKE_OSX_DEPLOYMENT_TARGET "13.0") endif() if(APPLE AND NOT CMAKE_OSX_DEPLOYMENT_TARGET) @@ -115,7 +116,7 @@ endif() # we do some extra work afterwards to double-check, and we will rerun BUN_FIND_LLVM if the compiler did not match. # # If the user passes -DLLVM_PREFIX, most of this logic is skipped, but we still warn if invalid. -if(WIN32) +if(WIN32 OR APPLE) set(LLVM_VERSION 18) else() set(LLVM_VERSION 16) @@ -153,11 +154,12 @@ macro(BUN_FIND_LLVM) PATHS ENV PATH ${PLATFORM_LLVM_SEARCH_PATHS} DOC "Path to LLVM ${LLVM_VERSION}'s llvm-strip binary" ) + find_program( STRIP NAMES strip PATHS ENV PATH ${PLATFORM_LLVM_SEARCH_PATHS} - DOC "Path to LLVM ${LLVM_VERSION}'s llvm-strip binary" + DOC "Path to strip binary" ) find_program( DSYMUTIL @@ -308,6 +310,7 @@ endif() # -- Build Flags -- option(USE_STATIC_SQLITE "Statically link SQLite?" 
${DEFAULT_ON_UNLESS_APPLE}) option(USE_CUSTOM_ZLIB "Use Bun's recommended version of zlib" ON) +option(USE_CUSTOM_LIBDEFLATE "Use Bun's recommended version of libdeflate" ON) option(USE_CUSTOM_BORINGSSL "Use Bun's recommended version of BoringSSL" ON) option(USE_CUSTOM_LIBARCHIVE "Use Bun's recommended version of libarchive" ON) option(USE_CUSTOM_MIMALLOC "Use Bun's recommended version of Mimalloc" ON) @@ -329,6 +332,11 @@ option(USE_STATIC_LIBATOMIC "Statically link libatomic, requires the presence of option(USE_LTO "Enable Link-Time Optimization" ${DEFAULT_LTO}) +if(APPLE AND USE_LTO) + set(USE_LTO OFF) + message(WARNING "Link-Time Optimization is not supported on macOS because it requires -fuse-ld=lld and lld causes many segfaults on macOS (likely related to stack size)") +endif() + if(WIN32 AND USE_LTO) set(CMAKE_LINKER_TYPE LLD) set(CMAKE_INTERPROCEDURAL_OPTIMIZATION OFF) @@ -645,16 +653,6 @@ file(GLOB BUN_CPP ${CONFIGURE_DEPENDS} ) list(APPEND BUN_RAW_SOURCES ${BUN_CPP}) -# -- Brotli -- -set(BROTLI_SRC "${CMAKE_CURRENT_SOURCE_DIR}/src/deps/brotli") -file(GLOB BROTLI_FILES ${CONFIGURE_DEPENDS} - "${BROTLI_SRC}/common/*.c" - "${BROTLI_SRC}/enc/*.c" - "${BROTLI_SRC}/dec/*.c" -) -list(APPEND BUN_RAW_SOURCES ${BROTLI_FILES}) -include_directories("${BUN_DEPS_DIR}/brotli/include") - # -- uSockets -- set(USOCKETS_SRC "${CMAKE_CURRENT_SOURCE_DIR}/packages/bun-usockets/src") file(GLOB USOCKETS_FILES ${CONFIGURE_DEPENDS} @@ -902,6 +900,7 @@ if(NOT BUN_LINK_ONLY AND NOT BUN_CPP_ONLY) "${ZIG_COMPILER}" "build" "obj" "--zig-lib-dir" "${ZIG_LIB_DIR}" "--prefix" "${BUN_ZIG_OBJ_DIR}" + "--verbose" "-Dgenerated-code=${BUN_WORKDIR}/codegen" "-freference-trace=10" "-Dversion=${Bun_VERSION}" @@ -1001,8 +1000,20 @@ add_compile_definitions( ) if(NOT ASSERT_ENABLED) + if(APPLE) + add_compile_definitions("_LIBCXX_ENABLE_ASSERTIONS=0") + add_compile_definitions("_LIBCPP_HARDENING_MODE=_LIBCPP_HARDENING_MODE_NONE") + endif() + add_compile_definitions("NDEBUG=1") else() + if(APPLE) + add_compile_definitions("_LIBCXX_ENABLE_ASSERTIONS=1") + add_compile_definitions("_LIBCPP_HARDENING_MODE=_LIBCPP_HARDENING_MODE_DEBUG") + elseif(CMAKE_SYSTEM_NAME STREQUAL "Linux") + add_compile_definitions("_GLIBCXX_ASSERTIONS=1") + endif() + add_compile_definitions("ASSERT_ENABLED=1") endif() @@ -1088,7 +1099,7 @@ elseif(CMAKE_BUILD_TYPE STREQUAL "Release") if(NOT WIN32) if(USE_LTO) - list(APPEND LTO_FLAG "-flto=full" "-emit-llvm") + list(APPEND LTO_FLAG "-flto=full" "-emit-llvm" "-fwhole-program-vtables" "-fforce-emit-vtables") endif() # Leave -Werror=unused off in release builds so we avoid errors from being used in ASSERT @@ -1177,21 +1188,25 @@ if(WIN32) # set_property(TARGET ${bun} PROPERTY MSVC_RUNTIME_LIBRARY "MultiThreaded$<$:Debug>") set_property(TARGET ${bun} PROPERTY MSVC_RUNTIME_LIBRARY "MultiThreaded") - target_compile_options(${bun} PUBLIC "/EHsc" "/GR-") + target_compile_options(${bun} PUBLIC "/EHsc" "/GR-" -Xclang -fno-c++-static-destructors) target_link_options(${bun} PUBLIC "/STACK:0x1200000,0x100000" "/DEF:${BUN_SRC}/symbols.def" "/errorlimit:0") else() target_compile_options(${bun} PUBLIC - -fPIC -mtune=${CPU_TARGET} -fconstexpr-steps=2542484 -fconstexpr-depth=54 -fno-exceptions + -fno-asynchronous-unwind-tables + -fno-unwind-tables + -fno-c++-static-destructors -fvisibility=hidden -fvisibility-inlines-hidden -fno-rtti -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer + -fno-pic + -fno-pie -faddrsig ) endif() @@ -1208,10 +1223,11 @@ endif() if(UNIX AND NOT APPLE) target_link_options(${bun} PUBLIC - 
"-fuse-ld=lld" - "-static-libstdc++" - "-static-libgcc" - "-Wl,-z,now" + -fuse-ld=lld-${LLVM_VERSION} + -fno-pic + -static-libstdc++ + -static-libgcc + "-Wl,-no-pie" "-Wl,-icf=safe" "-Wl,--as-needed" "-Wl,--gc-sections" @@ -1241,6 +1257,8 @@ if(UNIX AND NOT APPLE) "-rdynamic" "-Wl,--dynamic-list=${BUN_SRC}/symbols.dyn" "-Wl,--version-script=${BUN_SRC}/linker.lds" + -Wl,-z,lazy + -Wl,-z,norelro ) target_link_libraries(${bun} PRIVATE "c") @@ -1274,11 +1292,12 @@ endif() # --- Stripped Binary "bun" if(CMAKE_BUILD_TYPE STREQUAL "Release" AND NOT WIN32 AND NOT ASSERT_ENABLED) - if(CI AND APPLE) + # if(CI AND APPLE) + if(APPLE) add_custom_command( TARGET ${bun} POST_BUILD - COMMAND ${DSYMUTIL} -z -o ${BUN_WORKDIR}/${bun}.dSYM ${BUN_WORKDIR}/${bun} + COMMAND ${DSYMUTIL} -o ${BUN_WORKDIR}/${bun}.dSYM ${BUN_WORKDIR}/${bun} COMMENT "Generating .dSYM" ) endif() @@ -1352,6 +1371,19 @@ else() target_link_libraries(${bun} PRIVATE LibArchive::LibArchive) endif() +if(USE_CUSTOM_LIBDEFLATE) + include_directories(${BUN_DEPS_DIR}/libdeflate) + + if(WIN32) + target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/deflate.lib") + else() + target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/libdeflate.a") + endif() +else() + find_package(LibDeflate REQUIRED) + target_link_libraries(${bun} PRIVATE LibDeflate::LibDeflate) +endif() + if(USE_CUSTOM_MIMALLOC) include_directories(${BUN_DEPS_DIR}/mimalloc/include) @@ -1463,6 +1495,24 @@ else() target_compile_definitions(${bun} PRIVATE "LAZY_LOAD_SQLITE=1") endif() +# -- Brotli -- +set(BROTLI_SRC "${CMAKE_CURRENT_SOURCE_DIR}/src/deps/brotli") +file(GLOB BROTLI_FILES ${CONFIGURE_DEPENDS} + "${BROTLI_SRC}/common/*.c" + "${BROTLI_SRC}/enc/*.c" + "${BROTLI_SRC}/dec/*.c" +) +add_library(brotli STATIC ${BROTLI_FILES}) +target_include_directories(brotli PRIVATE "${BROTLI_SRC}/include") +target_compile_definitions(brotli PRIVATE "BROTLI_STATIC") + +if(WIN32) + target_compile_options(brotli PRIVATE /MT /U_DLL) +endif() + +target_link_libraries(${bun} PRIVATE brotli) +include_directories("${BUN_DEPS_DIR}/brotli/include") + if(USE_CUSTOM_LSHPACK) include_directories(${BUN_DEPS_DIR}/ls-hpack) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 4f3439f503..86d148847d 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -5,6 +5,7 @@ If you are using Windows, please refer to [this guide](/docs/project/building-wi {% details summary="For Ubuntu users" %} TL;DR: Ubuntu 22.04 is suggested. Bun currently requires `glibc >=2.32` in development which means if you're on Ubuntu 20.04 (glibc == 2.31), you may likely meet `error: undefined symbol: __libc_single_threaded `. You need to take extra configurations. Also, according to this [issue](https://github.com/llvm/llvm-project/issues/97314), LLVM 16 is no longer maintained on Ubuntu 24.04 (noble). And instead, you might want `brew` to install LLVM 16 for your Ubuntu 24.04. 
+{% /details %} ## Install Dependencies diff --git a/Dockerfile b/Dockerfile index 843f8f0ef5..7b707f03e2 100644 --- a/Dockerfile +++ b/Dockerfile @@ -54,10 +54,6 @@ ENV BUILDARCH=${BUILDARCH} ENV BUN_DEPS_OUT_DIR=${BUN_DEPS_OUT_DIR} ENV BUN_ENABLE_LTO 1 -ENV CXX=clang++-${LLVM_VERSION} -ENV CC=clang-${LLVM_VERSION} -ENV AR=/usr/bin/llvm-ar-${LLVM_VERSION} -ENV LD=lld-${LLVM_VERSION} ENV LC_CTYPE=en_US.UTF-8 ENV LC_ALL=en_US.UTF-8 @@ -94,6 +90,8 @@ RUN install_packages \ clangd-${LLVM_VERSION} \ libc++-${LLVM_VERSION}-dev \ libc++abi-${LLVM_VERSION}-dev \ + llvm-${LLVM_VERSION}-runtime \ + llvm-${LLVM_VERSION}-dev \ make \ cmake \ ninja-build \ @@ -120,6 +118,15 @@ RUN install_packages \ && ln -sf /usr/bin/lldb-${LLVM_VERSION} /usr/bin/lldb \ && ln -sf /usr/bin/clangd-${LLVM_VERSION} /usr/bin/clangd \ && ln -sf /usr/bin/llvm-ar-${LLVM_VERSION} /usr/bin/llvm-ar \ + && ln -sf /usr/bin/ld.lld /usr/bin/ld \ + && ln -sf /usr/bin/llvm-ranlib-${LLVM_VERSION} /usr/bin/ranlib \ + && ln -sf /usr/bin/clang /usr/bin/cc \ + && ln -sf /usr/bin/clang /usr/bin/c89 \ + && ln -sf /usr/bin/clang /usr/bin/c99 \ + && ln -sf /usr/bin/clang++ /usr/bin/c++ \ + && ln -sf /usr/bin/clang++ /usr/bin/g++ \ + && ln -sf /usr/bin/llvm-ar /usr/bin/ar \ + && ln -sf /usr/bin/clang /usr/bin/gcc \ && arch="$(dpkg --print-architecture)" \ && case "${arch##*-}" in \ amd64) variant="x64";; \ @@ -132,6 +139,7 @@ RUN install_packages \ && ln -s /usr/bin/bun /usr/bin/bunx \ && rm -rf bun-linux-${variant} bun-linux-${variant}.zip \ && mkdir -p ${BUN_DIR} ${BUN_DEPS_OUT_DIR} + # && if [ -n "${SCCACHE_BUCKET}" ]; then \ # echo "Setting up sccache" \ # && wget https://github.com/mozilla/sccache/releases/download/v0.5.4/sccache-v0.5.4-${BUILD_MACHINE_ARCH}-unknown-linux-musl.tar.gz \ @@ -168,13 +176,14 @@ ENV CCACHE_DIR=${CCACHE_DIR} COPY Makefile ${BUN_DIR}/Makefile COPY src/deps/c-ares ${BUN_DIR}/src/deps/c-ares +COPY scripts ${BUN_DIR}/scripts WORKDIR $BUN_DIR RUN --mount=type=cache,target=${CCACHE_DIR} \ cd $BUN_DIR \ - && make c-ares \ - && rm -rf ${BUN_DIR}/src/deps/c-ares ${BUN_DIR}/Makefile + && bash ./scripts/build-cares.sh \ + && rm -rf ${BUN_DIR}/src/deps/c-ares ${BUN_DIR}/Makefile ${BUN_DIR}/scripts FROM bun-base as lolhtml @@ -205,13 +214,14 @@ ENV CPU_TARGET=${CPU_TARGET} COPY Makefile ${BUN_DIR}/Makefile COPY src/deps/mimalloc ${BUN_DIR}/src/deps/mimalloc +COPY scripts ${BUN_DIR}/scripts ARG CCACHE_DIR=/ccache ENV CCACHE_DIR=${CCACHE_DIR} RUN --mount=type=cache,target=${CCACHE_DIR} \ cd ${BUN_DIR} \ - && make mimalloc \ + && bash ./scripts/build-mimalloc.sh \ && rm -rf src/deps/mimalloc Makefile FROM bun-base as mimalloc-debug @@ -241,14 +251,38 @@ ARG CCACHE_DIR=/ccache ENV CCACHE_DIR=${CCACHE_DIR} COPY Makefile ${BUN_DIR}/Makefile +COPY CMakeLists.txt ${BUN_DIR}/CMakeLists.txt +COPY scripts ${BUN_DIR}/scripts COPY src/deps/zlib ${BUN_DIR}/src/deps/zlib +COPY package.json bun.lockb Makefile .gitmodules ${BUN_DIR}/ WORKDIR $BUN_DIR RUN --mount=type=cache,target=${CCACHE_DIR} \ cd $BUN_DIR \ - && make zlib \ - && rm -rf src/deps/zlib Makefile + && bash ./scripts/build-zlib.sh && rm -rf src/deps/zlib scripts + + +FROM bun-base as libdeflate + +ARG BUN_DIR +ARG CPU_TARGET +ENV CPU_TARGET=${CPU_TARGET} +ARG CCACHE_DIR=/ccache +ENV CCACHE_DIR=${CCACHE_DIR} + +COPY Makefile ${BUN_DIR}/Makefile +COPY CMakeLists.txt ${BUN_DIR}/CMakeLists.txt +COPY scripts ${BUN_DIR}/scripts +COPY src/deps/libdeflate ${BUN_DIR}/src/deps/libdeflate +COPY package.json bun.lockb Makefile .gitmodules ${BUN_DIR}/ + +WORKDIR $BUN_DIR + +RUN 
--mount=type=cache,target=${CCACHE_DIR} \ + cd $BUN_DIR \ + && bash ./scripts/build-libdeflate.sh && rm -rf src/deps/libdeflate scripts + FROM bun-base as libarchive @@ -287,6 +321,7 @@ ARG CPU_TARGET ENV CPU_TARGET=${CPU_TARGET} COPY Makefile ${BUN_DIR}/Makefile +COPY scripts ${BUN_DIR}/scripts COPY src/deps/boringssl ${BUN_DIR}/src/deps/boringssl WORKDIR $BUN_DIR @@ -296,7 +331,7 @@ ENV CCACHE_DIR=${CCACHE_DIR} RUN --mount=type=cache,target=${CCACHE_DIR} \ cd ${BUN_DIR} \ - && make boringssl \ + && bash ./scripts/build-boringssl.sh \ && rm -rf src/deps/boringssl Makefile @@ -312,12 +347,14 @@ ENV CCACHE_DIR=${CCACHE_DIR} COPY Makefile ${BUN_DIR}/Makefile COPY src/deps/zstd ${BUN_DIR}/src/deps/zstd +COPY scripts ${BUN_DIR}/scripts WORKDIR $BUN_DIR RUN --mount=type=cache,target=${CCACHE_DIR} \ cd $BUN_DIR \ - && make zstd + && bash ./scripts/build-zstd.sh \ + && rm -rf src/deps/zstd scripts FROM bun-base as ls-hpack @@ -331,12 +368,14 @@ ENV CCACHE_DIR=${CCACHE_DIR} COPY Makefile ${BUN_DIR}/Makefile COPY src/deps/ls-hpack ${BUN_DIR}/src/deps/ls-hpack +COPY scripts ${BUN_DIR}/scripts WORKDIR $BUN_DIR RUN --mount=type=cache,target=${CCACHE_DIR} \ cd $BUN_DIR \ - && make lshpack + && bash ./scripts/build-lshpack.sh \ + && rm -rf src/deps/ls-hpack scripts FROM bun-base-with-zig as bun-identifier-cache @@ -394,6 +433,9 @@ COPY src ${BUN_DIR}/src COPY CMakeLists.txt ${BUN_DIR}/CMakeLists.txt COPY src/deps/boringssl/include ${BUN_DIR}/src/deps/boringssl/include +# for uWebSockets +COPY src/deps/libdeflate ${BUN_DIR}/src/deps/libdeflate + ARG CCACHE_DIR=/ccache ENV CCACHE_DIR=${CCACHE_DIR} @@ -492,11 +534,13 @@ RUN mkdir -p build bun-webkit # lol COPY src/bun.js/bindings/sqlite/sqlite3.c ${BUN_DIR}/src/bun.js/bindings/sqlite/sqlite3.c +COPY src/deps/brotli ${BUN_DIR}/src/deps/brotli COPY src/symbols.dyn src/linker.lds ${BUN_DIR}/src/ COPY CMakeLists.txt ${BUN_DIR}/CMakeLists.txt COPY --from=zlib ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/ +COPY --from=libdeflate ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/ COPY --from=libarchive ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/ COPY --from=boringssl ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/ COPY --from=lolhtml ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/ @@ -506,7 +550,8 @@ COPY --from=tinycc ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/ COPY --from=c-ares ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/ COPY --from=ls-hpack ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/ COPY --from=bun-compile-zig-obj /tmp/bun-zig.o ${BUN_DIR}/build/bun-zig.o -COPY --from=bun-cpp-objects ${BUN_DIR}/build/bun-cpp-objects.a ${BUN_DIR}/build/bun-cpp-objects.a +COPY --from=bun-cpp-objects ${BUN_DIR}/build/*.a ${BUN_DIR}/build/ +COPY --from=bun-cpp-objects ${BUN_DIR}/build/*.o ${BUN_DIR}/build/ COPY --from=bun-cpp-objects ${BUN_DIR}/bun-webkit/lib ${BUN_DIR}/bun-webkit/lib WORKDIR $BUN_DIR/build diff --git a/LATEST b/LATEST index 0ee9c5d6a3..a2a8e42b09 100644 --- a/LATEST +++ b/LATEST @@ -1 +1 @@ -1.1.20 \ No newline at end of file +1.1.21 \ No newline at end of file diff --git a/LICENSE.md b/LICENSE.md index 719bf08d76..4cc901b7bc 100644 --- a/LICENSE.md +++ b/LICENSE.md @@ -34,6 +34,8 @@ Bun statically links these libraries: | [`c-ares`](https://github.com/c-ares/c-ares) | MIT licensed | | [`libicu`](https://github.com/unicode-org/icu) 72 | [license here](https://github.com/unicode-org/icu/blob/main/icu4c/LICENSE) | | [`libbase64`](https://github.com/aklomp/base64/blob/master/LICENSE) | BSD 2-Clause | +| [`libuv`](https://github.com/libuv/libuv) (on Windows) | MIT | +| 
[`libdeflate`](https://github.com/ebiggers/libdeflate) | MIT | | A fork of [`uWebsockets`](https://github.com/jarred-sumner/uwebsockets) | Apache 2.0 licensed | | Parts of [Tigerbeetle's IO code](https://github.com/tigerbeetle/tigerbeetle/blob/532c8b70b9142c17e07737ab6d3da68d7500cbca/src/io/windows.zig#L1) | Apache 2.0 licensed | diff --git a/Makefile b/Makefile index db8d717ec9..c997d7da7b 100644 --- a/Makefile +++ b/Makefile @@ -26,7 +26,7 @@ ifeq ($(ARCH_NAME_RAW),arm64) ARCH_NAME = aarch64 DOCKER_BUILDARCH = arm64 BREW_PREFIX_PATH = /opt/homebrew -DEFAULT_MIN_MACOS_VERSION = 11.0 +DEFAULT_MIN_MACOS_VERSION = 13.0 MARCH_NATIVE = -mtune=$(CPU_TARGET) ifeq ($(OS_NAME),linux) MARCH_NATIVE = -march=armv8-a+crc -mtune=ampere1 @@ -157,7 +157,12 @@ CMAKE_FLAGS_WITHOUT_RELEASE = -DCMAKE_C_COMPILER=$(CC) \ -DCMAKE_OSX_DEPLOYMENT_TARGET=$(MIN_MACOS_VERSION) \ $(CMAKE_CXX_COMPILER_LAUNCHER_FLAG) \ -DCMAKE_AR=$(AR) \ - -DCMAKE_RANLIB=$(which llvm-16-ranlib 2>/dev/null || which llvm-ranlib 2>/dev/null) + -DCMAKE_RANLIB=$(which llvm-16-ranlib 2>/dev/null || which llvm-ranlib 2>/dev/null) \ + -DCMAKE_CXX_STANDARD=20 \ + -DCMAKE_C_STANDARD=17 \ + -DCMAKE_CXX_STANDARD_REQUIRED=ON \ + -DCMAKE_C_STANDARD_REQUIRED=ON \ + -DCMAKE_CXX_EXTENSIONS=ON @@ -184,8 +189,8 @@ endif OPTIMIZATION_LEVEL=-O3 $(MARCH_NATIVE) DEBUG_OPTIMIZATION_LEVEL= -O1 $(MARCH_NATIVE) -gdwarf-4 -CFLAGS_WITHOUT_MARCH = $(MACOS_MIN_FLAG) $(OPTIMIZATION_LEVEL) -fno-exceptions -fvisibility=hidden -fvisibility-inlines-hidden -BUN_CFLAGS = $(MACOS_MIN_FLAG) $(MARCH_NATIVE) $(OPTIMIZATION_LEVEL) -fno-exceptions -fvisibility=hidden -fvisibility-inlines-hidden +CFLAGS_WITHOUT_MARCH = $(MACOS_MIN_FLAG) $(OPTIMIZATION_LEVEL) -fno-exceptions -fvisibility=hidden -fvisibility-inlines-hidden -mno-omit-leaf-frame-pointer -fno-omit-frame-pointer -fno-asynchronous-unwind-tables -fno-unwind-tables -fno-pie -fno-pic +BUN_CFLAGS = $(MACOS_MIN_FLAG) $(MARCH_NATIVE) $(OPTIMIZATION_LEVEL) -fno-exceptions -fvisibility=hidden -fvisibility-inlines-hidden -mno-omit-leaf-frame-pointer -fno-omit-frame-pointer -fno-asynchronous-unwind-tables -fno-unwind-tables -fno-pie -fno-pic BUN_TMP_DIR := /tmp/make-bun CFLAGS=$(CFLAGS_WITHOUT_MARCH) $(MARCH_NATIVE) diff --git a/bench/gzip/bun.js b/bench/gzip/bun.js index 1c5cdcaddd..6b69ae1fbb 100644 --- a/bench/gzip/bun.js +++ b/bench/gzip/bun.js @@ -1,20 +1,43 @@ -import { run, bench } from "mitata"; +import { run, bench, group } from "mitata"; import { gzipSync, gunzipSync } from "bun"; -const data = new TextEncoder().encode("Hello World!".repeat(9999)); +const data = await Bun.file(require.resolve("@babel/standalone/babel.min.js")).arrayBuffer(); const compressed = gzipSync(data); -bench(`roundtrip - "Hello World!".repeat(9999))`, () => { - gunzipSync(gzipSync(data)); +const libraries = ["zlib"]; +if (Bun.semver.satisfies(Bun.version.replaceAll("-debug", ""), ">=1.1.21")) { + libraries.push("libdeflate"); +} +const options = { library: undefined }; +const benchFn = (name, fn) => { + if (libraries.length > 1) { + group(name, () => { + for (const library of libraries) { + bench(library, () => { + options.library = library; + fn(); + }); + } + }); + } else { + options.library = libraries[0]; + bench(name, () => { + fn(); + }); + } +}; + +benchFn(`roundtrip - @babel/standalone/babel.min.js`, () => { + gunzipSync(gzipSync(data, options), options); }); -bench(`gzipSync("Hello World!".repeat(9999)))`, () => { - gzipSync(data); +benchFn(`gzipSync(@babel/standalone/babel.min.js`, () => { + gzipSync(data, options); }); 
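// A minimal sketch of direct (non-benchmark) usage: the `library` value threaded
// through `options` above can be passed straight to `gzipSync`/`gunzipSync`,
// assuming Bun >= 1.1.21 per the version gate at the top of this file:
//
//   const zipped = gzipSync(data, { library: "libdeflate" });
//   const unzipped = gunzipSync(zipped, { library: "libdeflate" });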
-bench(`gunzipSync("Hello World!".repeat(9999)))`, () => { - gunzipSync(compressed); +benchFn(`gunzipSync(@babel/standalone/babel.min.js`, () => { + gunzipSync(compressed, options); }); await run(); diff --git a/bench/gzip/bun.lockb b/bench/gzip/bun.lockb new file mode 100755 index 0000000000..96feac4287 Binary files /dev/null and b/bench/gzip/bun.lockb differ diff --git a/bench/gzip/node.mjs b/bench/gzip/node.mjs index 0d6ea51249..d7a1abade7 100644 --- a/bench/gzip/node.mjs +++ b/bench/gzip/node.mjs @@ -1,19 +1,22 @@ import { run, bench } from "mitata"; import { gzipSync, gunzipSync } from "zlib"; +import { createRequire } from "module"; +import { readFileSync } from "fs"; -const data = new TextEncoder().encode("Hello World!".repeat(9999)); +const require = createRequire(import.meta.url); +const data = readFileSync(require.resolve("@babel/standalone/babel.min.js")); const compressed = gzipSync(data); -bench(`roundtrip - "Hello World!".repeat(9999))`, () => { +bench(`roundtrip - @babel/standalone/babel.min.js)`, () => { gunzipSync(gzipSync(data)); }); -bench(`gzipSync("Hello World!".repeat(9999)))`, () => { +bench(`gzipSync(@babel/standalone/babel.min.js))`, () => { gzipSync(data); }); -bench(`gunzipSync("Hello World!".repeat(9999)))`, () => { +bench(`gunzipSync(@babel/standalone/babel.min.js))`, () => { gunzipSync(compressed); }); diff --git a/bench/gzip/package.json b/bench/gzip/package.json index f5c377686b..49e6c3a890 100644 --- a/bench/gzip/package.json +++ b/bench/gzip/package.json @@ -7,5 +7,8 @@ "bench:node": "$NODE node.mjs", "bench:deno": "$DENO run -A --unstable deno.js", "bench": "bun run bench:bun && bun run bench:node && bun run bench:deno" + }, + "dependencies": { + "@babel/standalone": "7.24.10" } } diff --git a/bench/sqlite/better-sqlite3.mjs b/bench/sqlite/better-sqlite3.mjs new file mode 100644 index 0000000000..9bf25105b9 --- /dev/null +++ b/bench/sqlite/better-sqlite3.mjs @@ -0,0 +1,31 @@ +import { run, bench } from "mitata"; +import { createRequire } from "module"; + +const require = createRequire(import.meta.url); +const db = require("better-sqlite3")("./src/northwind.sqlite"); + +{ + const sql = db.prepare(`SELECT * FROM "Order"`); + + bench('SELECT * FROM "Order"', () => { + sql.all(); + }); +} + +{ + const sql = db.prepare(`SELECT * FROM "Product"`); + + bench('SELECT * FROM "Product"', () => { + sql.all(); + }); +} + +{ + const sql = db.prepare(`SELECT * FROM "OrderDetail"`); + + bench('SELECT * FROM "OrderDetail"', () => { + sql.all(); + }); +} + +await run(); diff --git a/bench/sqlite/node.mjs b/bench/sqlite/node.mjs index 9bf25105b9..7602a87612 100644 --- a/bench/sqlite/node.mjs +++ b/bench/sqlite/node.mjs @@ -1,8 +1,9 @@ +// Run this script with `node --experimental-sqlite bench/sqlite/node.mjs`. +// The `--experimental-sqlite` flag requires Node v22.5.0 or higher.
import { run, bench } from "mitata"; -import { createRequire } from "module"; +import { DatabaseSync as Database } from "node:sqlite"; -const require = createRequire(import.meta.url); -const db = require("better-sqlite3")("./src/northwind.sqlite"); +const db = new Database("./src/northwind.sqlite"); { const sql = db.prepare(`SELECT * FROM "Order"`); diff --git a/build.zig b/build.zig index d6cae3fc03..cdf321c248 100644 --- a/build.zig +++ b/build.zig @@ -49,6 +49,7 @@ const BunBuildOptions = struct { reported_nodejs_version: Version, generated_code_dir: []const u8, + no_llvm: bool, cached_options_module: ?*Module = null, windows_shim: ?WindowsShim = null, @@ -181,6 +182,8 @@ pub fn build(b: *Build) !void { const obj_format = b.option(ObjectFormat, "obj_format", "Output file for object files") orelse .obj; + const no_llvm = b.option(bool, "no_llvm", "Experiment with Zig self hosted backends. No stability guaranteed") orelse false; + var build_options = BunBuildOptions{ .target = target, .optimize = optimize, @@ -189,6 +192,7 @@ pub fn build(b: *Build) !void { .arch = arch, .generated_code_dir = generated_code_dir, + .no_llvm = no_llvm, .version = try Version.parse(bun_version), .canary_revision = canary: { @@ -320,6 +324,7 @@ pub inline fn addMultiCheck( .version = root_build_options.version, .reported_nodejs_version = root_build_options.reported_nodejs_version, .generated_code_dir = root_build_options.generated_code_dir, + .no_llvm = root_build_options.no_llvm, }; var obj = addBunObject(b, &options); @@ -338,10 +343,15 @@ pub fn addBunObject(b: *Build, opts: *BunBuildOptions) *Compile { }, .target = opts.target, .optimize = opts.optimize, + .use_llvm = !opts.no_llvm, + .use_lld = if (opts.os == .mac) false else !opts.no_llvm, + + // https://github.com/ziglang/zig/issues/17430 .pic = true, + + .omit_frame_pointer = false, .strip = false, // stripped at the end }); - obj.bundle_compiler_rt = false; obj.formatted_panics = true; obj.root_module.omit_frame_pointer = false; @@ -359,9 +369,10 @@ pub fn addBunObject(b: *Build, opts: *BunBuildOptions) *Compile { } if (opts.os == .linux) { - obj.link_emit_relocs = true; - obj.link_eh_frame_hdr = true; + obj.link_emit_relocs = false; + obj.link_eh_frame_hdr = false; obj.link_function_sections = true; + obj.link_data_sections = true; if (opts.optimize == .Debug) { obj.root_module.valgrind = true; diff --git a/bun.lockb b/bun.lockb index 7981e9c2c9..0e38e94cfc 100755 Binary files a/bun.lockb and b/bun.lockb differ diff --git a/docs/api/fetch.md b/docs/api/fetch.md new file mode 100644 index 0000000000..e02f064e18 --- /dev/null +++ b/docs/api/fetch.md @@ -0,0 +1,308 @@ +Bun implements the WHATWG `fetch` standard, with some extensions to meet the needs of server-side JavaScript. + +Bun also implements `node:http`, but `fetch` is generally recommended instead. + +## Sending an HTTP request + +To send an HTTP request, use `fetch` + +```ts +const response = await fetch("http://example.com"); + +console.log(response.status); // => 200 + +const text = await response.text(); // or response.json(), response.formData(), etc. +``` + +`fetch` also works with HTTPS URLs. + +```ts +const response = await fetch("https://example.com"); +``` + +You can also pass `fetch` a [`Request`](https://developer.mozilla.org/en-US/docs/Web/API/Request) object. 
+ +```ts +const request = new Request("http://example.com", { + method: "POST", + body: "Hello, world!", +}); + +const response = await fetch(request); +``` + +### Sending a POST request + +To send a POST request, pass an object with the `method` property set to `"POST"`. + +```ts +const response = await fetch("http://example.com", { + method: "POST", + body: "Hello, world!", +}); +``` + +`body` can be a string, a `FormData` object, an `ArrayBuffer`, a `Blob`, and more. See the [MDN documentation](https://developer.mozilla.org/en-US/docs/Web/API/Body/body) for more information. + +### Proxying requests + +To proxy a request, pass an object with the `proxy` property set to a URL. + +```ts +const response = await fetch("http://example.com", { + proxy: "http://proxy.com", +}); +``` + +### Custom headers + +To set custom headers, pass an object with the `headers` property set to an object. + +```ts +const response = await fetch("http://example.com", { + headers: { + "X-Custom-Header": "value", + }, +}); +``` + +You can also set headers using the [Headers](https://developer.mozilla.org/en-US/docs/Web/API/Headers) object. + +```ts +const headers = new Headers(); +headers.append("X-Custom-Header", "value"); + +const response = await fetch("http://example.com", { + headers, +}); +``` + +### Response bodies + +To read the response body, use one of the following methods: + +- `response.text(): Promise<string>`: Returns a promise that resolves with the response body as a string. +- `response.json(): Promise<any>`: Returns a promise that resolves with the response body as a JSON object. +- `response.formData(): Promise<FormData>`: Returns a promise that resolves with the response body as a `FormData` object. +- `response.bytes(): Promise<Uint8Array>`: Returns a promise that resolves with the response body as a `Uint8Array`. +- `response.arrayBuffer(): Promise<ArrayBuffer>`: Returns a promise that resolves with the response body as an `ArrayBuffer`. +- `response.blob(): Promise<Blob>`: Returns a promise that resolves with the response body as a `Blob`. + +#### Streaming response bodies + +You can use async iterators to stream the response body. + +```ts +const response = await fetch("http://example.com"); + +for await (const chunk of response.body) { + console.log(chunk); +} +``` + +You can also access the `ReadableStream` object directly. + +```ts +const response = await fetch("http://example.com"); + +const stream = response.body; + +const reader = stream.getReader(); +const { value, done } = await reader.read(); +``` + +### Fetching a URL with a timeout + +To fetch a URL with a timeout, use `AbortSignal.timeout`: + +```ts +const response = await fetch("http://example.com", { + signal: AbortSignal.timeout(1000), +}); +``` + +#### Canceling a request + +To cancel a request, use an `AbortController`: + +```ts +const controller = new AbortController(); + +const response = await fetch("http://example.com", { + signal: controller.signal, +}); + +controller.abort(); +``` + +### Unix domain sockets + +To fetch a URL using a Unix domain socket, use the `unix: string` option: + +```ts +const response = await fetch("https://hostname/a/path", { + unix: "/var/run/path/to/unix.sock", + method: "POST", + body: JSON.stringify({ message: "Hello from Bun!"
}), + headers: { + "Content-Type": "application/json", + }, +}); +``` + +### TLS + +To use a client certificate, use the `tls` option: + +```ts +await fetch("https://example.com", { + tls: { + key: Bun.file("/path/to/key.pem"), + cert: Bun.file("/path/to/cert.pem"), + // ca: [Bun.file("/path/to/ca.pem")], + }, +}); +``` + +#### Custom TLS Validation + +To customize TLS validation, use the `checkServerIdentity` option in `tls`: + +```ts +await fetch("https://example.com", { + tls: { + checkServerIdentity: (hostname, peerCertificate) => { + // Return an error if the certificate is invalid + }, + }, +}); +``` + +This is similar to how it works in Node's `net` module. + +## Debugging + +To help with debugging, you can pass `verbose: true` to `fetch`: + +```ts +const response = await fetch("http://example.com", { + verbose: true, +}); +``` + +This will print the request and response headers to your terminal: + +```sh +[fetch] > HTTP/1.1 GET http://example.com/ +[fetch] > Connection: keep-alive +[fetch] > User-Agent: Bun/1.1.21 +[fetch] > Accept: */* +[fetch] > Host: example.com +[fetch] > Accept-Encoding: gzip, deflate, br + +[fetch] < 200 OK +[fetch] < Content-Encoding: gzip +[fetch] < Age: 201555 +[fetch] < Cache-Control: max-age=604800 +[fetch] < Content-Type: text/html; charset=UTF-8 +[fetch] < Date: Sun, 21 Jul 2024 02:41:14 GMT +[fetch] < Etag: "3147526947+gzip" +[fetch] < Expires: Sun, 28 Jul 2024 02:41:14 GMT +[fetch] < Last-Modified: Thu, 17 Oct 2019 07:18:26 GMT +[fetch] < Server: ECAcc (sac/254F) +[fetch] < Vary: Accept-Encoding +[fetch] < X-Cache: HIT +[fetch] < Content-Length: 648 +``` + +Note: `verbose: boolean` is not part of the Web standard `fetch` API and is specific to Bun. + +## Performance + +Before an HTTP request can be sent, the DNS lookup must be performed. This can take a significant amount of time, especially if the DNS server is slow or the network connection is poor. + +After the DNS lookup, the TCP socket must be connected and the TLS handshake might need to be performed. This can also take a significant amount of time. + +After the request completes, consuming the response body can also take a significant amount of time and memory. + +At every step of the way, Bun provides APIs to help you optimize the performance of your application. + +### DNS prefetching + +To prefetch a DNS entry, you can use the `dns.prefetch` API. This API is useful when you know you'll need to connect to a host soon and want to avoid the initial DNS lookup. + +```ts +import { dns } from "bun"; + +dns.prefetch("bun.sh", 443); +``` + +#### DNS caching + +By default, Bun caches and deduplicates DNS queries in-memory for up to 30 seconds. You can see the cache stats by calling `dns.getCacheStats()` (shown in the sketch at the end of the next section). + +To learn more about DNS caching in Bun, see the [DNS caching](/docs/api/dns) documentation. + +### Preconnect to a host + +To preconnect to a host, you can use the `fetch.preconnect` API. This API is useful when you know you'll need to connect to a host soon and want to start the initial DNS lookup, TCP socket connection, and TLS handshake early. + +```ts +import { fetch } from "bun"; + +fetch.preconnect("https://bun.sh"); +``` + +Note: calling `fetch` immediately after `fetch.preconnect` will not make your request faster. Preconnecting only helps if you know you'll need to connect to a host soon, but you're not ready to make the request yet.
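Putting the pieces together, here is a minimal sketch of warming up a host during startup and issuing the request later, using the `dns.prefetch`, `fetch.preconnect`, and `dns.getCacheStats` APIs described above:

```ts
import { dns, fetch } from "bun";

// Warm up the DNS cache and the connection pool for a host we know we'll need soon.
dns.prefetch("bun.sh", 443);
fetch.preconnect("https://bun.sh");

// ... other startup work runs while the handshakes happen ...

// By the time this request is sent, the pooled connection is ready to reuse.
const response = await fetch("https://bun.sh");
console.log(response.status);

// Inspect the in-memory DNS cache mentioned earlier.
console.log(dns.getCacheStats());
```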
+ +#### Preconnect at startup + +To preconnect to a host at startup, you can pass `--fetch-preconnect`: + +```sh +$ bun --fetch-preconnect https://bun.sh ./my-script.ts +``` + +This is sort of like `<link rel="preconnect">` in HTML. + +This feature is not implemented on Windows yet. If you're interested in using it on Windows, please file an issue and we can implement support for it. + +### Connection pooling & HTTP keep-alive + +Bun automatically reuses connections to the same host. This is known as connection pooling. This can significantly reduce the time it takes to establish a connection. You don't need to do anything to enable this; it's automatic. + +#### Simultaneous connection limit + +By default, Bun limits the maximum number of simultaneous `fetch` requests to 256. We do this for several reasons: + +- It improves overall system stability. Operating systems have an upper limit on the number of simultaneous open TCP sockets, usually in the low thousands. Nearing this limit causes your entire computer to behave strangely. Applications hang and crash. +- It encourages HTTP Keep-Alive connection reuse. For short-lived HTTP requests, the slowest step is often the initial connection setup. Reusing connections can save a lot of time. + +When the limit is exceeded, requests are queued and each is sent as soon as an in-flight request finishes. + +You can increase the maximum number of simultaneous connections via the `BUN_CONFIG_MAX_HTTP_REQUESTS` environment variable: + +```sh +$ BUN_CONFIG_MAX_HTTP_REQUESTS=512 bun ./my-script.ts +``` + +The max value for this limit is currently set to 65,336. The maximum port number is 65,535, so it's quite difficult for any one computer to exceed this limit. + +### Response buffering + +Bun goes to great lengths to optimize the performance of reading the response body. The fastest way to read the response body is to use one of these methods: + +- `response.text(): Promise<string>` +- `response.json(): Promise<any>` +- `response.formData(): Promise<FormData>` +- `response.bytes(): Promise<Uint8Array>` +- `response.arrayBuffer(): Promise<ArrayBuffer>` +- `response.blob(): Promise<Blob>` + +You can also use `Bun.write` to write the response body to a file on disk: + +```ts +import { write } from "bun"; + +await write("output.txt", response); +``` diff --git a/docs/nav.ts b/docs/nav.ts index 41da0c4281..93331f983f 100644 --- a/docs/nav.ts +++ b/docs/nav.ts @@ -287,8 +287,11 @@ export default { divider("API"), page("api/http", "HTTP server", { - description: `Bun implements Web-standard fetch, plus a Bun-native API for building fast HTTP servers.`, + description: `Bun implements a fast HTTP server built on Request/Response objects, along with supporting node:http APIs.`, }), // "`Bun.serve`"), + page("api/fetch", "HTTP client", { + description: `Bun implements Web-standard fetch with some Bun-native extensions.`, + }), // "fetch"), page("api/websockets", "WebSockets", { description: `Bun supports server-side WebSockets with on-the-fly compression, TLS support, and a Bun-native pubsub API.`, }), // "`Bun.serve`"), diff --git a/docs/runtime/nodejs-apis.md b/docs/runtime/nodejs-apis.md index 36407cdf6e..91a8b61a20 100644 --- a/docs/runtime/nodejs-apis.md +++ b/docs/runtime/nodejs-apis.md @@ -153,7 +153,7 @@ Some methods are not optimized yet.
### [`node:util`](https://nodejs.org/api/util.html) -🟡 Missing `MIMEParams` `MIMEType` `aborted` `debug` `getSystemErrorMap` `getSystemErrorName` `transferableAbortController` `transferableAbortSignal` `stripVTControlCharacters` +🟡 Missing `MIMEParams` `MIMEType` `aborted` `debug` `getSystemErrorMap` `transferableAbortController` `transferableAbortSignal` `stripVTControlCharacters` ### [`node:v8`](https://nodejs.org/api/v8.html) diff --git a/docs/runtime/web-apis.md b/docs/runtime/web-apis.md index 98c822274f..4280aa4078 100644 --- a/docs/runtime/web-apis.md +++ b/docs/runtime/web-apis.md @@ -7,22 +7,36 @@ The following Web APIs are partially or completely supported. --- - HTTP -- [`fetch`](https://developer.mozilla.org/en-US/docs/Web/API/fetch) [`Response`](https://developer.mozilla.org/en-US/docs/Web/API/Response) [`Request`](https://developer.mozilla.org/en-US/docs/Web/API/Request) [`Headers`](https://developer.mozilla.org/en-US/docs/Web/API/Headers) [`AbortController`](https://developer.mozilla.org/en-US/docs/Web/API/AbortController) [`AbortSignal`](https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal) +- [`fetch`](https://developer.mozilla.org/en-US/docs/Web/API/fetch) + [`Response`](https://developer.mozilla.org/en-US/docs/Web/API/Response) + [`Request`](https://developer.mozilla.org/en-US/docs/Web/API/Request) + [`Headers`](https://developer.mozilla.org/en-US/docs/Web/API/Headers) + [`AbortController`](https://developer.mozilla.org/en-US/docs/Web/API/AbortController) + [`AbortSignal`](https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal) --- - URLs -- [`URL`](https://developer.mozilla.org/en-US/docs/Web/API/URL) [`URLSearchParams`](https://developer.mozilla.org/en-US/docs/Web/API/URLSearchParams) +- [`URL`](https://developer.mozilla.org/en-US/docs/Web/API/URL) + [`URLSearchParams`](https://developer.mozilla.org/en-US/docs/Web/API/URLSearchParams) --- - Web Workers -- [`Worker`](https://developer.mozilla.org/en-US/docs/Web/API/Worker) [`self.postMessage`](https://developer.mozilla.org/en-US/docs/Web/API/DedicatedWorkerGlobalScope/postMessage) [`structuredClone`](https://developer.mozilla.org/en-US/docs/Web/API/structuredClone) [`MessagePort`](https://developer.mozilla.org/en-US/docs/Web/API/MessagePort) [`MessageChannel`](https://developer.mozilla.org/en-US/docs/Web/API/MessageChannel), [`BroadcastChannel`](https://developer.mozilla.org/en-US/docs/Web/API/BroadcastChannel). +- [`Worker`](https://developer.mozilla.org/en-US/docs/Web/API/Worker) + [`self.postMessage`](https://developer.mozilla.org/en-US/docs/Web/API/DedicatedWorkerGlobalScope/postMessage) + [`structuredClone`](https://developer.mozilla.org/en-US/docs/Web/API/structuredClone) + [`MessagePort`](https://developer.mozilla.org/en-US/docs/Web/API/MessagePort) + [`MessageChannel`](https://developer.mozilla.org/en-US/docs/Web/API/MessageChannel), [`BroadcastChannel`](https://developer.mozilla.org/en-US/docs/Web/API/BroadcastChannel). 
--- - Streams -- [`ReadableStream`](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream) [`WritableStream`](https://developer.mozilla.org/en-US/docs/Web/API/WritableStream) [`TransformStream`](https://developer.mozilla.org/en-US/docs/Web/API/TransformStream) [`ByteLengthQueuingStrategy`](https://developer.mozilla.org/en-US/docs/Web/API/ByteLengthQueuingStrategy) [`CountQueuingStrategy`](https://developer.mozilla.org/en-US/docs/Web/API/CountQueuingStrategy) and associated classes +- [`ReadableStream`](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream) + [`WritableStream`](https://developer.mozilla.org/en-US/docs/Web/API/WritableStream) + [`TransformStream`](https://developer.mozilla.org/en-US/docs/Web/API/TransformStream) + [`ByteLengthQueuingStrategy`](https://developer.mozilla.org/en-US/docs/Web/API/ByteLengthQueuingStrategy) + [`CountQueuingStrategy`](https://developer.mozilla.org/en-US/docs/Web/API/CountQueuingStrategy) and associated classes --- @@ -37,7 +51,10 @@ The following Web APIs are partially or completely supported. --- - Encoding and decoding -- [`atob`](https://developer.mozilla.org/en-US/docs/Web/API/atob) [`btoa`](https://developer.mozilla.org/en-US/docs/Web/API/btoa) [`TextEncoder`](https://developer.mozilla.org/en-US/docs/Web/API/TextEncoder) [`TextDecoder`](https://developer.mozilla.org/en-US/docs/Web/API/TextDecoder) +- [`atob`](https://developer.mozilla.org/en-US/docs/Web/API/atob) + [`btoa`](https://developer.mozilla.org/en-US/docs/Web/API/btoa) + [`TextEncoder`](https://developer.mozilla.org/en-US/docs/Web/API/TextEncoder) + [`TextDecoder`](https://developer.mozilla.org/en-US/docs/Web/API/TextDecoder) --- @@ -47,7 +64,8 @@ The following Web APIs are partially or completely supported. --- - Timeouts -- [`setTimeout`](https://developer.mozilla.org/en-US/docs/Web/API/setTimeout) [`clearTimeout`](https://developer.mozilla.org/en-US/docs/Web/API/clearTimeout) +- [`setTimeout`](https://developer.mozilla.org/en-US/docs/Web/API/setTimeout) + [`clearTimeout`](https://developer.mozilla.org/en-US/docs/Web/API/clearTimeout) --- @@ -57,14 +75,16 @@ The following Web APIs are partially or completely supported. --- - Crypto -- [`crypto`](https://developer.mozilla.org/en-US/docs/Web/API/Crypto) [`SubtleCrypto`](https://developer.mozilla.org/en-US/docs/Web/API/SubtleCrypto) +- [`crypto`](https://developer.mozilla.org/en-US/docs/Web/API/Crypto) + [`SubtleCrypto`](https://developer.mozilla.org/en-US/docs/Web/API/SubtleCrypto) [`CryptoKey`](https://developer.mozilla.org/en-US/docs/Web/API/CryptoKey) --- - Debugging -- [`console`](https://developer.mozilla.org/en-US/docs/Web/API/console) [`performance`](https://developer.mozilla.org/en-US/docs/Web/API/Performance) +- [`console`](https://developer.mozilla.org/en-US/docs/Web/API/console) + [`performance`](https://developer.mozilla.org/en-US/docs/Web/API/Performance) --- @@ -79,7 +99,9 @@ The following Web APIs are partially or completely supported. 
--- - User interaction -- [`alert`](https://developer.mozilla.org/en-US/docs/Web/API/Window/alert) [`confirm`](https://developer.mozilla.org/en-US/docs/Web/API/Window/confirm) [`prompt`](https://developer.mozilla.org/en-US/docs/Web/API/Window/prompt) (intended for interactive CLIs) +- [`alert`](https://developer.mozilla.org/en-US/docs/Web/API/Window/alert) + [`confirm`](https://developer.mozilla.org/en-US/docs/Web/API/Window/confirm) + [`prompt`](https://developer.mozilla.org/en-US/docs/Web/API/Window/prompt) (intended for interactive CLIs) {d}:{d}\n", .{ + source_index, part_index, dependency.source_index.get(), dependency.part_index, + }); + } + + c.markPartLiveForTreeShaking( dependency.part_index, dependency.source_index.get(), side_effects, @@ -10197,18 +10260,20 @@ const LinkerContext = struct { entry_point_kinds, ); } - - return true; } pub fn matchImportWithExport( c: *LinkerContext, - init_tracker: *ImportTracker, + init_tracker: ImportTracker, re_exports: *std.ArrayList(js_ast.Dependency), ) MatchImport { + const cycle_detector_top = c.cycle_detector.items.len; + defer c.cycle_detector.shrinkRetainingCapacity(cycle_detector_top); + var tracker = init_tracker; var ambiguous_results = std.ArrayList(MatchImport).init(c.allocator); defer ambiguous_results.clearAndFree(); + var result: MatchImport = MatchImport{}; const named_imports = c.graph.ast.items(.named_imports); @@ -10222,30 +10287,26 @@ const LinkerContext = struct { // // This uses a O(n^2) array scan instead of a O(n) map because the vast // majority of cases have one or two elements - for (c.cycle_detector.items) |prev_tracker| { - if (std.meta.eql(tracker.*, prev_tracker)) { + for (c.cycle_detector.items[cycle_detector_top..]) |prev_tracker| { + if (std.meta.eql(tracker, prev_tracker)) { result = .{ .kind = .cycle }; break :loop; } } - const prev_import_ref = tracker.import_ref; - if (tracker.source_index.isInvalid()) { // External break; } const prev_source_index = tracker.source_index.get(); - c.cycle_detector.append(tracker.*) catch unreachable; + c.cycle_detector.append(tracker) catch bun.outOfMemory(); // Resolve the import by one step - var advanced = c.advanceImportTracker(tracker); - advanced.tracker.* = advanced.value; - const next_tracker = advanced.tracker.*; + const advanced = c.advanceImportTracker(&tracker); + const next_tracker = advanced.value; const status = advanced.status; const potentially_ambiguous_export_star_refs = advanced.import_data; - const other_id = advanced.value.source_index.get(); switch (status) { .cjs, .cjs_without_exports, .disabled, .external => { @@ -10259,7 +10320,7 @@ const LinkerContext = struct { // property access. Don't do this if the namespace reference is invalid // though. This is the case for star imports, where the import is the // namespace. - const named_import: js_ast.NamedImport = named_imports[prev_source_index].get(prev_import_ref).?; + const named_import: js_ast.NamedImport = named_imports[prev_source_index].get(tracker.import_ref).?; if (named_import.namespace_ref != null and named_import.namespace_ref.?.isValid()) { if (result.kind == .normal) { @@ -10295,13 +10356,13 @@ const LinkerContext = struct { // if the file was rewritten from CommonJS into ESM // and the developer imported an export that doesn't exist // We don't do a runtime error since that CJS would have returned undefined. 
- const named_import: js_ast.NamedImport = named_imports[prev_source_index].get(prev_import_ref).?; + const named_import: js_ast.NamedImport = named_imports[prev_source_index].get(tracker.import_ref).?; if (named_import.namespace_ref != null and named_import.namespace_ref.?.isValid()) { - const symbol = c.graph.symbols.get(prev_import_ref).?; + const symbol = c.graph.symbols.get(tracker.import_ref).?; symbol.import_item_status = .missing; result.kind = .normal_and_namespace; - result.namespace_ref = prev_import_ref; + result.namespace_ref = tracker.import_ref; result.alias = named_import.alias.?; result.name_loc = named_import.alias_loc orelse Logger.Loc.Empty; } @@ -10309,7 +10370,7 @@ const LinkerContext = struct { .dynamic_fallback => { // If it's a file with dynamic export fallback, rewrite the import to a property access - const named_import: js_ast.NamedImport = named_imports[prev_source_index].get(prev_import_ref).?; + const named_import: js_ast.NamedImport = named_imports[prev_source_index].get(tracker.import_ref).?; if (named_import.namespace_ref != null and named_import.namespace_ref.?.isValid()) { if (result.kind == .normal) { result.kind = .normal_and_namespace; @@ -10326,8 +10387,8 @@ const LinkerContext = struct { }, .no_match => { // Report mismatched imports and exports - const symbol = c.graph.symbols.get(prev_import_ref).?; - const named_import: js_ast.NamedImport = named_imports[prev_source_index].get(prev_import_ref).?; + const symbol = c.graph.symbols.get(tracker.import_ref).?; + const named_import: js_ast.NamedImport = named_imports[prev_source_index].get(tracker.import_ref).?; const source = c.source_(prev_source_index); const next_source = c.source_(next_tracker.source_index.get()); @@ -10408,15 +10469,7 @@ const LinkerContext = struct { for (potentially_ambiguous_export_star_refs) |*ambiguous_tracker| { // If this is a re-export of another import, follow the import if (named_imports[ambiguous_tracker.data.source_index.get()].contains(ambiguous_tracker.data.import_ref)) { - c.cycle_detector.clearRetainingCapacity(); - c.swap_cycle_detector.clearRetainingCapacity(); - - const old_cycle_detector = c.cycle_detector; - c.cycle_detector = c.swap_cycle_detector; - const ambig = c.matchImportWithExport(&ambiguous_tracker.data, re_exports); - c.cycle_detector.clearRetainingCapacity(); - c.swap_cycle_detector = c.cycle_detector; - c.cycle_detector = old_cycle_detector; + const ambig = c.matchImportWithExport(ambiguous_tracker.data, re_exports); ambiguous_results.append(ambig) catch unreachable; } else { ambiguous_results.append(.{ @@ -10443,7 +10496,7 @@ const LinkerContext = struct { // Depend on the statement(s) that declared this import symbol in the // original file { - const deps = c.topLevelSymbolsToParts(other_id, tracker.import_ref); + const deps = c.topLevelSymbolsToParts(prev_source_index, tracker.import_ref); re_exports.ensureUnusedCapacity(deps.len) catch unreachable; for (deps) |dep| { re_exports.appendAssumeCapacity( @@ -10459,7 +10512,7 @@ const LinkerContext = struct { // iteration of the loop to resolve that import as well const next_id = next_tracker.source_index.get(); if (named_imports[next_id].contains(next_tracker.import_ref)) { - tracker.* = next_tracker; + tracker = next_tracker; continue :loop; } }, @@ -10589,7 +10642,10 @@ const LinkerContext = struct { // // This depends on the "__esm" symbol and declares the "init_foo" symbol // for similar reasons to the CommonJS closure above. 
- const esm_parts = c.topLevelSymbolsToPartsForRuntime(c.esm_runtime_ref); + const esm_parts = if (wrapper_ref.isValid()) + c.topLevelSymbolsToPartsForRuntime(c.esm_runtime_ref) + else + &.{}; // generate a dummy part that depends on the "__esm" symbol const dependencies = c.allocator.alloc(js_ast.Dependency, esm_parts.len) catch unreachable; @@ -10618,19 +10674,22 @@ const LinkerContext = struct { ) catch unreachable; bun.assert(part_index != js_ast.namespace_export_part_index); wrapper_part_index.* = Index.part(part_index); - c.graph.generateSymbolImportAndUse( - source_index, - part_index, - c.esm_runtime_ref, - 1, - Index.runtime, - ) catch unreachable; + + if (wrapper_ref.isValid()) { + c.graph.generateSymbolImportAndUse( + source_index, + part_index, + c.esm_runtime_ref, + 1, + Index.runtime, + ) catch unreachable; + } }, else => {}, } } - pub fn advanceImportTracker(c: *LinkerContext, tracker: *ImportTracker) ImportTracker.Iterator { + pub fn advanceImportTracker(c: *LinkerContext, tracker: *const ImportTracker) ImportTracker.Iterator { const id = tracker.source_index.get(); var named_imports: *JSAst.NamedImports = &c.graph.ast.items(.named_imports)[id]; var import_records = c.graph.ast.items(.import_records)[id]; @@ -10643,7 +10702,6 @@ const LinkerContext = struct { return .{ .value = .{}, .status = .external, - .tracker = tracker, }; // Is this an external file? @@ -10652,7 +10710,6 @@ const LinkerContext = struct { return .{ .value = .{}, .status = .external, - .tracker = tracker, }; } @@ -10666,7 +10723,6 @@ const LinkerContext = struct { .source_index = record.source_index, }, .status = .disabled, - .tracker = tracker, }; } @@ -10688,7 +10744,6 @@ const LinkerContext = struct { .import_ref = Ref.None, }, .status = .cjs_without_exports, - .tracker = tracker, }; } const other_kind = exports_kind[other_id]; @@ -10700,7 +10755,6 @@ const LinkerContext = struct { .import_ref = Ref.None, }, .status = .cjs, - .tracker = tracker, }; } @@ -10713,7 +10767,6 @@ const LinkerContext = struct { .value = matching_export.data, .status = .found, .import_data = matching_export.potentially_ambiguous_export_star_refs.slice(), - .tracker = tracker, }; } } @@ -10729,7 +10782,6 @@ const LinkerContext = struct { }, .status = .found, .import_data = matching_export.potentially_ambiguous_export_star_refs.slice(), - .tracker = tracker, }; } @@ -10745,7 +10797,6 @@ const LinkerContext = struct { .dynamic_fallback_interop_default else .dynamic_fallback, - .tracker = tracker, }; } @@ -10755,7 +10806,6 @@ const LinkerContext = struct { return .{ .value = .{}, .status = .probably_typescript_type, - .tracker = tracker, }; } @@ -10764,7 +10814,6 @@ const LinkerContext = struct { .source_index = Index.source(other_source_index), }, .status = .no_match, - .tracker = tracker, }; } @@ -10798,15 +10847,12 @@ const LinkerContext = struct { const import_ref = ref; - var import_tracker = ImportData{ - .data = .{ + var re_exports = std.ArrayList(js_ast.Dependency).init(c.allocator); + const result = c.matchImportWithExport( + .{ .source_index = Index.source(source_index), .import_ref = import_ref, }, - }; - var re_exports = std.ArrayList(js_ast.Dependency).init(c.allocator); - const result = c.matchImportWithExport( - &import_tracker.data, &re_exports, ); @@ -10924,14 +10970,13 @@ const LinkerContext = struct { if (i == source_index) return; } - - this.source_index_stack.append(source_index) catch unreachable; + this.source_index_stack.append(source_index) catch bun.outOfMemory(); const stack_end_pos = 
this.source_index_stack.items.len; - const id = source_index; + defer this.source_index_stack.shrinkRetainingCapacity(stack_end_pos - 1); - const import_records = this.import_records_list[id].slice(); + const import_records = this.import_records_list[source_index].slice(); - for (this.export_star_records[id]) |import_id| { + for (this.export_star_records[source_index]) |import_id| { const other_source_index = import_records[import_id].source_index.get(); const other_id = other_source_index; @@ -10948,9 +10993,11 @@ const LinkerContext = struct { // re-exports as property accesses off of a generated require() call. if (this.exports_kind[other_id] == .cjs) continue; + var iter = this.named_exports[other_id].iterator(); next_export: while (iter.next()) |entry| { const alias = entry.key_ptr.*; + const name = entry.value_ptr.*; // ES6 export star statements ignore exports named "default" if (strings.eqlComptime(alias, "default")) @@ -10962,34 +11009,35 @@ const LinkerContext = struct { continue :next_export; } } - const ref = entry.value_ptr.ref; - var resolved = resolved_exports.getOrPut(this.allocator, entry.key_ptr.*) catch unreachable; - if (!resolved.found_existing) { - resolved.value_ptr.* = .{ + + const gop = resolved_exports.getOrPut(this.allocator, alias) catch bun.outOfMemory(); + if (!gop.found_existing) { + // Initialize the re-export + gop.value_ptr.* = .{ .data = .{ - .import_ref = ref, + .import_ref = name.ref, .source_index = Index.source(other_source_index), - .name_loc = entry.value_ptr.alias_loc, + .name_loc = name.alias_loc, }, }; // Make sure the symbol is marked as imported so that code splitting // imports it correctly if it ends up being shared with another chunk - this.imports_to_bind[id].put(this.allocator, entry.value_ptr.ref, .{ + this.imports_to_bind[source_index].put(this.allocator, name.ref, .{ .data = .{ - .import_ref = ref, + .import_ref = name.ref, .source_index = Index.source(other_source_index), }, - }) catch unreachable; - } else if (resolved.value_ptr.data.source_index.get() != other_source_index) { + }) catch bun.outOfMemory(); + } else if (gop.value_ptr.data.source_index.get() != other_source_index) { // Two different re-exports colliding makes it potentially ambiguous - resolved.value_ptr.potentially_ambiguous_export_star_refs.push(this.allocator, .{ + gop.value_ptr.potentially_ambiguous_export_star_refs.push(this.allocator, .{ .data = .{ .source_index = Index.source(other_source_index), - .import_ref = ref, - .name_loc = entry.value_ptr.alias_loc, + .import_ref = name.ref, + .name_loc = name.alias_loc, }, - }) catch unreachable; + }) catch bun.outOfMemory(); } } @@ -11236,7 +11284,6 @@ pub const ImportTracker = struct { status: Status = Status.no_match, value: ImportTracker = .{}, import_data: []ImportData = &.{}, - tracker: *ImportTracker, }; }; diff --git a/src/c.zig b/src/c.zig index 244172ed03..ff9226a660 100644 --- a/src/c.zig +++ b/src/c.zig @@ -104,7 +104,6 @@ pub fn lstat_absolute(path: [:0]const u8) !Stat { // renameatZ fails when renaming across mount points // we assume that this is relatively uncommon -// TODO: change types to use `bun.FileDescriptor` pub fn moveFileZ(from_dir: bun.FileDescriptor, filename: [:0]const u8, to_dir: bun.FileDescriptor, destination: [:0]const u8) !void { switch (bun.sys.renameatConcurrentlyWithoutFallback(from_dir, filename, to_dir, destination)) { .err => |err| { diff --git a/src/cli.zig b/src/cli.zig index 6c74ac63e1..d7135e3612 100644 --- a/src/cli.zig +++ b/src/cli.zig @@ -70,6 +70,19 @@ pub const Cli = 
struct { pub threadlocal var is_main_thread: bool = false; }; +pub const debug_flags = if (Environment.isDebug) struct { + var resolve_breakpoints: []const []const u8 = &.{}; + + pub fn hasResolveBreakpoint(str: []const u8) bool { + for (resolve_breakpoints) |bp| { + if (strings.contains(str, bp)) { + return true; + } + } + return false; + } +} else @compileError("Do not access this namespace in a release build"); + const LoaderMatcher = strings.ExactSizeMatcher(4); const ColonListType = @import("./cli/colon_list_type.zig").ColonListType; pub const LoaderColonList = ColonListType(Api.Loader, Arguments.loader_resolver); @@ -146,7 +159,7 @@ pub const Arguments = struct { pub const ParamType = clap.Param(clap.Help); - const base_params_ = [_]ParamType{ + const base_params_ = (if (Environment.isDebug) debug_params else [_]ParamType{}) ++ [_]ParamType{ clap.parseParam("--env-file ... Load environment variables from the specified file(s)") catch unreachable, clap.parseParam("--cwd Absolute path to resolve files & entry points from. This just changes the process' cwd.") catch unreachable, clap.parseParam("-c, --config ? Specify path to Bun config file. Default $cwd/bunfig.toml") catch unreachable, @@ -157,6 +170,10 @@ pub const Arguments = struct { clap.parseParam("--verbose-error-trace") catch unreachable, } else [_]ParamType{}; + const debug_params = [_]ParamType{ + clap.parseParam("--breakpoint-resolve ... DEBUG MODE: breakpoint when resolving something that includes this string") catch unreachable, + }; + const transpiler_params_ = [_]ParamType{ clap.parseParam("--main-fields ... Main fields to lookup in package.json. Defaults to --target dependent") catch unreachable, clap.parseParam("--extension-order ... Defaults to: .tsx,.ts,.jsx,.js,.json ") catch unreachable, @@ -189,6 +206,7 @@ pub const Arguments = struct { clap.parseParam("-p, --port Set the default port for Bun.serve") catch unreachable, clap.parseParam("-u, --origin ") catch unreachable, clap.parseParam("--conditions ... Pass custom conditions to resolve") catch unreachable, + clap.parseParam("--fetch-preconnect ...
Preconnect to a URL while code is loading") catch unreachable, }; const auto_or_run_params = [_]ParamType{ @@ -632,6 +650,8 @@ pub const Arguments = struct { } ctx.runtime_options.if_present = args.flag("--if-present"); ctx.runtime_options.smol = args.flag("--smol"); + ctx.runtime_options.preconnect = args.options("--fetch-preconnect"); + if (args.option("--inspect")) |inspect_flag| { ctx.runtime_options.debugger = if (inspect_flag.len == 0) Command.Debugger{ .enable = .{} } @@ -959,6 +979,10 @@ pub const Arguments = struct { } } + if (Environment.isDebug) { + debug_flags.resolve_breakpoints = args.options("--breakpoint-resolve"); + } + return opts; } }; @@ -1045,7 +1069,7 @@ pub const HelpCommand = struct { \\ update {s:<16} Update outdated dependencies \\ link [\] Register or link a local npm package \\ unlink Unregister a local npm package - \\ patch \ Prepare a package for patching + \\ patch \ Prepare a package for patching \\ pm \ Additional package management utilities \\ \\ build ./a.ts ./b.jsx Bundle TypeScript & JavaScript into a single file @@ -1207,16 +1231,11 @@ pub const Command = struct { script: []const u8 = "", eval_and_print: bool = false, } = .{}, + preconnect: []const []const u8 = &[_][]const u8{}, }; var global_cli_ctx: Context = undefined; - - var context_data: ContextData = ContextData{ - .args = std.mem.zeroes(Api.TransformOptions), - .log = undefined, - .start_time = 0, - .allocator = undefined, - }; + var context_data: ContextData = undefined; pub const init = ContextData.create; @@ -1260,10 +1279,13 @@ pub const Command = struct { pub fn create(allocator: std.mem.Allocator, log: *logger.Log, comptime command: Command.Tag) anyerror!Context { Cli.cmd = command; + context_data = .{ + .args = std.mem.zeroes(Api.TransformOptions), + .log = log, + .start_time = start_time, + .allocator = allocator, + }; global_cli_ctx = &context_data; - global_cli_ctx.log = log; - global_cli_ctx.start_time = start_time; - global_cli_ctx.allocator = allocator; if (comptime Command.Tag.uses_global_options.get(command)) { global_cli_ctx.args = try Arguments.parse(allocator, global_cli_ctx, command); diff --git a/src/cli/create_command.zig b/src/cli/create_command.zig index aaf2f44cf4..870360f506 100644 --- a/src/cli/create_command.zig +++ b/src/cli/create_command.zig @@ -1811,7 +1811,6 @@ pub const Example = struct { const examples_url: string = "https://registry.npmjs.org/bun-examples-all/latest"; var url: URL = undefined; - pub const timeout: u32 = 6000; var app_name_buf: [512]u8 = undefined; pub fn print(examples: []const Example, default_app_name: ?string) void { @@ -1977,7 +1976,6 @@ pub const Example = struct { headers_buf, mutable, "", - 60 * std.time.ns_per_min, http_proxy, null, HTTP.FetchRedirect.follow, @@ -2055,7 +2053,6 @@ pub const Example = struct { "", mutable, "", - 60 * std.time.ns_per_min, http_proxy, null, HTTP.FetchRedirect.follow, @@ -2145,7 +2142,6 @@ pub const Example = struct { "", mutable, "", - 60 * std.time.ns_per_min, http_proxy, null, HTTP.FetchRedirect.follow, @@ -2188,7 +2184,6 @@ pub const Example = struct { "", mutable, "", - 60 * std.time.ns_per_min, http_proxy, null, HTTP.FetchRedirect.follow, diff --git a/src/cli/exec_command.zig b/src/cli/exec_command.zig index de51c9c20d..5ea2fd36cc 100644 --- a/src/cli/exec_command.zig +++ b/src/cli/exec_command.zig @@ -50,7 +50,7 @@ pub const ExecCommand = struct { // Output.flush(); // } - Global.exitWide(code); + Global.exit(code); // } } }; diff --git a/src/cli/package_manager_command.zig 
b/src/cli/package_manager_command.zig index 55631722ef..226d59d1c2 100644 --- a/src/cli/package_manager_command.zig +++ b/src/cli/package_manager_command.zig @@ -61,7 +61,8 @@ pub const PackageManagerCommand = struct { @memcpy(lockfile_buffer[0..lockfile_.len], lockfile_); lockfile_buffer[lockfile_.len] = 0; const lockfile = lockfile_buffer[0..lockfile_.len :0]; - var pm = try PackageManager.init(ctx, PackageManager.Subcommand.pm); + const cli = try PackageManager.CommandLineArguments.parse(ctx.allocator, .pm); + var pm = try PackageManager.init(ctx, cli, PackageManager.Subcommand.pm); const load_lockfile = pm.lockfile.loadFromDisk(pm, ctx.allocator, ctx.log, lockfile, true); handleLoadLockfileErrors(load_lockfile, pm); @@ -120,8 +121,8 @@ pub const PackageManagerCommand = struct { pub fn exec(ctx: Command.Context) !void { var args = try std.process.argsAlloc(ctx.allocator); args = args[1..]; - - var pm = PackageManager.init(ctx, PackageManager.Subcommand.pm) catch |err| { + const cli = try PackageManager.CommandLineArguments.parse(ctx.allocator, .pm); + var pm = PackageManager.init(ctx, cli, PackageManager.Subcommand.pm) catch |err| { if (err == error.MissingPackageJSON) { var cwd_buf: bun.PathBuffer = undefined; if (bun.getcwd(&cwd_buf)) |cwd| { diff --git a/src/cli/run_command.zig b/src/cli/run_command.zig index 846c149f06..5e8abbe276 100644 --- a/src/cli/run_command.zig +++ b/src/cli/run_command.zig @@ -328,7 +328,7 @@ pub const RunCommand = struct { Output.flush(); } - Global.exitWide(code); + Global.exit(code); } return true; @@ -578,8 +578,7 @@ pub const RunCommand = struct { }); } - Output.flush(); - Global.raiseIgnoringPanicHandler(@intFromEnum(signal)); + Global.raiseIgnoringPanicHandler(signal); }, .exited => |exit_code| { @@ -592,8 +591,7 @@ pub const RunCommand = struct { }); } - Output.flush(); - Global.raiseIgnoringPanicHandler(@intFromEnum(exit_code.signal)); + Global.raiseIgnoringPanicHandler(exit_code.signal); } const code = exit_code.code; diff --git a/src/cli/test_command.zig b/src/cli/test_command.zig index ebf9ef5b06..39a3558f41 100644 --- a/src/cli/test_command.zig +++ b/src/cli/test_command.zig @@ -598,7 +598,7 @@ const Scanner = struct { }; // always ignore node_modules. 
- if (strings.contains(slice, "/" ++ "node_modules" ++ "/")) { + if (strings.contains(slice, "/node_modules/") or strings.contains(slice, "\\node_modules\\")) { return false; } @@ -1105,7 +1105,7 @@ pub const TestCommand = struct { if (reporter.summary.fail > 0 or (coverage.enabled and coverage.fractions.failing and coverage.fail_on_low_coverage)) { Global.exit(1); } else if (reporter.jest.unhandled_errors_between_tests > 0) { - Global.exitWide(@intCast(reporter.jest.unhandled_errors_between_tests)); + Global.exit(reporter.jest.unhandled_errors_between_tests); } } diff --git a/src/cli/upgrade_command.zig b/src/cli/upgrade_command.zig index 2f731d6701..0c5ba5c28e 100644 --- a/src/cli/upgrade_command.zig +++ b/src/cli/upgrade_command.zig @@ -163,7 +163,6 @@ pub const UpgradeCheckerThread = struct { }; pub const UpgradeCommand = struct { - pub const timeout: u32 = 30000; const default_github_headers: string = "Acceptapplication/vnd.github.v3+json"; var github_repository_url_buf: bun.PathBuffer = undefined; var current_executable_buf: bun.PathBuffer = undefined; @@ -245,7 +244,6 @@ pub const UpgradeCommand = struct { headers_buf, &metadata_body, "", - 60 * std.time.ns_per_min, http_proxy, null, HTTP.FetchRedirect.follow, @@ -528,12 +526,10 @@ pub const UpgradeCommand = struct { "", zip_file_buffer, "", - timeout, http_proxy, null, HTTP.FetchRedirect.follow, ); - async_http.client.timeout = timeout; async_http.client.progress_node = progress; async_http.client.reject_unauthorized = env_loader.getTLSRejectUnauthorized(); diff --git a/src/codegen/bundle-functions.ts b/src/codegen/bundle-functions.ts index dfc66b8368..79f4d2e956 100644 --- a/src/codegen/bundle-functions.ts +++ b/src/codegen/bundle-functions.ts @@ -8,13 +8,19 @@ // library, instead of RegExp hacks. // // For explanation on this, please nag @paperdave to write documentation on how everything works. +// +// The output is intended to be similar to what WebCore does internally with a couple differences: +// +// - We concatenate all the sources into one big string, which then createsa +// single JSC::SourceProvider and pass start/end positions to each function's +// JSC::SourceCode. JSC does this, but WebCore does not seem to. import { readdirSync, rmSync } from "fs"; import path from "path"; import { sliceSourceCode } from "./builtin-parser"; -import { applyGlobalReplacements, define } from "./replacements"; -import { cap, fmtCPPCharArray, low, writeIfNotChanged } from "./helpers"; import { createAssertClientJS, createLogClientJS } from "./client-js"; import { getJS2NativeDTS } from "./generate-js2native"; +import { addCPPCharArray, cap, low, writeIfNotChanged } from "./helpers"; +import { applyGlobalReplacements, define } from "./replacements"; const PARALLEL = false; const KEEP_TMP = true; @@ -52,6 +58,7 @@ interface BundledBuiltin { source: string; params: string[]; visibility: string; + sourceOffset: number; } /** @@ -228,6 +235,10 @@ $$capture_start$$(${fn.async ? "async " : ""}${ constructKind: fn.directives.ConstructKind ?? "None", isLinkTimeConstant: !!fn.directives.linkTimeConstant, intrinsic: fn.directives.intrinsic ?? "NoIntrinsic", + + // Not known yet. + sourceOffset: 0, + overriddenName: fn.directives.getter ? 
import { readdirSync, rmSync } from "fs"; import path from "path"; import { sliceSourceCode } from "./builtin-parser"; -import { applyGlobalReplacements, define } from "./replacements"; -import { cap, fmtCPPCharArray, low, writeIfNotChanged } from "./helpers"; import { createAssertClientJS, createLogClientJS } from "./client-js"; import { getJS2NativeDTS } from "./generate-js2native"; +import { addCPPCharArray, cap, low, writeIfNotChanged } from "./helpers"; +import { applyGlobalReplacements, define } from "./replacements"; const PARALLEL = false; const KEEP_TMP = true; @@ -52,6 +58,7 @@ interface BundledBuiltin { source: string; params: string[]; visibility: string; + sourceOffset: number; } /** @@ -228,6 +235,10 @@ $$capture_start$$(${fn.async ? "async " : ""}${ constructKind: fn.directives.ConstructKind ?? "None", isLinkTimeConstant: !!fn.directives.linkTimeConstant, intrinsic: fn.directives.intrinsic ?? "NoIntrinsic", + + // Not known yet. + sourceOffset: 0, + overriddenName: fn.directives.getter ?
`"get ${fn.name}"_s` : fn.directives.overriddenName @@ -275,6 +286,34 @@ export async function bundleBuiltinFunctions({ requireTransformer }: BundleBuilt } } + let combinedSourceCodeChars = ""; + let combinedSourceCodeLength = 0; + // Compute source offsets + { + for (const { basename, functions } of files) { + for (const fn of functions) { + fn.sourceOffset = combinedSourceCodeLength; + combinedSourceCodeLength += fn.source.length; + if (combinedSourceCodeChars && !combinedSourceCodeChars.endsWith(",")) { + combinedSourceCodeChars += ","; + } + combinedSourceCodeChars += addCPPCharArray(fn.source, false); + + // If you want to see the individual function sources: + // if (true) { + // Bun.write(CODEGEN_DIR + "/functions/" + low(basename) + cap(fn.name) + ".js", fn.source + "\n"); + // } + } + } + } + + let additionalPrivateNames = new Set(); + + function privateName(name) { + additionalPrivateNames.add(name); + return "builtinNames." + name + "PrivateName()"; + } + // C++ codegen let bundledCPP = `// Generated by ${import.meta.path} namespace Zig { class GlobalObject; } @@ -283,48 +322,78 @@ export async function bundleBuiltinFunctions({ requireTransformer }: BundleBuilt #include "JSDOMGlobalObject.h" #include "WebCoreJSClientData.h" #include - + #include "BunBuiltinNames.h" + namespace WebCore { - + static const LChar combinedSourceCodeBuffer[${combinedSourceCodeLength + 1}] = { ${combinedSourceCodeChars}, 0 }; + static const std::span<const LChar> internalCombinedSource = { combinedSourceCodeBuffer, ${combinedSourceCodeLength} }; `; for (const { basename, functions } of files) { - bundledCPP += `/* ${basename}.ts */\n`; + bundledCPP += ` +#pragma mark ${basename} +`; + const lowerBasename = low(basename); for (const fn of functions) { - const [code, count] = fmtCPPCharArray(fn.source, true); - const name = `${lowerBasename}${cap(fn.name)}Code`; - bundledCPP += `// ${fn.name} - const JSC::ConstructAbility s_${name}ConstructAbility = JSC::ConstructAbility::${fn.constructAbility}; - const JSC::InlineAttribute s_${name}InlineAttribute = JSC::InlineAttribute::${ fn.directives.alwaysInline ? "Always" : "None" - }; - const JSC::ConstructorKind s_${name}ConstructorKind = JSC::ConstructorKind::${fn.constructKind}; - const JSC::ImplementationVisibility s_${name}ImplementationVisibility = JSC::ImplementationVisibility::${ fn.visibility - }; - const int s_${name}Length = ${fn.source.length}; - const JSC::Intrinsic s_${name}Intrinsic = JSC::NoIntrinsic; - const char s_${name}Bytes[${count}] = ${code}; - const char* s_${name} = s_${name}Bytes; - `; + const name = `${basename}${cap(fn.name)}`; + bundledCPP += ` +JSC::FunctionExecutable* ${lowerBasename}${cap(fn.name)}CodeGenerator(JSC::VM& vm) +{ + auto &builtins = static_cast<JSVMClientData*>(vm.clientData)->builtinFunctions().${lowerBasename}Builtins(); + auto *executable = builtins.${lowerBasename}${cap(fn.name)}CodeExecutable(); + return executable->link(vm, nullptr, builtins.${lowerBasename}${cap(fn.name)}CodeSource(), std::nullopt, JSC::NoIntrinsic); +} +`; } - bundledCPP += `#define DEFINE_BUILTIN_GENERATOR(codeName, functionName, overriddenName, argumentCount) \\ - JSC::FunctionExecutable* codeName##Generator(JSC::VM& vm) \\ - {\\ - JSVMClientData* clientData = static_cast<JSVMClientData*>(vm.clientData); \\ - return clientData->builtinFunctions().${lowerBasename}Builtins().codeName##Executable()->link(vm, nullptr, clientData->builtinFunctions().${lowerBasename}Builtins().codeName##Source(), std::nullopt, s_##codeName##Intrinsic); \\ + } + + const initializeSourceCodeFn = (fn: BundledBuiltin, basename: string) => { + const name = `${low(basename)}${cap(fn.name)}CodeSource`; + return `m_${name}(SourceCode(sourceProvider.copyRef(), ${fn.sourceOffset}, ${fn.source.length + fn.sourceOffset}, 1, 1))`; + }; + for (const { basename, internal, functions } of files) { + bundledCPP += ` +#pragma mark ${basename} + +${basename}BuiltinsWrapper::${basename}BuiltinsWrapper(JSC::VM& vm, RefPtr<JSC::SourceProvider> sourceProvider, BunBuiltinNames &builtinNames) + : m_vm(vm)`; + + if (internal) { + bundledCPP += `, ${functions.map(fn => `m_${fn.name}PrivateName(${privateName(fn.name)})`).join(",\n ")}`; } - WEBCORE_FOREACH_${basename.toUpperCase()}_BUILTIN_CODE(DEFINE_BUILTIN_GENERATOR) - #undef DEFINE_BUILTIN_GENERATOR - - `; + bundledCPP += `, ${functions.map(fn => initializeSourceCodeFn(fn, basename)).join(",\n ")} {} `; } + bundledCPP += ` +RefPtr<JSC::SourceProvider> createBuiltinsSourceProvider() { + return JSC::StringSourceProvider::create(StringImpl::createWithoutCopying(internalCombinedSource), SourceOrigin(), String(), SourceTaintedOrigin()); +} +`; + + bundledCPP += ` +JSBuiltinFunctions::JSBuiltinFunctions(JSC::VM& vm, RefPtr<JSC::SourceProvider> provider, BunBuiltinNames& builtinNames) : m_vm(vm), + ${files.map(({ basename }) => `m_${low(basename)}Builtins(vm, provider, builtinNames)`).join(", ")} +{} + +void JSBuiltinFunctions::exportNames() { +`; + for (const { basename, internal } of files) { + if (internal) { + bundledCPP += ` m_${low(basename)}Builtins.exportNames();\n`; + } + } + + bundledCPP += ` +} + +`; + bundledCPP += ` - JSBuiltinInternalFunctions::JSBuiltinInternalFunctions(JSC::VM& vm) - : m_vm(vm) +JSBuiltinInternalFunctions::JSBuiltinInternalFunctions(JSC::VM& vm) : m_vm(vm) `; for (const { basename, internal } of files) { @@ -333,10 +402,9 @@ export async function bundleBuiltinFunctions({ requireTransformer }: BundleBuilt } } - bundledCPP += ` - { - UNUSED_PARAM(vm); - } + bundledCPP += `{ + UNUSED_PARAM(vm); + } template<typename Visitor> void JSBuiltinInternalFunctions::visit(Visitor& visitor) @@ -417,12 +485,10 @@ export async function bundleBuiltinFunctions({ requireTransformer }: BundleBuilt const name =
`${lowerBasename}${cap(fn.name)}Code`; bundledHeader += `// ${fn.name} #define WEBCORE_BUILTIN_${basename.toUpperCase()}_${fn.name.toUpperCase()} 1 - extern const char* s_${name}; - extern const int s_${name}Length; - extern const JSC::ConstructAbility s_${name}ConstructAbility; - extern const JSC::InlineAttribute s_${name}InlineAttribute; - extern const JSC::ConstructorKind s_${name}ConstructorKind; - extern const JSC::ImplementationVisibility s_${name}ImplementationVisibility; + static constexpr JSC::ConstructAbility s_${name}ConstructAbility = JSC::ConstructAbility::${fn.constructAbility}; + static constexpr JSC::InlineAttribute s_${name}InlineAttribute = JSC::InlineAttribute::${fn.directives.alwaysInline ? "Always" : "None"}; + static constexpr JSC::ConstructorKind s_${name}ConstructorKind = JSC::ConstructorKind::${fn.constructKind}; + static constexpr JSC::ImplementationVisibility s_${name}ImplementationVisibility = JSC::ImplementationVisibility::${fn.visibility}; `; } @@ -450,14 +516,7 @@ export async function bundleBuiltinFunctions({ requireTransformer }: BundleBuilt class ${basename}BuiltinsWrapper : private JSC::WeakHandleOwner { public: - explicit ${basename}BuiltinsWrapper(JSC::VM& vm) - : m_vm(vm) - WEBCORE_FOREACH_${basename.toUpperCase()}_BUILTIN_FUNCTION_NAME(INITIALIZE_BUILTIN_NAMES) - #define INITIALIZE_BUILTIN_SOURCE_MEMBERS(name, functionName, overriddenName, length) , m_##name##Source(JSC::makeSource(StringImpl::createWithoutCopying({reinterpret_cast<const LChar*>(s_##name), static_cast<size_t>(length)}), { }, JSC::SourceTaintedOrigin::Untainted)) - WEBCORE_FOREACH_${basename.toUpperCase()}_BUILTIN_CODE(INITIALIZE_BUILTIN_SOURCE_MEMBERS) - #undef INITIALIZE_BUILTIN_SOURCE_MEMBERS - { - } + explicit ${basename}BuiltinsWrapper(JSC::VM& vm, RefPtr<JSC::SourceProvider> sourceProvider, BunBuiltinNames &builtinNames); #define EXPOSE_BUILTIN_EXECUTABLES(name, functionName, overriddenName, length) \\ JSC::UnlinkedFunctionExecutable* name##Executable(); \\ @@ -544,25 +603,9 @@ export async function bundleBuiltinFunctions({ requireTransformer }: BundleBuilt } bundledHeader += `class JSBuiltinFunctions { public: - explicit JSBuiltinFunctions(JSC::VM& vm) - : m_vm(vm) - `; - - for (const { basename } of files) { - bundledHeader += ` , m_${low(basename)}Builtins(m_vm)\n`; - } - - bundledHeader += ` - { - `; - - for (const { basename, internal } of files) { - if (internal) { - bundledHeader += ` m_${low(basename)}Builtins.exportNames();\n`; - } - } - - bundledHeader += ` } + explicit JSBuiltinFunctions(JSC::VM& vm, RefPtr<JSC::SourceProvider> provider, BunBuiltinNames &builtinNames); + void exportNames(); + `; for (const { basename } of files) { @@ -613,7 +656,53 @@ export async function bundleBuiltinFunctions({ requireTransformer }: BundleBuilt } // namespace WebCore `; + // Handle builtin names + { + const BunBuiltinNamesHeader = require("fs").readFileSync( + path.join(import.meta.dir, "../js/builtins/BunBuiltinNames.h"), + "utf8", + ); + let definedBuiltinNamesStartI = BunBuiltinNamesHeader.indexOf( + "#define BUN_COMMON_PRIVATE_IDENTIFIERS_EACH_PROPERTY_NAME", + ); + let definedBuiltinNamesMacroEndI = BunBuiltinNamesHeader.indexOf( + "--- END of BUN_COMMON_PRIVATE_IDENTIFIERS_EACH_PROPERTY_NAME ---", + ); + const definedBuiltinNames = BunBuiltinNamesHeader.slice(definedBuiltinNamesStartI, definedBuiltinNamesMacroEndI) .split("\n") .map(x => x.trim()) .filter(x => x.startsWith("macro(")) .map(x => x.slice(x.indexOf("(") + 1, x.indexOf(")"))) .map(x => x.trim()) .sort(); + const uniqueDefinedBuiltinNames = new Set(); + for (let name
of definedBuiltinNames) { + const prevSize = uniqueDefinedBuiltinNames.size; + uniqueDefinedBuiltinNames.add(name); + if (uniqueDefinedBuiltinNames.size === prevSize) { + throw new Error(`Duplicate private name "${name}" in BunBuiltinNames.h`); + } + } + for (let additionalPrivateName of additionalPrivateNames) { + if (uniqueDefinedBuiltinNames.has(additionalPrivateName)) { + additionalPrivateNames.delete(additionalPrivateName); + } + } + + let additionalPrivateNamesHeader = `// Generated by ${import.meta.path} +#pragma once + +#ifndef BUN_ADDITIONAL_BUILTIN_NAMES +#define BUN_ADDITIONAL_BUILTIN_NAMES(macro) \\ + ${Array.from(additionalPrivateNames) + .map(x => `macro(${x})`) + .join(" \\\n ")} +#endif +`; + + writeIfNotChanged(path.join(CODEGEN_DIR, "BunBuiltinNames+extras.h"), additionalPrivateNamesHeader); + } writeIfNotChanged(path.join(CODEGEN_DIR, "WebCoreJSBuiltins.h"), bundledHeader); writeIfNotChanged(path.join(CODEGEN_DIR, "WebCoreJSBuiltins.cpp"), bundledCPP); diff --git a/src/codegen/helpers.ts b/src/codegen/helpers.ts index 04f436c25f..a97007b767 100644 --- a/src/codegen/helpers.ts +++ b/src/codegen/helpers.ts @@ -18,7 +18,17 @@ export function fmtCPPCharArray(str: string, nullTerminated: boolean = true) { .join(",") + (nullTerminated ? ",0" : "") + "}"; - return [chars, normalized.length + (nullTerminated ? 1 : 0)]; + return [chars, normalized.length + (nullTerminated ? 1 : 0)] as const; +} + +export function addCPPCharArray(str: string, nullTerminated: boolean = true) { + const normalized = str.trim() + "\n"; + return ( + normalized + .split("") + .map(a => a.charCodeAt(0)) + .join(",") + (nullTerminated ? ",0" : "") + ); } export function declareASCIILiteral(name: string, value: string) { diff --git a/src/compile_target.zig b/src/compile_target.zig index 0a8ec3ff79..e3330614e7 100644 --- a/src/compile_target.zig +++ b/src/compile_target.zig @@ -153,7 +153,6 @@ pub fn downloadToPath(this: *const CompileTarget, env: *bun.DotEnv.Loader, alloc { var progress = refresher.start("Downloading", 0); defer progress.end(); - const timeout = 30000; const http_proxy: ?bun.URL = env.getHttpProxy(url); async_http.* = HTTP.AsyncHTTP.initSync( @@ -164,12 +163,10 @@ pub fn downloadToPath(this: *const CompileTarget, env: *bun.DotEnv.Loader, alloc "", compressed_archive_bytes, "", - timeout, http_proxy, null, HTTP.FetchRedirect.follow, ); - async_http.client.timeout = timeout; async_http.client.progress_node = progress; async_http.client.reject_unauthorized = env.getTLSRejectUnauthorized(); @@ -294,8 +291,10 @@ pub fn downloadToPath(this: *const CompileTarget, env: *bun.DotEnv.Loader, alloc const dirname = bun.path.dirname(dest_z, .loose); if (dirname.len > 0) { std.fs.cwd().makePath(dirname) catch {}; + continue; } - continue; + + // fallthrough, failed for another reason } node.end(); Output.err(err, "Failed to move cross-compiled bun binary into cache directory {}", .{bun.fmt.fmtPath(u8, dest_z, .{})}); diff --git a/src/crash_handler.zig b/src/crash_handler.zig index ee608efef6..26e714efcf 100644 --- a/src/crash_handler.zig +++ b/src/crash_handler.zig @@ -51,7 +51,12 @@ var panic_mutex = std.Thread.Mutex{}; threadlocal var panic_stage: usize = 0; /// This can be set by various parts of the codebase to indicate a broader -/// action being taken, for example "Crashed while parsing /path/to/file.js" +/// action being taken. It is printed when a crash happens, which can help +/// narrow down what the bug is. 
Example: "Crashed while parsing /path/to/file.js" +/// +/// Some of these are enabled in release builds, which may encourage users to +/// attach the affected files to the crash report. Others, which may have a low crash +/// rate or only crash due to assertion failures, are debug-only. See `Action`. pub threadlocal var current_action: ?Action = null; const CPUFeatures = @import("./bun.js/bindings/CPUFeatures.zig").CPUFeatures; @@ -102,11 +107,54 @@ pub const Action = union(enum) { visit: []const u8, print: []const u8, + /// bun.bundle_v2.LinkerContext.generateCompileResultForJSChunk + bundle_generate_chunk: if (bun.Environment.isDebug) struct { + context: *const anyopaque, // unfortunate dependency loop workaround + chunk: *const bun.bundle_v2.Chunk, + part_range: *const bun.bundle_v2.PartRange, + + pub fn linkerContext(data: *const @This()) *const bun.bundle_v2.LinkerContext { + return @ptrCast(@alignCast(data.context)); + } + } else void, + + resolver: if (bun.Environment.isDebug) struct { + source_dir: []const u8, + import_path: []const u8, + kind: bun.ImportKind, + } else void, + pub fn format(act: Action, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { switch (act) { .parse => |path| try writer.print("parsing {s}", .{path}), .visit => |path| try writer.print("visiting {s}", .{path}), .print => |path| try writer.print("printing {s}", .{path}), + .bundle_generate_chunk => |data| if (bun.Environment.isDebug) { + try writer.print( \\generating bundler chunk \\ chunk entry point: {s} \\ source: {s} \\ part range: {d}..{d} , .{ data.linkerContext().graph.bundler_graph.input_files .items(.source)[data.chunk.entry_point.source_index] .path.text, data.linkerContext().graph.bundler_graph.input_files .items(.source)[data.part_range.source_index.get()] .path.text, data.part_range.part_index_begin, data.part_range.part_index_end, }, ); + }, + .resolver => |res| if (bun.Environment.isDebug) { + try writer.print("resolving {s} from {s} ({s})", .{ res.import_path, res.source_dir, res.kind.label(), }); + }, } } };
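As a usage sketch of this mechanism (a hypothetical call site, not part of this patch, assuming the module is reachable as bun.crash_handler): a subsystem publishes what it is about to do before fallible work, so a fault in the middle is reported with that context, and clears it on the way out.

fn parseOneFile(path: []const u8) !void {
    // Publish crash context; a crash below is reported as "parsing <path>".
    // current_action is threadlocal, so this only affects the current thread.
    bun.crash_handler.current_action = .{ .parse = path };
    defer bun.crash_handler.current_action = null;

    // ... fallible parsing work ...
}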
// // To make the release-mode behavior easier to demo, debug mode @@ -160,6 +204,9 @@ pub fn crashHandler( break :check_flag false; } } + // Act like release build when explicitly enabling reporting + if (isReportingEnabled()) break :check_flag false; + break :check_flag true; }; @@ -175,11 +222,10 @@ pub fn crashHandler( } writer.writeAll("oh no") catch std.posix.abort(); if (Output.enable_ansi_colors) { - writer.writeAll(Output.prettyFmt(": ", true)) catch std.posix.abort(); + writer.writeAll(Output.prettyFmt(": multiple threads are crashing\n", true)) catch std.posix.abort(); } else { - writer.writeAll(Output.prettyFmt(": ", true)) catch std.posix.abort(); + writer.writeAll(Output.prettyFmt(": multiple threads are crashing\n", true)) catch std.posix.abort(); } - writer.writeAll("multiple threads are crashing") catch std.posix.abort(); } if (reason != .out_of_memory or debug_trace) { @@ -234,6 +280,8 @@ pub fn crashHandler( }; if (debug_trace) { + has_printed_message = true; + dumpStackTrace(trace.*); trace_str_buf.writer().print("{}", .{TraceString{ @@ -294,12 +342,22 @@ pub fn crashHandler( writer.writeAll("\n") catch std.posix.abort(); } } + // Be aware that this function only lets one thread return from it. // This is important so that we do not try to run the following reload logic twice. waitForOtherThreadToFinishPanicking(); report(trace_str_buf.slice()); + // At this point, the crash handler has performed it's job. Reset the segfault handler + // so that a crash will actually crash. We need this because we want the process to + // exit with a signal, and allow tools to be able to gather core dumps. + // + // This is done so late (in comparison to the Zig Standard Library's panic handler) + // because if multiple threads segfault (more often the case on Windows), we don't + // want another thread to interrupt the crashing of the first one. + resetSegfaultHandler(); + if (bun.auto_reload_on_crash and // Do not reload if the panic arose FROM the reload function. !bun.isProcessReloadInProgressOnAnotherThread()) @@ -319,6 +377,8 @@ pub fn crashHandler( inline 1, 2 => |t| { if (t == 1) { panic_stage = 2; + + resetSegfaultHandler(); Output.flush(); } panic_stage = 3; @@ -332,6 +392,7 @@ pub fn crashHandler( }, 3 => { // Panicked while printing "Panicked during a panic." + panic_stage = 4; }, else => { // Panicked or otherwise looped into the panic handler while trying to exit. @@ -721,6 +782,8 @@ pub fn init() void { } pub fn resetSegfaultHandler() void { + if (!enable) return; + if (bun.Environment.os == .windows) { if (windows_segfault_handle) |handle| { const rc = windows.kernel32.RemoveVectoredExceptionHandler(handle); @@ -851,6 +914,22 @@ fn waitForOtherThreadToFinishPanicking() void { } } +/// This is to be called by any thread that is attempting to exit the process. +/// If another thread is panicking, this will sleep this thread forever, under +/// the assumption that the crash handler will terminate the program. +/// +/// There have been situations in the past where a bundler thread starts +/// panicking, but the main thread ends up marking a test as passing and then +/// exiting with code zero before the crash handler can finish the crash. +pub fn sleepForeverIfAnotherThreadIsCrashing() void { + if (panicking.load(.acquire) > 0) { + // Sleep forever without hammering the CPU + var futex = std.atomic.Value(u32).init(0); + while (true) std.Thread.Futex.wait(&futex, 0); + comptime unreachable; + } +} + /// Each platform is encoded as a single character. 
It is placed right after the /// slash after the version, so someone just reading the trace string can tell /// what platform it came from. L, M, and W are for Linux, macOS, and Windows, @@ -1511,6 +1590,7 @@ pub const js_bindings = struct { .{ "panic", jsPanic }, .{ "rootError", jsRootError }, .{ "outOfMemory", jsOutOfMemory }, + .{ "raiseIgnoringPanicHandler", jsRaiseIgnoringPanicHandler }, }) |tuple| { const name = JSC.ZigString.static(tuple[0]); obj.put(global, name, JSC.createCallback(global, name, 1, tuple[1])); @@ -1548,11 +1628,15 @@ pub const js_bindings = struct { bun.outOfMemory(); } + pub fn jsRaiseIgnoringPanicHandler(_: *JSC.JSGlobalObject, _: *JSC.CallFrame) JSC.JSValue { + bun.Global.raiseIgnoringPanicHandler(.SIGSEGV); + } + pub fn jsGetFeaturesAsVLQ(global: *JSC.JSGlobalObject, _: *JSC.CallFrame) JSC.JSValue { const bits = bun.Analytics.packedFeatures(); var buf = std.BoundedArray(u8, 16){}; writeU64AsTwoVLQs(buf.writer(), @bitCast(bits)) catch { - // there is definetly enough space in the bounded array + // there is definitely enough space in the bounded array unreachable; }; return bun.String.createLatin1(buf.slice()).toJS(global); diff --git a/src/darwin_c.zig b/src/darwin_c.zig index 075b6c1852..a5b3a8721a 100644 --- a/src/darwin_c.zig +++ b/src/darwin_c.zig @@ -415,6 +415,75 @@ pub const SystemErrno = enum(u8) { }; }; +pub const UV_E2BIG: i32 = @intFromEnum(SystemErrno.E2BIG); +pub const UV_EACCES: i32 = @intFromEnum(SystemErrno.EACCES); +pub const UV_EADDRINUSE: i32 = @intFromEnum(SystemErrno.EADDRINUSE); +pub const UV_EADDRNOTAVAIL: i32 = @intFromEnum(SystemErrno.EADDRNOTAVAIL); +pub const UV_EAFNOSUPPORT: i32 = @intFromEnum(SystemErrno.EAFNOSUPPORT); +pub const UV_EAGAIN: i32 = @intFromEnum(SystemErrno.EAGAIN); +pub const UV_EALREADY: i32 = @intFromEnum(SystemErrno.EALREADY); +pub const UV_EBADF: i32 = @intFromEnum(SystemErrno.EBADF); +pub const UV_EBUSY: i32 = @intFromEnum(SystemErrno.EBUSY); +pub const UV_ECANCELED: i32 = @intFromEnum(SystemErrno.ECANCELED); +pub const UV_ECHARSET: i32 = -bun.windows.libuv.UV__ECHARSET; +pub const UV_ECONNABORTED: i32 = @intFromEnum(SystemErrno.ECONNABORTED); +pub const UV_ECONNREFUSED: i32 = @intFromEnum(SystemErrno.ECONNREFUSED); +pub const UV_ECONNRESET: i32 = @intFromEnum(SystemErrno.ECONNRESET); +pub const UV_EDESTADDRREQ: i32 = @intFromEnum(SystemErrno.EDESTADDRREQ); +pub const UV_EEXIST: i32 = @intFromEnum(SystemErrno.EEXIST); +pub const UV_EFAULT: i32 = @intFromEnum(SystemErrno.EFAULT); +pub const UV_EHOSTUNREACH: i32 = @intFromEnum(SystemErrno.EHOSTUNREACH); +pub const UV_EINTR: i32 = @intFromEnum(SystemErrno.EINTR); +pub const UV_EINVAL: i32 = @intFromEnum(SystemErrno.EINVAL); +pub const UV_EIO: i32 = @intFromEnum(SystemErrno.EIO); +pub const UV_EISCONN: i32 = @intFromEnum(SystemErrno.EISCONN); +pub const UV_EISDIR: i32 = @intFromEnum(SystemErrno.EISDIR); +pub const UV_ELOOP: i32 = @intFromEnum(SystemErrno.ELOOP); +pub const UV_EMFILE: i32 = @intFromEnum(SystemErrno.EMFILE); +pub const UV_EMSGSIZE: i32 = @intFromEnum(SystemErrno.EMSGSIZE); +pub const UV_ENAMETOOLONG: i32 = @intFromEnum(SystemErrno.ENAMETOOLONG); +pub const UV_ENETDOWN: i32 = @intFromEnum(SystemErrno.ENETDOWN); +pub const UV_ENETUNREACH: i32 = @intFromEnum(SystemErrno.ENETUNREACH); +pub const UV_ENFILE: i32 = @intFromEnum(SystemErrno.ENFILE); +pub const UV_ENOBUFS: i32 = @intFromEnum(SystemErrno.ENOBUFS); +pub const UV_ENODEV: i32 = @intFromEnum(SystemErrno.ENODEV); +pub const UV_ENOENT: i32 = @intFromEnum(SystemErrno.ENOENT); +pub const UV_ENOMEM: i32 
= @intFromEnum(SystemErrno.ENOMEM); +pub const UV_ENONET: i32 = -bun.windows.libuv.UV_ENONET; +pub const UV_ENOSPC: i32 = @intFromEnum(SystemErrno.ENOSPC); +pub const UV_ENOSYS: i32 = @intFromEnum(SystemErrno.ENOSYS); +pub const UV_ENOTCONN: i32 = @intFromEnum(SystemErrno.ENOTCONN); +pub const UV_ENOTDIR: i32 = @intFromEnum(SystemErrno.ENOTDIR); +pub const UV_ENOTEMPTY: i32 = @intFromEnum(SystemErrno.ENOTEMPTY); +pub const UV_ENOTSOCK: i32 = @intFromEnum(SystemErrno.ENOTSOCK); +pub const UV_ENOTSUP: i32 = @intFromEnum(SystemErrno.ENOTSUP); +pub const UV_EPERM: i32 = @intFromEnum(SystemErrno.EPERM); +pub const UV_EPIPE: i32 = @intFromEnum(SystemErrno.EPIPE); +pub const UV_EPROTO: i32 = @intFromEnum(SystemErrno.EPROTO); +pub const UV_EPROTONOSUPPORT: i32 = @intFromEnum(SystemErrno.EPROTONOSUPPORT); +pub const UV_EPROTOTYPE: i32 = @intFromEnum(SystemErrno.EPROTOTYPE); +pub const UV_EROFS: i32 = @intFromEnum(SystemErrno.EROFS); +pub const UV_ESHUTDOWN: i32 = @intFromEnum(SystemErrno.ESHUTDOWN); +pub const UV_ESPIPE: i32 = @intFromEnum(SystemErrno.ESPIPE); +pub const UV_ESRCH: i32 = @intFromEnum(SystemErrno.ESRCH); +pub const UV_ETIMEDOUT: i32 = @intFromEnum(SystemErrno.ETIMEDOUT); +pub const UV_ETXTBSY: i32 = @intFromEnum(SystemErrno.ETXTBSY); +pub const UV_EXDEV: i32 = @intFromEnum(SystemErrno.EXDEV); +pub const UV_EFBIG: i32 = @intFromEnum(SystemErrno.EFBIG); +pub const UV_ENOPROTOOPT: i32 = @intFromEnum(SystemErrno.ENOPROTOOPT); +pub const UV_ERANGE: i32 = @intFromEnum(SystemErrno.ERANGE); +pub const UV_ENXIO: i32 = @intFromEnum(SystemErrno.ENXIO); +pub const UV_EMLINK: i32 = @intFromEnum(SystemErrno.EMLINK); +pub const UV_EHOSTDOWN: i32 = @intFromEnum(SystemErrno.EHOSTDOWN); +pub const UV_EREMOTEIO: i32 = -bun.windows.libuv.UV_EREMOTEIO; +pub const UV_ENOTTY: i32 = @intFromEnum(SystemErrno.ENOTTY); +pub const UV_EFTYPE: i32 = @intFromEnum(SystemErrno.EFTYPE); +pub const UV_EILSEQ: i32 = @intFromEnum(SystemErrno.EILSEQ); +pub const UV_EOVERFLOW: i32 = @intFromEnum(SystemErrno.EOVERFLOW); +pub const UV_ESOCKTNOSUPPORT: i32 = @intFromEnum(SystemErrno.ESOCKTNOSUPPORT); +pub const UV_ENODATA: i32 = @intFromEnum(SystemErrno.ENODATA); +pub const UV_EUNATCH: i32 = -bun.windows.libuv.UV_EUNATCH; + // Courtesy of https://github.com/nodejs/node/blob/master/deps/uv/src/unix/darwin-stub.h pub const struct_CFArrayCallBacks = opaque {}; pub const CFIndex = c_long; diff --git a/src/defines.zig b/src/defines.zig index 2974547b7c..296bd987a4 100644 --- a/src/defines.zig +++ b/src/defines.zig @@ -66,8 +66,8 @@ pub const DefineData = struct { }; } - pub fn from_mergable_input(defines: RawDefines, user_defines: *UserDefines, log: *logger.Log, allocator: std.mem.Allocator) !void { - try user_defines.ensureUnusedCapacity(@as(u32, @truncate(defines.count()))); + pub fn fromMergeableInput(defines: RawDefines, user_defines: *UserDefines, log: *logger.Log, allocator: std.mem.Allocator) !void { + try user_defines.ensureUnusedCapacity(@truncate(defines.count())); var iter = defines.iterator(); while (iter.next()) |entry| { var keySplitter = std.mem.split(u8, entry.key_ptr.*, "."); @@ -85,42 +85,33 @@ pub const DefineData = struct { // check for nested identifiers var valueSplitter = std.mem.split(u8, entry.value_ptr.*, "."); var isIdent = true; + while (valueSplitter.next()) |part| { if (!js_lexer.isIdentifier(part) or js_lexer.Keywords.has(part)) { isIdent = false; break; } } - if (isIdent) { + if (isIdent) { // Special-case undefined. 
it's not an identifier here // https://github.com/evanw/esbuild/issues/1407 - if (strings.eqlComptime(entry.value_ptr.*, "undefined")) { - user_defines.putAssumeCapacity( - entry.key_ptr.*, - DefineData{ - .value = js_ast.Expr.Data{ .e_undefined = js_ast.E.Undefined{} }, - .original_name = entry.value_ptr.*, - .can_be_removed_if_unused = true, - }, - ); - } else { - const ident = js_ast.E.Identifier{ .ref = Ref.None, .can_be_removed_if_unused = true }; + const value = if (strings.eqlComptime(entry.value_ptr.*, "undefined")) + js_ast.Expr.Data{ .e_undefined = js_ast.E.Undefined{} } + else + js_ast.Expr.Data{ .e_identifier = .{ + .ref = Ref.None, + .can_be_removed_if_unused = true, + } }; - user_defines.putAssumeCapacity( - entry.key_ptr.*, - DefineData{ - .value = js_ast.Expr.Data{ .e_identifier = ident }, - .original_name = entry.value_ptr.*, - .can_be_removed_if_unused = true, - }, - ); - } - - // user_defines.putAssumeCapacity( - // entry.key_ptr, - // DefineData{ .value = js_ast.Expr.Data{.e_identifier = } }, - // ); + user_defines.putAssumeCapacity( + entry.key_ptr.*, + DefineData{ + .value = value, + .original_name = entry.value_ptr.*, + .can_be_removed_if_unused = true, + }, + ); continue; } const _log = log; @@ -129,47 +120,18 @@ pub const DefineData = struct { .path = defines_path, .key_path = fs.Path.initWithNamespace("defines", "internal"), }; - var expr = try json_parser.ParseEnvJSON(&source, _log, allocator); - var data: js_ast.Expr.Data = undefined; - switch (expr.data) { - .e_missing => { - data = .{ .e_missing = js_ast.E.Missing{} }; - }, - // We must copy so we don't recycle - .e_string => { - data = .{ .e_string = try allocator.create(js_ast.E.String) }; - data.e_string.* = try expr.data.e_string.clone(allocator); - }, - .e_null, .e_boolean, .e_number => { - data = expr.data; - }, - // We must copy so we don't recycle - .e_object => |obj| { - expr.data.e_object = try allocator.create(js_ast.E.Object); - expr.data.e_object.* = obj.*; - data = expr.data; - }, - // We must copy so we don't recycle - .e_array => |obj| { - expr.data.e_array = try allocator.create(js_ast.E.Array); - expr.data.e_array.* = obj.*; - data = expr.data; - }, - else => { - continue; - }, - } - + const expr = try json_parser.ParseEnvJSON(&source, _log, allocator); + const cloned = try expr.data.deepClone(allocator); user_defines.putAssumeCapacity(entry.key_ptr.*, DefineData{ - .value = data, - .can_be_removed_if_unused = @as(js_ast.Expr.Tag, data).isPrimitiveLiteral(), + .value = cloned, + .can_be_removed_if_unused = expr.isPrimitiveLiteral(), }); } } - pub fn from_input(defines: RawDefines, log: *logger.Log, allocator: std.mem.Allocator) !UserDefines { + pub fn fromInput(defines: RawDefines, log: *logger.Log, allocator: std.mem.Allocator) !UserDefines { var user_defines = UserDefines.init(allocator); - try from_mergable_input(defines, &user_defines, log, allocator); + try fromMergeableInput(defines, &user_defines, log, allocator); return user_defines; } diff --git a/src/deps/c_ares.zig b/src/deps/c_ares.zig index 06871361cc..e009a2ab54 100644 --- a/src/deps/c_ares.zig +++ b/src/deps/c_ares.zig @@ -315,7 +315,7 @@ pub const struct_nameinfo = extern struct { const node_slice = this.node[0..node_len]; array.putIndex(globalThis, 0, JSC.ZigString.fromUTF8(node_slice).toJS(globalThis)); } else { - array.putIndex(globalThis, 0, JSC.JSValue.jsUndefined()); + array.putIndex(globalThis, 0, .undefined); } if (this.service != null) { @@ -323,7 +323,7 @@ pub const struct_nameinfo = extern struct { const 
service_slice = this.service[0..service_len]; array.putIndex(globalThis, 1, JSC.ZigString.fromUTF8(service_slice).toJS(globalThis)); } else { - array.putIndex(globalThis, 1, JSC.JSValue.jsUndefined()); + array.putIndex(globalThis, 1, .undefined); } return array; @@ -1579,7 +1579,7 @@ pub export fn Bun__canonicalizeIP( const addr_slice = addr.toSlice(bun.default_allocator); const addr_str = addr_slice.slice(); if (addr_str.len >= INET6_ADDRSTRLEN) { - return JSC.JSValue.jsUndefined(); + return .undefined; } var ip_std_text: [INET6_ADDRSTRLEN + 1]u8 = undefined; @@ -1593,12 +1593,12 @@ pub export fn Bun__canonicalizeIP( if (ares_inet_pton(af, &ip_addr, &ip_std_text) != 1) { af = AF.INET6; if (ares_inet_pton(af, &ip_addr, &ip_std_text) != 1) { - return JSC.JSValue.jsUndefined(); + return .undefined; } } // ip_addr will contain the null-terminated string of the cannonicalized IP if (ares_inet_ntop(af, &ip_std_text, &ip_addr, @sizeOf(@TypeOf(ip_addr))) == null) { - return JSC.JSValue.jsUndefined(); + return .undefined; } // use the null-terminated size to return the string const size = bun.len(bun.cast([*:0]u8, &ip_addr)); diff --git a/src/deps/libdeflate b/src/deps/libdeflate new file mode 160000 index 0000000000..dc76454a39 --- /dev/null +++ b/src/deps/libdeflate @@ -0,0 +1 @@ +Subproject commit dc76454a39e7e83b68c3704b6e3784654f8d5ac5 diff --git a/src/deps/libdeflate.zig b/src/deps/libdeflate.zig new file mode 100644 index 0000000000..d38d6dcb9f --- /dev/null +++ b/src/deps/libdeflate.zig @@ -0,0 +1,149 @@ +const std = @import("std"); +const bun = @import("root").bun; +pub const Options = extern struct { + sizeof_options: usize = @sizeOf(Options), + malloc_func: ?*const fn (usize) callconv(.C) ?*anyopaque = @import("std").mem.zeroes(?*const fn (usize) callconv(.C) ?*anyopaque), + free_func: ?*const fn (?*anyopaque) callconv(.C) void = @import("std").mem.zeroes(?*const fn (?*anyopaque) callconv(.C) void), +}; +pub extern fn libdeflate_alloc_compressor(compression_level: c_int) ?*Compressor; +pub extern fn libdeflate_alloc_compressor_ex(compression_level: c_int, options: ?*const Options) ?*Compressor; +pub extern fn libdeflate_deflate_compress(compressor: *Compressor, in: ?*const anyopaque, in_nbytes: usize, out: ?*anyopaque, out_nbytes_avail: usize) usize; +pub extern fn libdeflate_deflate_compress_bound(compressor: *Compressor, in_nbytes: usize) usize; +pub extern fn libdeflate_zlib_compress(compressor: *Compressor, in: ?*const anyopaque, in_nbytes: usize, out: ?*anyopaque, out_nbytes_avail: usize) usize; +pub extern fn libdeflate_zlib_compress_bound(compressor: *Compressor, in_nbytes: usize) usize; +pub extern fn libdeflate_gzip_compress(compressor: *Compressor, in: ?*const anyopaque, in_nbytes: usize, out: ?*anyopaque, out_nbytes_avail: usize) usize; +pub extern fn libdeflate_gzip_compress_bound(compressor: *Compressor, in_nbytes: usize) usize; +pub extern fn libdeflate_free_compressor(compressor: *Compressor) void; + +fn load_once() void { + libdeflate_set_memory_allocator(bun.Mimalloc.mi_malloc, bun.Mimalloc.mi_free); +} + +var loaded_once = std.once(load_once); + +pub fn load() void { + loaded_once.call(); +} + +pub const Compressor = opaque { + pub fn alloc(compression_level: c_int) ?*Compressor { + return libdeflate_alloc_compressor(compression_level); + } + + pub fn alloc_ex(compression_level: c_int, options: ?*const Options) ?*Compressor { + return libdeflate_alloc_compressor_ex(compression_level, options); + } + + pub fn deinit(this: *Compressor) void { + return 
libdeflate_free_compressor(this); + } + + /// Compresses `input` into `output` and returns the number of bytes written. + pub fn inflate(this: *Compressor, input: []const u8, output: []u8) Result { + const written = libdeflate_deflate_compress(this, input.ptr, input.len, output.ptr, output.len); + return Result{ .read = input.len, .written = written, .status = Status.success }; + } + + pub fn maxBytesNeeded(this: *Compressor, input: []const u8, encoding: Encoding) usize { + return switch (encoding) { + Encoding.deflate => return libdeflate_deflate_compress_bound(this, input.len), + Encoding.zlib => return libdeflate_zlib_compress_bound(this, input.len), + Encoding.gzip => return libdeflate_gzip_compress_bound(this, input.len), + }; + } + + pub fn compress(this: *Compressor, input: []const u8, output: []u8, encoding: Encoding) Result { + switch (encoding) { + Encoding.deflate => return this.inflate(input, output), + Encoding.zlib => return this.zlib(input, output), + Encoding.gzip => return this.gzip(input, output), + } + } + + pub fn zlib(this: *Compressor, input: []const u8, output: []u8) Result { + const result = libdeflate_zlib_compress(this, input.ptr, input.len, output.ptr, output.len); + return Result{ .read = input.len, .written = result, .status = Status.success }; + } + + pub fn gzip(this: *Compressor, input: []const u8, output: []u8) Result { + const result = libdeflate_gzip_compress(this, input.ptr, input.len, output.ptr, output.len); + return Result{ .read = input.len, .written = result, .status = Status.success }; + } +}; + +pub const Decompressor = opaque { + pub fn alloc() ?*Decompressor { + return libdeflate_alloc_decompressor(); + } + + pub fn deinit(this: *Decompressor) void { + return libdeflate_free_decompressor(this); + } + + pub fn deflate(this: *Decompressor, input: []const u8, output: []u8) Result { + var actual_in_bytes_ret: usize = input.len; + var actual_out_bytes_ret: usize = output.len; + const result = libdeflate_deflate_decompress_ex(this, input.ptr, input.len, output.ptr, output.len, &actual_in_bytes_ret, &actual_out_bytes_ret); + return Result{ .read = actual_in_bytes_ret, .written = actual_out_bytes_ret, .status = result }; + } + + pub fn zlib(this: *Decompressor, input: []const u8, output: []u8) Result { + var actual_in_bytes_ret: usize = input.len; + var actual_out_bytes_ret: usize = output.len; + const result = libdeflate_zlib_decompress_ex(this, input.ptr, input.len, output.ptr, output.len, &actual_in_bytes_ret, &actual_out_bytes_ret); + return Result{ .read = actual_in_bytes_ret, .written = actual_out_bytes_ret, .status = result }; + } + + pub fn gzip(this: *Decompressor, input: []const u8, output: []u8) Result { + var actual_in_bytes_ret: usize = input.len; + var actual_out_bytes_ret: usize = output.len; + const result = libdeflate_gzip_decompress_ex(this, input.ptr, input.len, output.ptr, output.len, &actual_in_bytes_ret, &actual_out_bytes_ret); + return Result{ .read = actual_in_bytes_ret, .written = actual_out_bytes_ret, .status = result }; + } + + pub fn decompress(this: *Decompressor, input: []const u8, output: []u8, encoding: Encoding) Result { + switch (encoding) { + Encoding.deflate => return this.deflate(input, output), + Encoding.zlib => return this.zlib(input, output), + Encoding.gzip => return this.gzip(input, output), + } + } +}; + +pub const Result = struct { + read: usize, + written: usize, + status: Status, +}; + +pub const Encoding = enum { + deflate, + zlib, + gzip, +}; + +pub extern fn libdeflate_alloc_decompressor() 
?*Decompressor; +pub extern fn libdeflate_alloc_decompressor_ex(options: ?*const Options) ?*Decompressor; +pub const LIBDEFLATE_SUCCESS = 0; +pub const LIBDEFLATE_BAD_DATA = 1; +pub const LIBDEFLATE_SHORT_OUTPUT = 2; +pub const LIBDEFLATE_INSUFFICIENT_SPACE = 3; +pub const Status = enum(c_uint) { + success = LIBDEFLATE_SUCCESS, + bad_data = LIBDEFLATE_BAD_DATA, + short_output = LIBDEFLATE_SHORT_OUTPUT, + insufficient_space = LIBDEFLATE_INSUFFICIENT_SPACE, +}; +pub extern fn libdeflate_deflate_decompress(decompressor: *Decompressor, in: ?*const anyopaque, in_nbytes: usize, out: ?*anyopaque, out_nbytes_avail: usize, actual_out_nbytes_ret: *usize) Status; +pub extern fn libdeflate_deflate_decompress_ex(decompressor: *Decompressor, in: ?*const anyopaque, in_nbytes: usize, out: ?*anyopaque, out_nbytes_avail: usize, actual_in_nbytes_ret: *usize, actual_out_nbytes_ret: *usize) Status; +pub extern fn libdeflate_zlib_decompress(decompressor: *Decompressor, in: ?*const anyopaque, in_nbytes: usize, out: ?*anyopaque, out_nbytes_avail: usize, actual_out_nbytes_ret: *usize) Status; +pub extern fn libdeflate_zlib_decompress_ex(decompressor: *Decompressor, in: ?*const anyopaque, in_nbytes: usize, out: ?*anyopaque, out_nbytes_avail: usize, actual_in_nbytes_ret: *usize, actual_out_nbytes_ret: *usize) Status; +pub extern fn libdeflate_gzip_decompress(decompressor: *Decompressor, in: ?*const anyopaque, in_nbytes: usize, out: ?*anyopaque, out_nbytes_avail: usize, actual_out_nbytes_ret: *usize) Status; +pub extern fn libdeflate_gzip_decompress_ex(decompressor: *Decompressor, in: ?*const anyopaque, in_nbytes: usize, out: ?*anyopaque, out_nbytes_avail: usize, actual_in_nbytes_ret: *usize, actual_out_nbytes_ret: *usize) Status; +pub extern fn libdeflate_free_decompressor(decompressor: *Decompressor) void; +pub extern fn libdeflate_adler32(adler: u32, buffer: ?*const anyopaque, len: usize) u32; +pub extern fn libdeflate_crc32(crc: u32, buffer: ?*const anyopaque, len: usize) u32; +pub extern fn libdeflate_set_memory_allocator(malloc_func: ?*const fn (usize) callconv(.C) ?*anyopaque, free_func: ?*const fn (?*anyopaque) callconv(.C) void) void; +pub const libdeflate_compressor = Compressor; +pub const libdeflate_options = Options; +pub const libdeflate_decompressor = Decompressor;
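A brief usage sketch of these bindings (a hypothetical convenience wrapper, not part of this patch; it assumes libdeflate is linked and that load() has installed the mimalloc allocator hook defined above):

fn gzipCompressAlloc(allocator: std.mem.Allocator, input: []const u8) ![]u8 {
    load(); // one-time libdeflate_set_memory_allocator(mi_malloc, mi_free)

    const compressor = Compressor.alloc(6) orelse return error.OutOfMemory;
    defer compressor.deinit();

    // Ask libdeflate for the worst-case output size, then compress into it.
    const bound = compressor.maxBytesNeeded(input, .gzip);
    const buf = try allocator.alloc(u8, bound);
    errdefer allocator.free(buf);

    // At the bound size the compressor cannot run out of space, so
    // result.written is the final compressed length; shrink to fit.
    const result = compressor.gzip(input, buf);
    return allocator.realloc(buf, result.written);
}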
diff --git a/src/deps/libuwsockets.cpp b/src/deps/libuwsockets.cpp index c1af335cd7..26da8228b2 100644 --- a/src/deps/libuwsockets.cpp +++ b/src/deps/libuwsockets.cpp @@ -1224,14 +1224,14 @@ extern "C" if (ssl) { uWS::HttpResponse<true> *uwsRes = (uWS::HttpResponse<true> *)res; - uwsRes->onWritable([handler, res, opcional_data](uint64_t a) - { return handler(res, a, opcional_data); }); + auto onWritable = reinterpret_cast<bool (*)(uWS::HttpResponse<true>*, uint64_t, void*)>(handler); + uwsRes->onWritable(opcional_data, onWritable); } else { uWS::HttpResponse<false> *uwsRes = (uWS::HttpResponse<false> *)res; - uwsRes->onWritable([handler, res, opcional_data](uint64_t a) - { return handler(res, a, opcional_data); }); + auto onWritable = reinterpret_cast<bool (*)(uWS::HttpResponse<false>*, uint64_t, void*)>(handler); + uwsRes->onWritable(opcional_data, onWritable); } } @@ -1252,11 +1252,10 @@ extern "C" if (ssl) { uWS::HttpResponse<true> *uwsRes = (uWS::HttpResponse<true> *)res; + auto* onAborted = reinterpret_cast<void (*)(uWS::HttpResponse<true>*, void*)>(handler); if (handler) { - uwsRes->onAborted( - [handler, res, opcional_data] - { handler(res, opcional_data); }); + uwsRes->onAborted(opcional_data, onAborted); } else { @@ -1266,11 +1265,10 @@ extern "C" else { uWS::HttpResponse<false> *uwsRes = (uWS::HttpResponse<false> *)res; + auto* onAborted = reinterpret_cast<void (*)(uWS::HttpResponse<false>*, void*)>(handler); if (handler) { - uwsRes->onAborted( - [handler, res, opcional_data] - { handler(res, opcional_data); }); + uwsRes->onAborted(opcional_data, onAborted); } else { @@ -1288,21 +1286,21 @@ extern "C" if (ssl) { uWS::HttpResponse<true> *uwsRes = (uWS::HttpResponse<true> *)res; + auto onData = reinterpret_cast<void (*)(uWS::HttpResponse<true>* response, const char* chunk, size_t chunk_length, bool, void*)>(handler); if (handler) { - uwsRes->onData([handler, res, opcional_data](auto chunk, bool is_end) - { handler(res, chunk.data(), chunk.length(), is_end, opcional_data); }); + uwsRes->onData(opcional_data, onData); } else { - uwsRes->onData(nullptr); + uwsRes->onData(opcional_data, nullptr); } } else { uWS::HttpResponse<false> *uwsRes = (uWS::HttpResponse<false> *)res; + auto onData = reinterpret_cast<void (*)(uWS::HttpResponse<false>* response, const char* chunk, size_t chunk_length, bool, void*)>(handler); if (handler) { - uwsRes->onData([handler, res, opcional_data](auto chunk, bool is_end) - { handler(res, chunk.data(), chunk.length(), is_end, opcional_data); }); + uwsRes->onData(opcional_data, onData); } else { - uwsRes->onData(nullptr); + uwsRes->onData(opcional_data, nullptr); } } } @@ -1632,4 +1630,9 @@ extern "C" return strlen(*dest); } } + + // we need to manually call this at thread exit + extern "C" void bun_clear_loop_at_thread_exit() { + uWS::Loop::clearLoopAtThreadExit(); + } } diff --git a/src/deps/uws.zig b/src/deps/uws.zig index c630dbaf93..4878d8dfad 100644 --- a/src/deps/uws.zig +++ b/src/deps/uws.zig @@ -1086,7 +1086,7 @@ pub const Timer = opaque { pub const SocketContext = opaque { pub fn getNativeHandle(this: *SocketContext, comptime ssl: bool) *anyopaque { - return us_socket_context_get_native_handle(comptime @as(i32, @intFromBool(ssl)), this).?; + return us_socket_context_get_native_handle(@intFromBool(ssl), this).?; } fn _deinit_ssl(this: *SocketContext) void { @@ -1143,10 +1143,7 @@ pub const SocketContext = opaque { } fn getLoop(this: *SocketContext, ssl: bool) ?*Loop { - if (ssl) { - return us_socket_context_loop(@as(i32, 1), this); - } - return us_socket_context_loop(@as(i32, 0), this); + return us_socket_context_loop(@intFromBool(ssl), this); } /// closes and deinit the SocketContexts @@ -1164,7 +1161,7 @@ pub const SocketContext = opaque { pub fn close(this: *SocketContext, ssl: bool) void { debug("us_socket_context_close({d})", .{@intFromPtr(this)}); - us_socket_context_close(@as(i32, @intFromBool(ssl)), this); + us_socket_context_close(@intFromBool(ssl), this); } pub fn ext(this: *SocketContext, ssl: bool, comptime ContextType: type) ?*ContextType { @@ -3090,3 +3087,8 @@ pub const udp = struct { extern fn us_udp_packet_buffer_payload(buf: ?*PacketBuffer, index: c_int) [*]u8; extern fn us_udp_packet_buffer_payload_length(buf: ?*PacketBuffer, index: c_int) c_int; }; + +extern fn bun_clear_loop_at_thread_exit() void; +pub fn onThreadExit() void { + bun_clear_loop_at_thread_exit(); +} diff --git a/src/fd.zig b/src/fd.zig index 27e9745e2e..b8c7fec50e 100644 --- a/src/fd.zig +++ b/src/fd.zig @@ -272,7 +272,6 @@ pub const FDImpl = packed struct { if (env.isDebug) { if (result) |err| { if (err.errno == @intFromEnum(posix.E.BADF)) { - // TODO(@paperdave): Zig Compiler Bug, if you remove `this` from the log. An error is correctly printed, but with the wrong reference trace bun.Output.debugWarn("close({s}) = EBADF. 
This is an indication of a file descriptor UAF", .{this_fmt}); } else { log("close({s}) = {}", .{ this_fmt, err }); diff --git a/src/feature_flags.zig b/src/feature_flags.zig index 7b2e14dbbe..8bd8141608 100644 --- a/src/feature_flags.zig +++ b/src/feature_flags.zig @@ -155,8 +155,7 @@ pub const export_star_redirect = false; pub const streaming_file_uploads_for_http_client = true; -// TODO: fix concurrent transpiler on Windows -pub const concurrent_transpiler = !env.isWindows; +pub const concurrent_transpiler = true; // https://github.com/oven-sh/bun/issues/5426#issuecomment-1813865316 pub const disable_auto_js_to_ts_in_node_modules = true; @@ -181,3 +180,17 @@ pub const breaking_changes_1_2 = false; pub const nonblocking_stdout_and_stderr_on_posix = false; pub const postgresql = env.is_canary or env.isDebug; + +// TODO: fix Windows-only test failures in fetch-preconnect.test.ts +pub const is_fetch_preconnect_supported = env.isPosix; + +pub const libdeflate_supported = env.isNative; + +// Mostly exists as a way to turn it off later, if necessary. +pub fn isLibdeflateEnabled() bool { + if (!libdeflate_supported) { + return false; + } + + return !bun.getRuntimeFeatureFlag("BUN_FEATURE_FLAG_NO_LIBDEFLATE"); +} diff --git a/src/fmt.zig b/src/fmt.zig index 8bb95896be..938469c277 100644 --- a/src/fmt.zig +++ b/src/fmt.zig @@ -534,140 +534,78 @@ pub const QuickAndDirtyJavaScriptSyntaxHighlighter = struct { pub fn colorCode(this: Keyword) ColorCode { return switch (this) { - Keyword.abstract => ColorCode.blue, - Keyword.as => ColorCode.blue, - Keyword.@"async" => ColorCode.magenta, - Keyword.@"await" => ColorCode.magenta, - Keyword.case => ColorCode.magenta, - Keyword.@"catch" => ColorCode.magenta, - Keyword.class => ColorCode.magenta, - Keyword.@"const" => ColorCode.magenta, - Keyword.@"continue" => ColorCode.magenta, - Keyword.debugger => ColorCode.magenta, - Keyword.default => ColorCode.magenta, - Keyword.delete => ColorCode.red, - Keyword.do => ColorCode.magenta, - Keyword.@"else" => ColorCode.magenta, - Keyword.@"break" => ColorCode.magenta, - Keyword.undefined => ColorCode.orange, - Keyword.@"enum" => ColorCode.blue, - Keyword.@"export" => ColorCode.magenta, - Keyword.extends => ColorCode.magenta, - Keyword.false => ColorCode.orange, - Keyword.finally => ColorCode.magenta, - Keyword.@"for" => ColorCode.magenta, - Keyword.function => ColorCode.magenta, - Keyword.@"if" => ColorCode.magenta, - Keyword.implements => ColorCode.blue, - Keyword.import => ColorCode.magenta, - Keyword.in => ColorCode.magenta, - Keyword.instanceof => ColorCode.magenta, - Keyword.interface => ColorCode.blue, - Keyword.let => ColorCode.magenta, - Keyword.new => ColorCode.magenta, - Keyword.null => ColorCode.orange, - Keyword.package => ColorCode.magenta, - Keyword.private => ColorCode.blue, - Keyword.protected => ColorCode.blue, - Keyword.public => ColorCode.blue, - Keyword.@"return" => ColorCode.magenta, - Keyword.static => ColorCode.magenta, - Keyword.super => ColorCode.magenta, - Keyword.@"switch" => ColorCode.magenta, - Keyword.this => ColorCode.orange, - Keyword.throw => ColorCode.magenta, - Keyword.true => ColorCode.orange, - Keyword.@"try" => ColorCode.magenta, - Keyword.type => ColorCode.blue, - Keyword.typeof => ColorCode.magenta, - Keyword.@"var" => ColorCode.magenta, - Keyword.void => ColorCode.magenta, - Keyword.@"while" => ColorCode.magenta, - Keyword.with => ColorCode.magenta, - Keyword.yield => ColorCode.magenta, - Keyword.string => ColorCode.blue, - Keyword.number => ColorCode.blue, - Keyword.boolean => 
ColorCode.blue, - Keyword.symbol => ColorCode.blue, - Keyword.any => ColorCode.blue, - Keyword.object => ColorCode.blue, - Keyword.unknown => ColorCode.blue, - Keyword.never => ColorCode.blue, - Keyword.namespace => ColorCode.blue, - Keyword.declare => ColorCode.blue, - Keyword.readonly => ColorCode.blue, + .abstract => .blue, + .as => .blue, + .@"async" => .magenta, + .@"await" => .magenta, + .case => .magenta, + .@"catch" => .magenta, + .class => .magenta, + .@"const" => .magenta, + .@"continue" => .magenta, + .debugger => .magenta, + .default => .magenta, + .delete => .red, + .do => .magenta, + .@"else" => .magenta, + .@"break" => .magenta, + .undefined => .orange, + .@"enum" => .blue, + .@"export" => .magenta, + .extends => .magenta, + .false => .orange, + .finally => .magenta, + .@"for" => .magenta, + .function => .magenta, + .@"if" => .magenta, + .implements => .blue, + .import => .magenta, + .in => .magenta, + .instanceof => .magenta, + .interface => .blue, + .let => .magenta, + .new => .magenta, + .null => .orange, + .package => .magenta, + .private => .blue, + .protected => .blue, + .public => .blue, + .@"return" => .magenta, + .static => .magenta, + .super => .magenta, + .@"switch" => .magenta, + .this => .orange, + .throw => .magenta, + .true => .orange, + .@"try" => .magenta, + .type => .blue, + .typeof => .magenta, + .@"var" => .magenta, + .void => .magenta, + .@"while" => .magenta, + .with => .magenta, + .yield => .magenta, + .string => .blue, + .number => .blue, + .boolean => .blue, + .symbol => .blue, + .any => .blue, + .object => .blue, + .unknown => .blue, + .never => .blue, + .namespace => .blue, + .declare => .blue, + .readonly => .blue, }; } }; - pub const Keywords = ComptimeStringMap(Keyword, .{ - .{ "abstract", Keyword.abstract }, - .{ "any", Keyword.any }, - .{ "as", Keyword.as }, - .{ "async", Keyword.@"async" }, - .{ "await", Keyword.@"await" }, - .{ "boolean", Keyword.boolean }, - .{ "break", Keyword.@"break" }, - .{ "case", Keyword.case }, - .{ "catch", Keyword.@"catch" }, - .{ "class", Keyword.class }, - .{ "const", Keyword.@"const" }, - .{ "continue", Keyword.@"continue" }, - .{ "debugger", Keyword.debugger }, - .{ "declare", Keyword.declare }, - .{ "default", Keyword.default }, - .{ "delete", Keyword.delete }, - .{ "do", Keyword.do }, - .{ "else", Keyword.@"else" }, - .{ "enum", Keyword.@"enum" }, - .{ "export", Keyword.@"export" }, - .{ "extends", Keyword.extends }, - .{ "false", Keyword.false }, - .{ "finally", Keyword.finally }, - .{ "for", Keyword.@"for" }, - .{ "function", Keyword.function }, - .{ "if", Keyword.@"if" }, - .{ "implements", Keyword.implements }, - .{ "import", Keyword.import }, - .{ "in", Keyword.in }, - .{ "instanceof", Keyword.instanceof }, - .{ "interface", Keyword.interface }, - .{ "let", Keyword.let }, - .{ "namespace", Keyword.namespace }, - .{ "never", Keyword.never }, - .{ "new", Keyword.new }, - .{ "null", Keyword.null }, - .{ "number", Keyword.number }, - .{ "object", Keyword.object }, - .{ "package", Keyword.package }, - .{ "private", Keyword.private }, - .{ "protected", Keyword.protected }, - .{ "public", Keyword.public }, - .{ "readonly", Keyword.readonly }, - .{ "return", Keyword.@"return" }, - .{ "static", Keyword.static }, - .{ "string", Keyword.string }, - .{ "super", Keyword.super }, - .{ "switch", Keyword.@"switch" }, - .{ "symbol", Keyword.symbol }, - .{ "this", Keyword.this }, - .{ "throw", Keyword.throw }, - .{ "true", Keyword.true }, - .{ "try", Keyword.@"try" }, - .{ "type", Keyword.type }, - .{ "typeof", 
Keyword.typeof }, - .{ "undefined", Keyword.undefined }, - .{ "unknown", Keyword.unknown }, - .{ "var", Keyword.@"var" }, - .{ "void", Keyword.void }, - .{ "while", Keyword.@"while" }, - .{ "with", Keyword.with }, - .{ "yield", Keyword.yield }, - }); + pub const Keywords = bun.ComptimeEnumMap(Keyword); - pub fn format(this: @This(), comptime _: []const u8, _: fmt.FormatOptions, writer: anytype) !void { - const text = this.text; + pub fn format(this: @This(), comptime unused_fmt: []const u8, _: fmt.FormatOptions, writer: anytype) !void { + comptime bun.assert(unused_fmt.len == 0); + var text = this.text; if (this.limited) { if (!this.enable_colors or text.len > 2048 or text.len == 0 or !strings.isAllASCII(text)) { try writer.writeAll(text); @@ -675,22 +613,21 @@ pub const QuickAndDirtyJavaScriptSyntaxHighlighter = struct { } } - var remain = text; var prev_keyword: ?Keyword = null; - outer: while (remain.len > 0) { - if (js_lexer.isIdentifierStart(remain[0])) { + outer: while (text.len > 0) { + if (js_lexer.isIdentifierStart(text[0])) { var i: usize = 1; - while (i < remain.len and js_lexer.isIdentifierContinue(remain[i])) { + while (i < text.len and js_lexer.isIdentifierContinue(text[i])) { i += 1; } - if (Keywords.get(remain[0..i])) |keyword| { + if (Keywords.get(text[0..i])) |keyword| { if (keyword != .as) prev_keyword = keyword; const code = keyword.colorCode(); - try writer.print(Output.prettyFmt("{s}{s}", true), .{ code.color(), remain[0..i] }); + try writer.print(Output.prettyFmt("{s}{s}", true), .{ code.color(), text[0..i] }); } else { write: { if (prev_keyword) |prev| { @@ -698,20 +635,20 @@ pub const QuickAndDirtyJavaScriptSyntaxHighlighter = struct { .new => { prev_keyword = null; - if (i < remain.len and remain[i] == '(') { - try writer.print(Output.prettyFmt("{s}", true), .{remain[0..i]}); + if (i < text.len and text[i] == '(') { + try writer.print(Output.prettyFmt("{s}", true), .{text[0..i]}); break :write; } }, .abstract, .namespace, .declare, .type, .interface => { - try writer.print(Output.prettyFmt("{s}", true), .{remain[0..i]}); + try writer.print(Output.prettyFmt("{s}", true), .{text[0..i]}); prev_keyword = null; break :write; }, .import => { - if (strings.eqlComptime(remain[0..i], "from")) { + if (strings.eqlComptime(text[0..i], "from")) { const code = ColorCode.magenta; - try writer.print(Output.prettyFmt("{s}{s}", true), .{ code.color(), remain[0..i] }); + try writer.print(Output.prettyFmt("{s}{s}", true), .{ code.color(), text[0..i] }); prev_keyword = null; break :write; @@ -721,25 +658,25 @@ pub const QuickAndDirtyJavaScriptSyntaxHighlighter = struct { } } - try writer.writeAll(remain[0..i]); + try writer.writeAll(text[0..i]); } } - remain = remain[i..]; + text = text[i..]; } else { - switch (remain[0]) { + switch (text[0]) { '0'...'9' => { prev_keyword = null; var i: usize = 1; - if (remain.len > 1 and remain[0] == '0' and remain[1] == 'x') { + if (text.len > 1 and text[0] == '0' and text[1] == 'x') { i += 1; - while (i < remain.len and switch (remain[i]) { + while (i < text.len and switch (text[i]) { '0'...'9', 'a'...'f', 'A'...'F' => true, else => false, }) { i += 1; } } else { - while (i < remain.len and switch (remain[i]) { + while (i < text.len and switch (text[i]) { '0'...'9', '.', 'e', 'E', 'x', 'X', 'b', 'B', 'o', 'O' => true, else => false, }) { @@ -747,30 +684,30 @@ pub const QuickAndDirtyJavaScriptSyntaxHighlighter = struct { } } - try writer.print(Output.prettyFmt("{s}", true), .{remain[0..i]}); - remain = remain[i..]; + try 
writer.print(Output.prettyFmt("{s}", true), .{text[0..i]}); + text = text[i..]; }, inline '`', '"', '\'' => |char| { prev_keyword = null; var i: usize = 1; - while (i < remain.len and remain[i] != char) { - if (comptime char == '`') { - if (remain[i] == '$' and i + 1 < remain.len and remain[i + 1] == '{') { + while (i < text.len and text[i] != char) { + if (char == '`') { + if (text[i] == '$' and i + 1 < text.len and text[i + 1] == '{') { const curly_start = i; i += 2; - while (i < remain.len and remain[i] != '}') { - if (remain[i] == '\\') { + while (i < text.len and text[i] != '}') { + if (text[i] == '\\') { i += 1; } i += 1; } - try writer.print(Output.prettyFmt("{s}", true), .{remain[0..curly_start]}); + try writer.print(Output.prettyFmt("{s}", true), .{text[0..curly_start]}); try writer.writeAll("${"); const curly_remain = QuickAndDirtyJavaScriptSyntaxHighlighter{ - .text = remain[curly_start + 2 .. i], + .text = text[curly_start + 2 .. i], .enable_colors = this.enable_colors, .limited = false, }; @@ -779,22 +716,22 @@ pub const QuickAndDirtyJavaScriptSyntaxHighlighter = struct { try curly_remain.format("", .{}, writer); } - if (i < remain.len and remain[i] == '}') { + if (i < text.len and text[i] == '}') { i += 1; } try writer.writeAll("}"); - remain = remain[i..]; + text = text[i..]; i = 0; - if (remain.len > 0 and remain[0] == char) { + if (text.len > 0 and text[0] == char) { try writer.writeAll(Output.prettyFmt("`", true)); - remain = remain[1..]; + text = text[1..]; continue :outer; } continue; } } - if (i + 1 < remain.len and remain[i] == '\\') { + if (i + 1 < text.len and text[i] == '\\') { i += 1; } @@ -802,58 +739,58 @@ pub const QuickAndDirtyJavaScriptSyntaxHighlighter = struct { } // Include the trailing quote, if any - i += @as(usize, @intFromBool(i > 1 and i < remain.len and remain[i] == char)); + i += @intFromBool(i < text.len); - try writer.print(Output.prettyFmt("{s}", true), .{remain[0..i]}); - remain = remain[i..]; + try writer.print(Output.prettyFmt("{s}", true), .{text[0..i]}); + text = text[i..]; }, '/' => { prev_keyword = null; var i: usize = 1; // the start of a line comment - if (i < remain.len and remain[i] == '/') { - while (i < remain.len and remain[i] != '\n') { + if (i < text.len and text[i] == '/') { + while (i < text.len and text[i] != '\n') { i += 1; } - const remain_to_print = remain[0..i]; - if (i < remain.len and remain[i] == '\n') { + const remain_to_print = text[0..i]; + if (i < text.len and text[i] == '\n') { i += 1; } - if (i < remain.len and remain[i] == '\r') { + if (i < text.len and text[i] == '\r') { i += 1; } try writer.print(Output.prettyFmt("{s}", true), .{remain_to_print}); - remain = remain[i..]; + text = text[i..]; continue; } as_multiline_comment: { - if (i < remain.len and remain[i] == '*') { + if (i < text.len and text[i] == '*') { i += 1; - while (i + 2 < remain.len and !strings.eqlComptime(remain[i..][0..2], "*/")) { + while (i + 2 < text.len and !strings.eqlComptime(text[i..][0..2], "*/")) { i += 1; } - if (i + 2 < remain.len and strings.eqlComptime(remain[i..][0..2], "*/")) { + if (i + 2 < text.len and strings.eqlComptime(text[i..][0..2], "*/")) { i += 2; } else { i = 1; break :as_multiline_comment; } - try writer.print(Output.prettyFmt("{s}", true), .{remain[0..i]}); - remain = remain[i..]; + try writer.print(Output.prettyFmt("{s}", true), .{text[0..i]}); + text = text[i..]; continue; } } - try writer.writeAll(remain[0..i]); - remain = remain[i..]; + try writer.writeAll(text[0..i]); + text = text[i..]; }, '}', '{' => { // 
support potentially highlighting "from" in an import statement @@ -861,39 +798,39 @@ pub const QuickAndDirtyJavaScriptSyntaxHighlighter = struct { prev_keyword = null; } - try writer.writeAll(remain[0..1]); - remain = remain[1..]; + try writer.writeAll(text[0..1]); + text = text[1..]; }, '[', ']' => { prev_keyword = null; - try writer.writeAll(remain[0..1]); - remain = remain[1..]; + try writer.writeAll(text[0..1]); + text = text[1..]; }, ';' => { prev_keyword = null; try writer.print(Output.prettyFmt(";", true), .{}); - remain = remain[1..]; + text = text[1..]; }, '.' => { prev_keyword = null; var i: usize = 1; - if (remain.len > 1 and (js_lexer.isIdentifierStart(remain[1]) or remain[1] == '#')) { + if (text.len > 1 and (js_lexer.isIdentifierStart(text[1]) or text[1] == '#')) { i = 2; - while (i < remain.len and js_lexer.isIdentifierContinue(remain[i])) { + while (i < text.len and js_lexer.isIdentifierContinue(text[i])) { i += 1; } - if (i < remain.len and (remain[i] == '(')) { - try writer.print(Output.prettyFmt("{s}", true), .{remain[0..i]}); - remain = remain[i..]; + if (i < text.len and (text[i] == '(')) { + try writer.print(Output.prettyFmt("{s}", true), .{text[0..i]}); + text = text[i..]; continue; } i = 1; } - try writer.writeAll(remain[0..1]); - remain = remain[1..]; + try writer.writeAll(text[0..1]); + text = text[1..]; }, '<' => { @@ -901,44 +838,44 @@ pub const QuickAndDirtyJavaScriptSyntaxHighlighter = struct { // JSX jsx: { - if (remain.len > 1 and remain[0] == '/') { + if (text.len > 1 and text[0] == '/') { i = 2; } prev_keyword = null; - while (i < remain.len and js_lexer.isIdentifierContinue(remain[i])) { + while (i < text.len and js_lexer.isIdentifierContinue(text[i])) { i += 1; } else { i = 1; break :jsx; } - while (i < remain.len and remain[i] != '>') { + while (i < text.len and text[i] != '>') { i += 1; - if (i < remain.len and remain[i] == '<') { + if (i < text.len and text[i] == '<') { i = 1; break :jsx; } } - if (i < remain.len and remain[i] == '>') { + if (i < text.len and text[i] == '>') { i += 1; - try writer.print(Output.prettyFmt("{s}", true), .{remain[0..i]}); - remain = remain[i..]; + try writer.print(Output.prettyFmt("{s}", true), .{text[0..i]}); + text = text[i..]; continue; } i = 1; } - try writer.print(Output.prettyFmt("{s}", true), .{remain[0..i]}); - remain = remain[i..]; + try writer.print(Output.prettyFmt("{s}", true), .{text[0..i]}); + text = text[i..]; }, else => { - try writer.writeAll(remain[0..1]); - remain = remain[1..]; + try writer.writeAll(text[0..1]); + text = text[1..]; }, } } diff --git a/src/generated_versions_list.zig b/src/generated_versions_list.zig index 6a39381135..a2dbf7bd8c 100644 --- a/src/generated_versions_list.zig +++ b/src/generated_versions_list.zig @@ -4,11 +4,12 @@ pub const boringssl = "29a2cd359458c9384694b75456026e4b57e3e567"; pub const libarchive = "898dc8319355b7e985f68a9819f182aaed61b53a"; pub const mimalloc = "4c283af60cdae205df5a872530c77e2a6a307d43"; pub const picohttpparser = "066d2b1e9ab820703db0837a7255d92d30f0c9f5"; -pub const webkit = "b49be549da59347762aa83f849a65158d2a0d724"; +pub const webkit = "f9a0fda2d2b2fd001a00bfcf8e7917a56b382516"; pub const zig = @import("std").fmt.comptimePrint("{}", .{@import("builtin").zig_version}); pub const zlib = "886098f3f339617b4243b286f5ed364b9989e245"; pub const tinycc = "ab631362d839333660a265d3084d8ff060b96753"; pub const lolhtml = "8d4c273ded322193d017042d1f48df2766b0f88b"; pub const c_ares = "d1722e6e8acaf10eb73fa995798a9cd421d9f85e"; +pub const libdeflate = 
"dc76454a39e7e83b68c3704b6e3784654f8d5ac5"; pub const zstd = "794ea1b0afca0f020f4e57b6732332231fb23c70"; pub const lshpack = "3d0f1fc1d6e66a642e7a98c55deb38aa986eb4b0"; diff --git a/src/heap_breakdown.zig b/src/heap_breakdown.zig index 747d2a769d..d77b12917a 100644 --- a/src/heap_breakdown.zig +++ b/src/heap_breakdown.zig @@ -6,44 +6,46 @@ const vm_size_t = usize; pub const enabled = Environment.allow_assert and Environment.isMac; -pub fn allocator(comptime T: type) std.mem.Allocator { - return getZone(T).allocator(); +fn heapLabel(comptime T: type) [:0]const u8 { + const base_name = if (@hasDecl(T, "heap_label")) + T.heap_label + else + bun.meta.typeBaseName(@typeName(T)); + return "Bun__" ++ base_name; } -pub fn getZone(comptime T: type) *Zone { +pub fn allocator(comptime T: type) std.mem.Allocator { + return namedAllocator(comptime heapLabel(T)); +} +pub fn namedAllocator(comptime name: [:0]const u8) std.mem.Allocator { + return getZone(name).allocator(); +} + +pub fn getZoneT(comptime T: type) *Zone { + return getZone(comptime heapLabel(T)); +} + +pub fn getZone(comptime name: [:0]const u8) *Zone { comptime bun.assert(enabled); const static = struct { - pub var zone: std.atomic.Value(?*Zone) = .{ .raw = null }; - pub var lock: bun.Lock = bun.Lock.init(); - }; - - return static.zone.load(.monotonic) orelse brk: { - static.lock.lock(); - defer static.lock.unlock(); - - if (static.zone.load(.monotonic)) |z| { - break :brk z; + pub var zone: *Zone = undefined; + pub fn initOnce() void { + zone = Zone.init(name); } - const z = Zone.init(T); - static.zone.store(z, .monotonic); - break :brk z; + pub var once = std.once(initOnce); }; + + static.once.call(); + return static.zone; } pub const Zone = opaque { - pub fn init(comptime T: type) *Zone { + pub fn init(comptime name: [:0]const u8) *Zone { const zone = malloc_create_zone(0, 0); - const title: [:0]const u8 = comptime title: { - const base_name = if (@hasDecl(T, "heap_label")) - T.heap_label - else - bun.meta.typeBaseName(@typeName(T)); - break :title "Bun__" ++ base_name; - }; - malloc_set_zone_name(zone, title.ptr); + malloc_set_zone_name(zone, name.ptr); return zone; } @@ -78,8 +80,8 @@ pub const Zone = opaque { return false; } - fn rawFree(zone: *anyopaque, buf: [*]u8, _: u8, _: usize) void { - malloc_zone_free(@ptrCast(zone), @ptrCast(buf)); + fn rawFree(zone: *anyopaque, buf: []u8, _: u8, _: usize) void { + malloc_zone_free(@ptrCast(zone), @ptrCast(buf.ptr)); } pub const vtable = std.mem.Allocator.VTable{ @@ -97,8 +99,10 @@ pub const Zone = opaque { /// Create a single-item pointer with initialized data. 
pub inline fn create(zone: *Zone, comptime T: type, data: T) *T {
+        const align_of_t: usize = @alignOf(T);
+        const log2_align_of_t = @ctz(align_of_t);
         const ptr: *T = @alignCast(@ptrCast(
-            rawAlloc(zone, @sizeOf(T), @alignOf(T), @returnAddress()) orelse bun.outOfMemory(),
+            rawAlloc(zone, @sizeOf(T), log2_align_of_t, @returnAddress()) orelse bun.outOfMemory(),
         ));
         ptr.* = data;
         return ptr;
     }
diff --git a/src/hive_array.zig b/src/hive_array.zig
index 0f1ee8d8e5..c3479da816 100644
--- a/src/hive_array.zig
+++ b/src/hive_array.zig
@@ -67,7 +67,7 @@ pub fn HiveArray(comptime T: type, comptime capacity: u16) type {
     }
 
     pub const Fallback = struct {
-        hive: HiveArray(T, capacity),
+        hive: if (capacity > 0) HiveArray(T, capacity) else void,
         allocator: std.mem.Allocator,
 
         pub const This = @This();
@@ -75,37 +75,53 @@
         pub fn init(allocator: std.mem.Allocator) This {
             return .{
                 .allocator = allocator,
-                .hive = HiveArray(T, capacity).init(),
+                .hive = if (capacity > 0) HiveArray(T, capacity).init() else {},
             };
         }
 
         pub fn get(self: *This) *T {
-            if (self.hive.get()) |value| {
-                return value;
+            if (comptime capacity > 0) {
+                if (self.hive.get()) |value| {
+                    return value;
+                }
             }
 
             return self.allocator.create(T) catch unreachable;
         }
 
         pub fn getAndSeeIfNew(self: *This, new: *bool) *T {
-            if (self.hive.get()) |value| {
-                new.* = false;
-                return value;
+            if (comptime capacity > 0) {
+                if (self.hive.get()) |value| {
+                    new.* = false;
+                    return value;
+                }
             }
 
             return self.allocator.create(T) catch unreachable;
         }
 
         pub fn tryGet(self: *This) !*T {
-            if (self.hive.get()) |value| {
-                return value;
+            if (comptime capacity > 0) {
+                if (self.hive.get()) |value| {
+                    return value;
+                }
             }
 
             return try self.allocator.create(T);
         }
 
+        pub fn in(self: *const This, value: *const T) bool {
+            if (comptime capacity > 0) {
+                if (self.hive.in(value)) return true;
+            }
+
+            return false;
+        }
+
         pub fn put(self: *This, value: *T) void {
-            if (self.hive.put(value)) return;
+            if (comptime capacity > 0) {
+                if (self.hive.put(value)) return;
+            }
 
             self.allocator.destroy(value);
         }
diff --git a/src/http.zig b/src/http.zig
index 187ae3d31d..5e461392eb 100644
--- a/src/http.zig
+++ b/src/http.zig
@@ -350,6 +350,11 @@ fn NewHTTPContext(comptime ssl: bool) type {
             socket.close(.failure);
         }
 
+        fn closeSocket(socket: HTTPSocket) void {
+            markSocketAsDead(socket);
+            socket.close(.normal);
+        }
+
         fn getTagged(ptr: *anyopaque) ActiveSocket {
             return ActiveSocket.from(bun.cast(**anyopaque, ptr).*);
         }
@@ -472,8 +477,7 @@ fn NewHTTPContext(comptime ssl: bool) type {
                 }
             }
 
-            markSocketAsDead(socket);
-            socket.close(.normal);
+            closeSocket(socket);
         }
 
         pub const Handler = struct {
@@ -487,7 +491,7 @@ fn NewHTTPContext(comptime ssl: bool) type {
                 }
 
                 if (active.get(PooledSocket)) |pooled| {
-                    assert(context().pending_sockets.put(pooled));
+                    addMemoryBackToPool(pooled);
                     return;
                 }
 
@@ -523,33 +527,43 @@
                         if (!client.checkServerIdentity(comptime ssl, socket, handshake_error)) {
                             client.did_have_handshaking_error = true;
+                            if (!socket.isClosed()) terminateSocket(socket);
                             return;
                         }
 
                         return client.firstCall(comptime ssl, socket);
                     } else {
                         // if `authorized` itself is false, this means that the connection was rejected
-                        markSocketAsDead(socket);
+                        terminateSocket(socket);
                         if (client.state.stage != .done and client.state.stage != .fail)
                             client.fail(error.ConnectionRefused);
                         return;
                     }
                 }
 
-                // we can reach here if we are aborted
-                if (!socket.isClosed()) {
+                if (socket.isClosed()) {
+
markSocketAsDead(socket);
                     if (active.get(PooledSocket)) |pooled| {
-                        assert(context().pending_sockets.put(pooled));
-                        return;
+                        addMemoryBackToPool(pooled);
                     }
-                    terminateSocket(socket);
-                } else {
-                    if (active.get(PooledSocket)) |pooled| {
-                        assert(context().pending_sockets.put(pooled));
+                    return;
+                }
+
+                if (authorized) {
+                    if (active.is(PooledSocket)) {
+                        // Allow pooled sockets to be reused if the handshake was successful.
+                        socket.setTimeout(0);
+                        socket.setTimeoutMinutes(5);
                         return;
                     }
                 }
+
+                if (active.get(PooledSocket)) |pooled| {
+                    addMemoryBackToPool(pooled);
+                }
+
+                terminateSocket(socket);
             }
 
             pub fn onClose(
                 ptr: *anyopaque,
@@ -565,11 +579,16 @@ fn NewHTTPContext(comptime ssl: bool) type {
                 }
 
                 if (tagged.get(PooledSocket)) |pooled| {
-                    assert(context().pending_sockets.put(pooled));
+                    addMemoryBackToPool(pooled);
                 }
 
                 return;
             }
+
+            fn addMemoryBackToPool(pooled: *PooledSocket) void {
+                assert(context().pending_sockets.put(pooled));
+            }
+
            pub fn onData(
                ptr: *anyopaque,
                socket: HTTPSocket,
@@ -620,14 +639,12 @@ fn NewHTTPContext(comptime ssl: bool) type {
                 socket: HTTPSocket,
             ) void {
                 const tagged = getTagged(ptr);
                 if (tagged.get(HTTPClient)) |client| {
-                    return client.onTimeout(
-                        comptime ssl,
-                        socket,
-                    );
+                    return client.onTimeout(comptime ssl, socket);
                 } else if (tagged.get(PooledSocket)) |pooled| {
-                    assert(context().pending_sockets.put(pooled));
+                    // If a socket has been sitting around for 5 minutes,
+                    // let's close it and remove it from the pool.
+                    addMemoryBackToPool(pooled);
                 }
 
                 terminateSocket(socket);
@@ -640,23 +657,23 @@ fn NewHTTPContext(comptime ssl: bool) type {
                 const tagged = getTagged(ptr);
                 markSocketAsDead(socket);
                 if (tagged.get(HTTPClient)) |client| {
-                    return client.onConnectError(
-                        comptime ssl,
-                        socket,
-                    );
+                    client.onConnectError();
                 } else if (tagged.get(PooledSocket)) |pooled| {
-                    assert(context().pending_sockets.put(pooled));
+                    addMemoryBackToPool(pooled);
                 }
-
-                if (comptime Environment.isDebug)
-                    // caller should already have closed it.
-                    bun.debugAssert(socket.isClosed());
+                // us_connecting_socket_close is always called internally by uSockets
             }
 
             pub fn onEnd(
                 _: *anyopaque,
                 socket: HTTPSocket,
             ) void {
-                // TCP fin gets closed immediately.
+                // On TCP FIN, the socket must be closed, but we must keep the original tagged
+                // pointer so that its onClose callback is called.
+                //
+                // Three possible states:
+                // 1. HTTP Keep-Alive socket: it must be removed from the pool
+                // 2. HTTP Client socket: it might need to be retried
+                // 3.
Dead socket: it is already marked as dead socket.close(.failure); } }; @@ -760,6 +777,8 @@ pub const HTTPThread = struct { has_awoken: std.atomic.Value(bool) = std.atomic.Value(bool).init(false), timer: std.time.Timer, + lazy_libdeflater: ?*LibdeflateState = null, + const ShutdownMessage = struct { async_http_id: u32, is_tls: bool, @@ -767,6 +786,23 @@ pub const HTTPThread = struct { const threadlog = Output.scoped(.HTTPThread, true); + pub const LibdeflateState = struct { + decompressor: *bun.libdeflate.Decompressor = undefined, + shared_buffer: [512 * 1024]u8 = undefined, + + pub usingnamespace bun.New(@This()); + }; + + pub fn deflater(this: *@This()) *LibdeflateState { + if (this.lazy_libdeflater == null) { + this.lazy_libdeflater = LibdeflateState.new(.{ + .decompressor = bun.libdeflate.Decompressor.alloc() orelse bun.outOfMemory(), + }); + } + + return this.lazy_libdeflater.?; + } + fn initOnce() void { http_thread = .{ .loop = undefined, @@ -778,7 +814,7 @@ pub const HTTPThread = struct { }, .timer = std.time.Timer.start() catch unreachable, }; - + bun.libdeflate.load(); const thread = std.Thread.spawn( .{ .stack_size = bun.default_thread_stack_size, @@ -889,10 +925,11 @@ pub const HTTPThread = struct { } while (this.queued_tasks.pop()) |http| { - var cloned = default_allocator.create(AsyncHTTP) catch unreachable; - cloned.* = http.*; - cloned.real = http; - cloned.onStart(); + var cloned = ThreadlocalAsyncHTTP.new(.{ + .async_http = http.*, + }); + cloned.async_http.real = http; + cloned.async_http.onStart(); if (comptime Environment.allow_assert) { count += 1; } @@ -1095,6 +1132,13 @@ pub fn firstCall( comptime is_ssl: bool, socket: NewHTTPContext(is_ssl).HTTPSocket, ) void { + if (comptime FeatureFlags.is_fetch_preconnect_supported) { + if (client.is_preconnect_only) { + client.onPreconnect(is_ssl, socket); + return; + } + } + if (client.state.request_stage == .pending) { client.onWritable(true, comptime is_ssl, socket); } @@ -1144,17 +1188,17 @@ pub fn onTimeout( comptime is_ssl: bool, socket: NewHTTPContext(is_ssl).HTTPSocket, ) void { - _ = socket; + if (client.disable_timeout) return; log("Timeout {s}\n", .{client.url.href}); + defer NewHTTPContext(is_ssl).terminateSocket(socket); + if (client.state.stage != .done and client.state.stage != .fail) { client.fail(error.Timeout); } } pub fn onConnectError( client: *HTTPClient, - comptime is_ssl: bool, - _: NewHTTPContext(is_ssl).HTTPSocket, ) void { log("onConnectError {s}\n", .{client.url.href}); if (client.state.stage != .done and client.state.stage != .fail) @@ -1408,6 +1452,8 @@ pub const InternalState = struct { received_last_chunk: bool = false, did_set_content_encoding: bool = false, is_redirect_pending: bool = false, + is_libdeflate_fast_path_disabled: bool = false, + resend_request_body_on_redirect: bool = false, transfer_encoding: Encoding = Encoding.identity, encoding: Encoding = Encoding.identity, content_encoding_i: u8 = std.math.maxInt(u8), @@ -1495,40 +1541,93 @@ pub const InternalState = struct { return this.received_last_chunk; } - fn decompressBytes(this: *InternalState, buffer: []const u8, body_out_str: *MutableString) !void { - log("Decompressing {d} bytes\n", .{buffer.len}); - + fn decompressBytes(this: *InternalState, buffer: []const u8, body_out_str: *MutableString, is_final_chunk: bool) !void { defer this.compressed_body.reset(); var gzip_timer: std.time.Timer = undefined; if (extremely_verbose) gzip_timer = std.time.Timer.start() catch @panic("Timer failure"); - try 
this.decompressor.updateBuffers(this.encoding, buffer, body_out_str);
-        this.decompressor.readAll(this.isDone()) catch |err| {
-            if (this.isDone() or error.ShortRead != err) {
-                Output.prettyErrorln("Decompression error: {s}", .{bun.asByteSlice(@errorName(err))});
-                Output.flush();
-                return err;
+        var still_needs_to_decompress = true;
+
+        if (FeatureFlags.isLibdeflateEnabled()) {
+            // Fast-path: use libdeflate
+            if (is_final_chunk and !this.is_libdeflate_fast_path_disabled and this.encoding.canUseLibDeflate() and this.isDone()) libdeflate: {
+                this.is_libdeflate_fast_path_disabled = true;
+
+                log("Decompressing {d} bytes with libdeflate\n", .{buffer.len});
+                var deflater = http_thread.deflater();
+
+                // gzip stores the size of the uncompressed data in the last 4 bytes of the stream,
+                // but it's only valid if the stream is less than 4 GiB, since the field is only 4 bytes.
+                // If we know that the stream is going to be larger than our
+                // pre-allocated buffer, then let's dynamically allocate the exact
+                // size.
+                if (this.encoding == Encoding.gzip and buffer.len > 16 and buffer.len < 1024 * 1024 * 1024) {
+                    const estimated_size: u32 = @bitCast(buffer[buffer.len - 4 ..][0..4].*);
+                    // Since this is arbitrary input from the internet, let's set an upper bound of 32 MB for the allocation size.
+                    if (estimated_size > deflater.shared_buffer.len and estimated_size < 32 * 1024 * 1024) {
+                        try body_out_str.list.ensureTotalCapacityPrecise(body_out_str.allocator, estimated_size);
+                        const result = deflater.decompressor.decompress(buffer, body_out_str.list.allocatedSlice(), .gzip);
+
+                        if (result.status == .success) {
+                            body_out_str.list.items.len = result.written;
+                            still_needs_to_decompress = false;
+                        }
+
+                        break :libdeflate;
+                    }
+                }
+
+                const result = deflater.decompressor.decompress(buffer, &deflater.shared_buffer, switch (this.encoding) {
+                    .gzip => .gzip,
+                    .deflate => .deflate,
+                    else => unreachable,
+                });
+
+                if (result.status == .success) {
+                    try body_out_str.list.ensureTotalCapacityPrecise(body_out_str.allocator, result.written);
+                    body_out_str.list.appendSliceAssumeCapacity(deflater.shared_buffer[0..result.written]);
+                    still_needs_to_decompress = false;
+                }
             }
-        };
+        }
+
+        // Slow path, or brotli: use the .decompressor
+        if (still_needs_to_decompress) {
+            log("Decompressing {d} bytes\n", .{buffer.len});
+            if (body_out_str.list.capacity == 0) {
+                const min = @min(@ceil(@as(f64, @floatFromInt(buffer.len)) * 1.5), @as(f64, 1024 * 1024 * 2));
+                try body_out_str.growBy(@max(@as(usize, @intFromFloat(min)), 32));
+            }
+
+            try this.decompressor.updateBuffers(this.encoding, buffer, body_out_str);
+
+            this.decompressor.readAll(this.isDone()) catch |err| {
+                if (this.isDone() or error.ShortRead != err) {
+                    Output.prettyErrorln("Decompression error: {s}", .{bun.asByteSlice(@errorName(err))});
+                    Output.flush();
+                    return err;
+                }
+            };
+        }
 
         if (extremely_verbose)
             this.gzip_elapsed = gzip_timer.read();
     }
 
-    fn decompress(this: *InternalState, buffer: MutableString, body_out_str: *MutableString) !void {
-        try this.decompressBytes(buffer.list.items, body_out_str);
+    fn decompress(this: *InternalState, buffer: MutableString, body_out_str: *MutableString, is_final_chunk: bool) !void {
+        try this.decompressBytes(buffer.list.items, body_out_str, is_final_chunk);
     }
 
-    pub fn processBodyBuffer(this: *InternalState, buffer: MutableString) !bool {
+    pub fn processBodyBuffer(this: *InternalState, buffer: MutableString, is_final_chunk: bool) !bool {
         if (this.is_redirect_pending) return false;
 
         var body_out_str = this.body_out_str.?;
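For context on the fast path above: the last 4 bytes of a gzip stream are the ISIZE footer, the uncompressed size modulo 2^32 stored little-endian. It is only a hint — a hostile stream can claim any size — which is why the code clamps it to 32 MB before trusting it for an allocation. A standalone sketch of reading that hint (hypothetical helper, not part of this patch):

```zig
const std = @import("std");

/// Read the gzip ISIZE footer: the uncompressed size mod 2^32, little-endian.
/// Treat the result strictly as an allocation hint and clamp it, since the
/// stream controls this value.
fn gzipSizeHint(gz: []const u8) ?u32 {
    if (gz.len < 18) return null; // 10-byte header + 8-byte footer minimum
    return std.mem.readInt(u32, gz[gz.len - 4 ..][0..4], .little);
}

test "size hint of an empty gzip stream" {
    // `gzip` output for empty input: header, empty deflate block, CRC32, ISIZE.
    const empty_gz = [_]u8{
        0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // header
        0x03, 0x00, // empty deflate block
        0x00, 0x00, 0x00, 0x00, // CRC32 of nothing
        0x00, 0x00, 0x00, 0x00, // ISIZE = 0
    };
    try std.testing.expectEqual(@as(?u32, 0), gzipSizeHint(&empty_gz));
}
```

The patch itself uses `@bitCast` on the 4-byte array, which assumes a little-endian host; `readInt` makes the endianness explicit.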
switch (this.encoding) { Encoding.brotli, Encoding.gzip, Encoding.deflate => { - try this.decompress(buffer, body_out_str); + try this.decompress(buffer, body_out_str, is_final_chunk); }, else => { if (!body_out_str.owns(buffer.list.items)) { @@ -1566,7 +1665,6 @@ remaining_redirect_count: i8 = default_redirect_count, allow_retry: bool = false, redirect_type: FetchRedirect = FetchRedirect.follow, redirect: []u8 = &.{}, -timeout: usize = 0, progress_node: ?*Progress.Node = null, disable_timeout: bool = false, disable_keepalive: bool = false, @@ -1591,8 +1689,8 @@ signals: Signals = .{}, async_http_id: u32 = 0, hostname: ?[]u8 = null, reject_unauthorized: bool = true, - unix_socket_path: JSC.ZigString.Slice = JSC.ZigString.Slice.empty, +is_preconnect_only: bool = false, pub fn deinit(this: *HTTPClient) void { if (this.redirect.len > 0) { @@ -1672,6 +1770,13 @@ pub const Encoding = enum { brotli, chunked, + pub fn canUseLibDeflate(this: Encoding) bool { + return switch (this) { + .gzip, .deflate => true, + else => false, + }; + } + pub fn isCompressed(this: Encoding) bool { return switch (this) { .brotli, .gzip, .deflate => true, @@ -1744,8 +1849,6 @@ pub const AsyncHTTP = struct { task: ThreadPool.Task = ThreadPool.Task{ .callback = &startAsyncHTTP }, result_callback: HTTPClientResult.Callback = undefined, - /// Timeout in nanoseconds - timeout: usize = 0, redirected: bool = false, response_encoding: Encoding = Encoding.identity, @@ -1833,6 +1936,51 @@ pub const AsyncHTTP = struct { tls_props: ?*SSLConfig = null, }; + const Preconnect = struct { + async_http: AsyncHTTP, + response_buffer: MutableString, + url: bun.URL, + is_url_owned: bool, + + pub usingnamespace bun.New(@This()); + + pub fn onResult(this: *Preconnect, _: *AsyncHTTP, _: HTTPClientResult) void { + this.response_buffer.deinit(); + this.async_http.clearData(); + this.async_http.client.deinit(); + if (this.is_url_owned) { + bun.default_allocator.free(this.url.href); + } + + this.destroy(); + } + }; + + pub fn preconnect( + url: URL, + is_url_owned: bool, + ) void { + if (!FeatureFlags.is_fetch_preconnect_supported) { + if (is_url_owned) { + bun.default_allocator.free(url.href); + } + + return; + } + + var this = Preconnect.new(.{ + .async_http = undefined, + .response_buffer = MutableString{ .allocator = default_allocator, .list = .{} }, + .url = url, + .is_url_owned = is_url_owned, + }); + + this.async_http = AsyncHTTP.init(bun.default_allocator, .GET, url, .{}, "", &this.response_buffer, "", HTTPClientResult.Callback.New(*Preconnect, Preconnect.onResult).init(this), .manual, .{}); + this.async_http.client.is_preconnect_only = true; + + http_thread.schedule(Batch.from(&this.async_http.task)); + } + pub fn init( allocator: std.mem.Allocator, method: Method, @@ -1841,7 +1989,6 @@ pub const AsyncHTTP = struct { headers_buf: string, response_buffer: *MutableString, request_body: []const u8, - timeout: usize, callback: HTTPClientResult.Callback, redirect_type: FetchRedirect, options: Options, @@ -1858,7 +2005,6 @@ pub const AsyncHTTP = struct { .http_proxy = options.http_proxy, .signals = options.signals orelse .{}, .async_http_id = if (options.signals != null and options.signals.?.aborted != null) async_http_id.fetchAdd(1, .monotonic) else 0, - .timeout = timeout, }; this.client = .{ @@ -1870,7 +2016,6 @@ pub const AsyncHTTP = struct { .hostname = options.hostname, .signals = options.signals orelse this.signals, .async_http_id = this.async_http_id, - .timeout = timeout, .http_proxy = this.http_proxy, .redirect_type = redirect_type, 
};
@@ -1956,20 +2101,17 @@ pub const AsyncHTTP = struct {
         return this;
     }
 
-    pub fn initSync(allocator: std.mem.Allocator, method: Method, url: URL, headers: Headers.Entries, headers_buf: string, response_buffer: *MutableString, request_body: []const u8, timeout: usize, http_proxy: ?URL, hostname: ?[]u8, redirect_type: FetchRedirect) AsyncHTTP {
-        return @This().init(allocator, method, url, headers, headers_buf, response_buffer, request_body, timeout, undefined, redirect_type, .{
+    pub fn initSync(allocator: std.mem.Allocator, method: Method, url: URL, headers: Headers.Entries, headers_buf: string, response_buffer: *MutableString, request_body: []const u8, http_proxy: ?URL, hostname: ?[]u8, redirect_type: FetchRedirect) AsyncHTTP {
+        return @This().init(allocator, method, url, headers, headers_buf, response_buffer, request_body, undefined, redirect_type, .{
             .http_proxy = http_proxy,
             .hostname = hostname,
         });
     }
 
     fn reset(this: *AsyncHTTP) !void {
-        const timeout = this.timeout;
         const aborted = this.client.aborted;
         this.client = try HTTPClient.init(this.allocator, this.method, this.client.url, this.client.header_entries, this.client.header_buf, aborted);
-        this.client.timeout = timeout;
         this.client.http_proxy = this.http_proxy;
-        this.timeout = timeout;
 
         if (this.http_proxy) |proxy| {
             // TODO: need to understand how it is possible to reuse a proxy with TLS, so disable keepalive if the URL is HTTPS
@@ -2087,12 +2229,23 @@ pub const AsyncHTTP = struct {
             this.state.store(State.fail, .monotonic);
         }
 
+        if (comptime Environment.enable_logs) {
+            if (socket_async_http_abort_tracker.count() > 0) {
+                log("socket_async_http_abort_tracker count: {d}", .{socket_async_http_abort_tracker.count()});
+            }
+        }
+
+        if (socket_async_http_abort_tracker.capacity() > 10_000 and socket_async_http_abort_tracker.count() < 100) {
+            socket_async_http_abort_tracker.shrinkAndFree(socket_async_http_abort_tracker.count());
+        }
+
         if (result.has_more) {
             callback.function(callback.ctx, async_http, result);
         } else {
             {
                 this.client.deinit();
-                defer default_allocator.destroy(this);
+                var threadlocal_http: *ThreadlocalAsyncHTTP = @fieldParentPtr("async_http", async_http);
+                defer threadlocal_http.destroy();
                 log("onAsyncHTTPCallback: {any}", .{bun.fmt.fmtDuration(this.elapsed)});
                 callback.function(callback.ctx, async_http, result);
             }
@@ -2236,13 +2389,22 @@ pub fn buildRequest(this: *HTTPClient, body_len: usize) picohttp.Request {
     };
 }
 
-pub fn doRedirect(this: *HTTPClient, comptime is_ssl: bool, ctx: *NewHTTPContext(is_ssl), socket: NewHTTPContext(is_ssl).HTTPSocket) void {
+pub fn doRedirect(
+    this: *HTTPClient,
+    comptime is_ssl: bool,
+    ctx: *NewHTTPContext(is_ssl),
+    socket: NewHTTPContext(is_ssl).HTTPSocket,
+) void {
     this.unix_socket_path.deinit();
     this.unix_socket_path = JSC.ZigString.Slice.empty;
+    const request_body = if (this.state.resend_request_body_on_redirect and this.state.original_request_body == .bytes)
+        this.state.original_request_body.bytes
+    else
+        "";
 
     this.state.response_message_buffer.deinit();
+
     // we need to clean the client reference before closing the socket because we are going to reuse the same ref in another request
-    socket.ext(**anyopaque).* = bun.cast(**anyopaque, NewHTTPContext(is_ssl).ActiveSocket.init(&dead_socket).ptr());
 
     if (this.isKeepAlivePossible()) {
         assert(this.connected_url.hostname.len > 0);
         ctx.releaseSocket(
@@ -2252,8 +2414,7 @@ pub fn doRedirect(this: *HTTPClient, comptime is_ssl: bool, ctx: *NewHTTPContext
             this.connected_url.getPortAuto(),
         );
     } else {
-
NewHTTPContext(is_ssl).markSocketAsDead(socket); - socket.close(.normal); + NewHTTPContext(is_ssl).closeSocket(socket); } this.connected_url = URL{}; @@ -2278,7 +2439,8 @@ pub fn doRedirect(this: *HTTPClient, comptime is_ssl: bool, ctx: *NewHTTPContext if (this.signals.aborted != null) { _ = socket_async_http_abort_tracker.swapRemove(this.async_http_id); } - return this.start(.{ .bytes = "" }, body_out_str); + + return this.start(.{ .bytes = request_body }, body_out_str); } pub fn isHTTPS(this: *HTTPClient) bool { if (this.http_proxy) |proxy| { @@ -2326,6 +2488,7 @@ fn start_(this: *HTTPClient, comptime is_ssl: bool) void { }; if (socket.isClosed() and (this.state.response_stage != .done and this.state.response_stage != .fail)) { + NewHTTPContext(is_ssl).markSocketAsDead(socket); this.fail(error.ConnectionClosed); assert(this.state.fail != null); return; @@ -2368,12 +2531,38 @@ fn printResponse(response: picohttp.Response) void { Output.flush(); } +pub fn onPreconnect(this: *HTTPClient, comptime is_ssl: bool, socket: NewHTTPContext(is_ssl).HTTPSocket) void { + log("onPreconnect({})", .{this.url}); + _ = socket_async_http_abort_tracker.swapRemove(this.async_http_id); + const ctx = if (comptime is_ssl) &http_thread.https_context else &http_thread.http_context; + ctx.releaseSocket( + socket, + this.did_have_handshaking_error and !this.reject_unauthorized, + this.url.hostname, + this.url.getPortAuto(), + ); + + this.state.reset(this.allocator); + this.state.response_stage = .done; + this.state.request_stage = .done; + this.state.stage = .done; + this.proxy_tunneling = false; + this.result_callback.run(@fieldParentPtr("client", this), HTTPClientResult{ .fail = null, .metadata = null, .has_more = false }); +} + pub fn onWritable(this: *HTTPClient, comptime is_first_call: bool, comptime is_ssl: bool, socket: NewHTTPContext(is_ssl).HTTPSocket) void { if (this.signals.get(.aborted)) { this.closeAndAbort(is_ssl, socket); return; } + if (comptime FeatureFlags.is_fetch_preconnect_supported) { + if (this.is_preconnect_only) { + this.onPreconnect(is_ssl, socket); + return; + } + } + switch (this.state.request_stage) { .pending, .headers => { var stack_fallback = std.heap.stackFallback(16384, default_allocator); @@ -2656,8 +2845,7 @@ pub fn closeAndFail(this: *HTTPClient, err: anyerror, comptime is_ssl: bool, soc if (this.state.stage != .fail and this.state.stage != .done) { log("closeAndFail: {s}", .{@errorName(err)}); if (!socket.isClosed()) { - socket.ext(**anyopaque).* = bun.cast(**anyopaque, NewHTTPContext(is_ssl).ActiveSocket.init(&dead_socket).ptr()); - socket.close(.failure); + NewHTTPContext(is_ssl).terminateSocket(socket); } this.fail(err); } @@ -2787,7 +2975,6 @@ pub fn onData(this: *HTTPClient, comptime is_ssl: bool, incoming_data: []const u } return; } - const should_continue = this.handleResponseMetadata( &response, ) catch |err| { @@ -3075,8 +3262,7 @@ pub fn progressUpdate(this: *HTTPClient, comptime is_ssl: bool, ctx: *NewHTTPCon this.connected_url.getPortAuto(), ); } else if (!socket.isClosed()) { - NewHTTPContext(is_ssl).markSocketAsDead(socket); - socket.close(.normal); + NewHTTPContext(is_ssl).closeSocket(socket); } this.state.reset(this.allocator); @@ -3234,14 +3420,7 @@ fn handleResponseBodyFromSinglePacket(this: *HTTPClient, incoming_data: []const if (this.state.is_redirect_pending) return; if (this.state.encoding.isCompressed()) { - var body_buffer = this.state.body_out_str.?; - if (body_buffer.list.capacity == 0) { - const min = @min(@ceil(@as(f64, 
@floatFromInt(incoming_data.len)) * 1.5), @as(f64, 1024 * 1024 * 2)); - try body_buffer.growBy(@max(@as(usize, @intFromFloat(min)), 32)); - } - - // assert(!body_buffer.owns(b)); - try this.state.decompressBytes(incoming_data, body_buffer); + try this.state.decompressBytes(incoming_data, this.state.body_out_str.?, true); } else { try this.state.getBodyBuffer().appendSliceExact(incoming_data); } @@ -3289,7 +3468,12 @@ fn handleResponseBodyFromMultiplePackets(this: *HTTPClient, incoming_data: []con // done or streaming const is_done = content_length != null and this.state.total_body_received >= content_length.?; if (is_done or this.signals.get(.body_streaming) or content_length == null) { - const processed = try this.state.processBodyBuffer(buffer.*); + const is_final_chunk = is_done; + const processed = try this.state.processBodyBuffer(buffer.*, is_final_chunk); + + // We can only use the libdeflate fast path when we are not streaming + // If we ever call processBodyBuffer again, it cannot go through the fast path. + this.state.is_libdeflate_fast_path_disabled = true; if (this.progress_node) |progress| { progress.activate(); @@ -3354,7 +3538,9 @@ fn handleResponseBodyChunkedEncodingFromMultiplePackets( } // streaming chunks if (this.signals.get(.body_streaming)) { - return try this.state.processBodyBuffer(buffer); + // If we're streaming, we cannot use the libdeflate fast path + this.state.is_libdeflate_fast_path_disabled = true; + return try this.state.processBodyBuffer(buffer, false); } return false; @@ -3364,6 +3550,7 @@ fn handleResponseBodyChunkedEncodingFromMultiplePackets( this.state.received_last_chunk = true; _ = try this.state.processBodyBuffer( buffer, + true, ); if (this.progress_node) |progress| { @@ -3432,7 +3619,10 @@ fn handleResponseBodyChunkedEncodingFromSinglePacket( // streaming chunks if (this.signals.get(.body_streaming)) { - return try this.state.processBodyBuffer(body_buffer.*); + // If we're streaming, we cannot use the libdeflate fast path + this.state.is_libdeflate_fast_path_disabled = true; + + return try this.state.processBodyBuffer(body_buffer.*, true); } return false; @@ -3440,7 +3630,6 @@ fn handleResponseBodyChunkedEncodingFromSinglePacket( // Done else => { this.state.received_last_chunk = true; - try this.handleResponseBodyFromSinglePacket(buffer); assert(this.state.body_out_str.?.list.items.ptr != buffer.ptr); if (this.progress_node) |progress| { @@ -3785,6 +3974,9 @@ pub fn handleResponseMetadata( } this.state.is_redirect_pending = true; + if (this.method.hasRequestBody()) { + this.state.resend_request_body_on_redirect = true; + } }, else => {}, } @@ -3810,3 +4002,9 @@ pub fn handleResponseMetadata( } const assert = bun.assert; + +// Exists for heap stats reasons. 
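The `ThreadlocalAsyncHTTP` wrapper declared next pairs with the `@fieldParentPtr("async_http", async_http)` call in the completion callback earlier in this patch: given a pointer to the embedded field, the builtin recovers the struct that contains it. A minimal sketch of the container-of idiom (illustrative types, not from the patch):

```zig
const std = @import("std");

const Wrapper = struct {
    payload: u32, // stand-in for the embedded AsyncHTTP value
};

fn wrapperFromPayload(payload: *u32) *Wrapper {
    // Recover the containing struct from a pointer to one of its fields.
    // The parent type is inferred from the annotated return type.
    return @fieldParentPtr("payload", payload);
}

test "container-of round trip" {
    var w = Wrapper{ .payload = 7 };
    try std.testing.expectEqual(&w, wrapperFromPayload(&w.payload));
}
```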
+const ThreadlocalAsyncHTTP = struct { + async_http: AsyncHTTP, + pub usingnamespace bun.New(@This()); +}; diff --git a/src/import_record.zig b/src/import_record.zig index 93e18c124d..c21e2223c8 100644 --- a/src/import_record.zig +++ b/src/import_record.zig @@ -7,7 +7,6 @@ const Index = @import("ast/base.zig").Index; const Api = @import("./api/schema.zig").Api; pub const ImportKind = enum(u8) { - // An entry point provided by the user entry_point, diff --git a/src/install/extract_tarball.zig b/src/install/extract_tarball.zig index fc740cb479..53ba80c0d0 100644 --- a/src/install/extract_tarball.zig +++ b/src/install/extract_tarball.zig @@ -160,7 +160,13 @@ threadlocal var json_path_buf: bun.PathBuffer = undefined; fn extract(this: *const ExtractTarball, tgz_bytes: []const u8) !Install.ExtractData { const tmpdir = this.temp_dir; var tmpname_buf: if (Environment.isWindows) bun.WPathBuffer else bun.PathBuffer = undefined; - const name = this.name.slice(); + const name = if (this.name.slice().len > 0) this.name.slice() else brk: { + // Not sure where this case hits yet. + // BUN-2WQ + Output.warn("Extracting nameless packages is not supported yet. Please open an issue on GitHub with reproduction steps.", .{}); + bun.debugAssert(false); + break :brk "unnamed-package"; + }; const basename = brk: { var tmp = name; if (tmp[0] == '@') { @@ -175,10 +181,6 @@ fn extract(this: *const ExtractTarball, tgz_bytes: []const u8) !Install.ExtractD } } - if (comptime Environment.allow_assert) { - bun.assert(tmp.len > 0); - } - break :brk tmp; }; @@ -198,28 +200,70 @@ fn extract(this: *const ExtractTarball, tgz_bytes: []const u8) !Install.ExtractD defer extract_destination.close(); - if (PackageManager.verbose_install) { - Output.prettyErrorln("[{s}] Start extracting {s}", .{ name, tmpname }); - Output.flush(); - } - const Archive = @import("../libarchive/libarchive.zig").Archive; const Zlib = @import("../zlib.zig"); var zlib_pool = Npm.Registry.BodyPool.get(default_allocator); zlib_pool.data.reset(); defer Npm.Registry.BodyPool.release(zlib_pool); - var zlib_entry = try Zlib.ZlibReaderArrayList.init(tgz_bytes, &zlib_pool.data.list, default_allocator); - zlib_entry.readAll() catch |err| { - this.package_manager.log.addErrorFmt( - null, - logger.Loc.Empty, - this.package_manager.allocator, - "{s} decompressing \"{s}\" to \"{}\"", - .{ @errorName(err), name, bun.fmt.fmtPath(u8, std.mem.span(tmpname), .{}) }, - ) catch unreachable; - return error.InstallFailed; - }; + var esimated_output_size: usize = 0; + + const time_started_for_verbose_logs: u64 = if (PackageManager.verbose_install) bun.getRoughTickCount().ns() else 0; + + { + // Last 4 bytes of a gzip-compressed file are the uncompressed size. + if (tgz_bytes.len > 16) { + // If the file claims to be larger than 16 bytes and smaller than 64 MB, we'll preallocate the buffer. + // If it's larger than that, we'll do it incrementally. We want to avoid OOMing. + const last_4_bytes: u32 = @bitCast(tgz_bytes[tgz_bytes.len - 4 ..][0..4].*); + if (last_4_bytes > 16 and last_4_bytes < 64 * 1024 * 1024) { + // It's okay if this fails. We will just allocate as we go and that will error if we run out of memory. 
+ esimated_output_size = last_4_bytes; + if (zlib_pool.data.list.capacity == 0) { + zlib_pool.data.list.ensureTotalCapacityPrecise(zlib_pool.data.allocator, last_4_bytes) catch {}; + } else { + zlib_pool.data.ensureUnusedCapacity(last_4_bytes) catch {}; + } + } + } + } + + var needs_to_decompress = true; + if (bun.FeatureFlags.isLibdeflateEnabled() and zlib_pool.data.list.capacity > 16 and esimated_output_size > 0) use_libdeflate: { + const decompressor = bun.libdeflate.Decompressor.alloc() orelse break :use_libdeflate; + defer decompressor.deinit(); + + const result = decompressor.gzip(tgz_bytes, zlib_pool.data.list.allocatedSlice()); + + if (result.status == .success) { + zlib_pool.data.list.items.len = result.written; + needs_to_decompress = false; + } + + // If libdeflate fails for any reason, fallback to zlib. + } + + if (needs_to_decompress) { + zlib_pool.data.list.clearRetainingCapacity(); + var zlib_entry = try Zlib.ZlibReaderArrayList.init(tgz_bytes, &zlib_pool.data.list, default_allocator); + zlib_entry.readAll() catch |err| { + this.package_manager.log.addErrorFmt( + null, + logger.Loc.Empty, + this.package_manager.allocator, + "{s} decompressing \"{s}\" to \"{}\"", + .{ @errorName(err), name, bun.fmt.fmtPath(u8, std.mem.span(tmpname), .{}) }, + ) catch unreachable; + return error.InstallFailed; + }; + } + + if (PackageManager.verbose_install) { + const decompressing_ended_at: u64 = bun.getRoughTickCount().ns(); + const elapsed = decompressing_ended_at - time_started_for_verbose_logs; + Output.prettyErrorln("[{s}] Extract {s} (decompressed {} tgz file in {})", .{ name, tmpname, bun.fmt.size(tgz_bytes.len), bun.fmt.fmtDuration(elapsed) }); + } + switch (this.resolution.tag) { .github => { const DirnameReader = struct { @@ -278,7 +322,8 @@ fn extract(this: *const ExtractTarball, tgz_bytes: []const u8) !Install.ExtractD } if (PackageManager.verbose_install) { - Output.prettyErrorln("[{s}] Extracted", .{name}); + const elapsed = bun.getRoughTickCount().ns() - time_started_for_verbose_logs; + Output.prettyErrorln("[{s}] Extracted to {s} ({})", .{ name, tmpname, bun.fmt.fmtDuration(elapsed) }); Output.flush(); } } diff --git a/src/install/install.zig b/src/install/install.zig index 00e4a06480..8d6bb0a0bf 100644 --- a/src/install/install.zig +++ b/src/install/install.zig @@ -426,7 +426,7 @@ const NetworkTask = struct { this.allocator = allocator; const url = URL.parse(this.url_buf); - this.http = AsyncHTTP.init(allocator, .GET, url, header_builder.entries, header_builder.content.ptr.?[0..header_builder.content.len], &this.response_buffer, "", 0, this.getCompletionCallback(), HTTP.FetchRedirect.follow, .{ + this.http = AsyncHTTP.init(allocator, .GET, url, header_builder.entries, header_builder.content.ptr.?[0..header_builder.content.len], &this.response_buffer, "", this.getCompletionCallback(), HTTP.FetchRedirect.follow, .{ .http_proxy = this.package_manager.httpProxy(url), }); this.http.client.reject_unauthorized = this.package_manager.tlsRejectUnauthorized(); @@ -510,7 +510,7 @@ const NetworkTask = struct { const url = URL.parse(this.url_buf); - this.http = AsyncHTTP.init(allocator, .GET, url, header_builder.entries, header_buf, &this.response_buffer, "", 0, this.getCompletionCallback(), HTTP.FetchRedirect.follow, .{ + this.http = AsyncHTTP.init(allocator, .GET, url, header_builder.entries, header_buf, &this.response_buffer, "", this.getCompletionCallback(), HTTP.FetchRedirect.follow, .{ .http_proxy = this.package_manager.httpProxy(url), }); this.http.client.reject_unauthorized = 
this.package_manager.tlsRejectUnauthorized(); @@ -6167,19 +6167,21 @@ pub const PackageManager = struct { } } - const response = task.http.response orelse { + if (!has_network_error and task.http.response == null) { + has_network_error = true; + const min = manager.options.min_simultaneous_requests; + const max = AsyncHTTP.max_simultaneous_requests.load(.monotonic); + if (max > min) { + AsyncHTTP.max_simultaneous_requests.store(@max(min, max / 2), .monotonic); + } + } + + // Handle retry-able errors. + if (task.http.response == null or task.http.response.?.status_code > 499) { const err = task.http.err orelse error.HTTPError; if (task.retried < manager.options.max_retry_count) { task.retried += 1; - if (!has_network_error) { - has_network_error = true; - const min = manager.options.min_simultaneous_requests; - const max = AsyncHTTP.max_simultaneous_requests.load(.monotonic); - if (max > min) { - AsyncHTTP.max_simultaneous_requests.store(@max(min, max / 2), .monotonic); - } - } manager.enqueueNetworkTask(task); if (manager.options.log_level.isVerbose()) { @@ -6187,13 +6189,18 @@ pub const PackageManager = struct { null, logger.Loc.Empty, manager.allocator, - "{s} downloading package manifest {s}", - .{ bun.span(@errorName(err)), name.slice() }, + "{s} downloading package manifest {s}. Retry {d}/{d}...", + .{ bun.span(@errorName(err)), name.slice(), task.retried, manager.options.max_retry_count }, ) catch unreachable; } continue; } + } + + const response = task.http.response orelse { + // Handle non-retry-able errors. + const err = task.http.err orelse error.HTTPError; if (@TypeOf(callbacks.onPackageManifestError) != void) { callbacks.onPackageManifestError( @@ -6336,19 +6343,20 @@ pub const PackageManager = struct { manager.task_batch.push(ThreadPool.Batch.from(manager.enqueueParseNPMPackage(task.task_id, name, task))); }, .extract => |*extract| { - const response = task.http.response orelse { + if (!has_network_error and task.http.response == null) { + has_network_error = true; + const min = manager.options.min_simultaneous_requests; + const max = AsyncHTTP.max_simultaneous_requests.load(.monotonic); + if (max > min) { + AsyncHTTP.max_simultaneous_requests.store(@max(min, max / 2), .monotonic); + } + } + + if (task.http.response == null or task.http.response.?.status_code > 499) { const err = task.http.err orelse error.TarballFailedToDownload; if (task.retried < manager.options.max_retry_count) { task.retried += 1; - if (!has_network_error) { - has_network_error = true; - const min = manager.options.min_simultaneous_requests; - const max = AsyncHTTP.max_simultaneous_requests.load(.monotonic); - if (max > min) { - AsyncHTTP.max_simultaneous_requests.store(@max(min, max / 2), .monotonic); - } - } manager.enqueueNetworkTask(task); if (manager.options.log_level.isVerbose()) { @@ -6356,17 +6364,23 @@ pub const PackageManager = struct { null, logger.Loc.Empty, manager.allocator, - "warn: {s} downloading tarball {s}@{s}", + "warn: {s} downloading tarball {s}@{s}. 
Retrying {d}/{d}...", .{ bun.span(@errorName(err)), extract.name.slice(), extract.resolution.fmt(manager.lockfile.buffers.string_bytes.items, .auto), + task.retried, + manager.options.max_retry_count, }, ) catch unreachable; } continue; } + } + + const response = task.http.response orelse { + const err = task.http.err orelse error.TarballFailedToDownload; if (@TypeOf(callbacks.onPackageDownloadError) != void) { const package_id = manager.lockfile.buffers.resolutions.items[extract.dependency_id]; @@ -6485,7 +6499,7 @@ pub const PackageManager = struct { if (comptime log_level.isVerbose()) { Output.prettyError(" ", .{}); Output.printElapsed(@as(f64, @floatCast(@as(f64, @floatFromInt(task.http.elapsed)) / std.time.ns_per_ms))); - Output.prettyError("Downloaded {s} tarball\n", .{extract.name.slice()}); + Output.prettyError(" Downloaded {s} tarball\n", .{extract.name.slice()}); Output.flush(); } @@ -8186,15 +8200,10 @@ pub const PackageManager = struct { } }; - pub fn init(ctx: Command.Context, comptime subcommand: Subcommand) !*PackageManager { - const cli = try CommandLineArguments.parse(ctx.allocator, subcommand); - return initWithCLI(ctx, cli, subcommand); - } - - fn initWithCLI( + pub fn init( ctx: Command.Context, cli: CommandLineArguments, - comptime subcommand: Subcommand, + subcommand: Subcommand, ) !*PackageManager { // assume that spawning a thread will take a lil so we do that asap HTTP.HTTPThread.init(); @@ -8286,7 +8295,7 @@ pub const PackageManager = struct { }; } - if (comptime subcommand == .install) { + if (subcommand == .install) { if (cli.positionals.len > 1) { // this is `bun add `. // @@ -8312,7 +8321,7 @@ pub const PackageManager = struct { // Check if this is a workspace; if so, use root package var found = false; - if (comptime subcommand != .link) { + if (subcommand != .link) { if (!created_package_json) { while (std.fs.path.dirname(this_cwd)) |parent| : (this_cwd = parent) { const parent_without_trailing_slash = strings.withoutTrailingSlash(parent); @@ -8680,29 +8689,29 @@ pub const PackageManager = struct { // parse dependency of positional arg string (may include name@version for example) // get the precise version from the lockfile (there may be multiple) // copy the contents into a temp folder - pub inline fn patch(ctx: Command.Context) !void { + pub fn patch(ctx: Command.Context) !void { try updatePackageJSONAndInstallCatchError(ctx, .patch); } - pub inline fn patchCommit(ctx: Command.Context) !void { + pub fn patchCommit(ctx: Command.Context) !void { try updatePackageJSONAndInstallCatchError(ctx, .@"patch-commit"); } - pub inline fn update(ctx: Command.Context) !void { + pub fn update(ctx: Command.Context) !void { try updatePackageJSONAndInstallCatchError(ctx, .update); } - pub inline fn add(ctx: Command.Context) !void { + pub fn add(ctx: Command.Context) !void { try updatePackageJSONAndInstallCatchError(ctx, .add); } - pub inline fn remove(ctx: Command.Context) !void { + pub fn remove(ctx: Command.Context) !void { try updatePackageJSONAndInstallCatchError(ctx, .remove); } pub fn updatePackageJSONAndInstallCatchError( ctx: Command.Context, - comptime subcommand: Subcommand, + subcommand: Subcommand, ) !void { updatePackageJSONAndInstall(ctx, subcommand) catch |err| { switch (err) { @@ -8719,11 +8728,12 @@ pub const PackageManager = struct { }; } - pub inline fn link(ctx: Command.Context) !void { - var manager = PackageManager.init(ctx, .link) catch |err| brk: { + pub fn link(ctx: Command.Context) !void { + const cli = try 
CommandLineArguments.parse(ctx.allocator, .link); + var manager = PackageManager.init(ctx, cli, .link) catch |err| brk: { if (err == error.MissingPackageJSON) { try attemptToCreatePackageJSON(); - break :brk try PackageManager.init(ctx, .link); + break :brk try PackageManager.init(ctx, cli, .link); } return err; @@ -8900,11 +8910,12 @@ pub const PackageManager = struct { } } - pub inline fn unlink(ctx: Command.Context) !void { - var manager = PackageManager.init(ctx, .unlink) catch |err| brk: { + pub fn unlink(ctx: Command.Context) !void { + const cli = try PackageManager.CommandLineArguments.parse(ctx.allocator, .unlink); + var manager = PackageManager.init(ctx, cli, .unlink) catch |err| brk: { if (err == error.MissingPackageJSON) { try attemptToCreatePackageJSON(); - break :brk try PackageManager.init(ctx, .unlink); + break :brk try PackageManager.init(ctx, cli, .unlink); } return err; @@ -9056,54 +9067,54 @@ pub const PackageManager = struct { clap.parseParam("-h, --help Print this help menu") catch unreachable, }; - pub const install_params = install_params_ ++ [_]ParamType{ + pub const install_params: []const ParamType = &(install_params_ ++ [_]ParamType{ clap.parseParam("-d, --dev Add dependency to \"devDependencies\"") catch unreachable, clap.parseParam("-D, --development") catch unreachable, clap.parseParam("--optional Add dependency to \"optionalDependencies\"") catch unreachable, clap.parseParam("-E, --exact Add the exact version instead of the ^range") catch unreachable, clap.parseParam(" ... ") catch unreachable, - }; + }); - pub const update_params = install_params_ ++ [_]ParamType{ + pub const update_params: []const ParamType = &(install_params_ ++ [_]ParamType{ clap.parseParam("--latest Update packages to their latest versions") catch unreachable, clap.parseParam(" ... \"name\" of packages to update") catch unreachable, - }; + }); - pub const pm_params = install_params_ ++ [_]ParamType{ + pub const pm_params: []const ParamType = &(install_params_ ++ [_]ParamType{ clap.parseParam("-a, --all") catch unreachable, clap.parseParam(" ... ") catch unreachable, - }; + }); - pub const add_params = install_params_ ++ [_]ParamType{ + pub const add_params: []const ParamType = &(install_params_ ++ [_]ParamType{ clap.parseParam("-d, --dev Add dependency to \"devDependencies\"") catch unreachable, clap.parseParam("-D, --development") catch unreachable, clap.parseParam("--optional Add dependency to \"optionalDependencies\"") catch unreachable, clap.parseParam("-E, --exact Add the exact version instead of the ^range") catch unreachable, clap.parseParam(" ... \"name\" or \"name@version\" of package(s) to install") catch unreachable, - }; + }); - pub const remove_params = install_params_ ++ [_]ParamType{ + pub const remove_params: []const ParamType = &(install_params_ ++ [_]ParamType{ clap.parseParam(" ... \"name\" of package(s) to remove from package.json") catch unreachable, - }; + }); - pub const link_params = install_params_ ++ [_]ParamType{ + pub const link_params: []const ParamType = &(install_params_ ++ [_]ParamType{ clap.parseParam(" ... \"name\" install package as a link") catch unreachable, - }; + }); - pub const unlink_params = install_params_ ++ [_]ParamType{ + pub const unlink_params: []const ParamType = &(install_params_ ++ [_]ParamType{ clap.parseParam(" ... \"name\" uninstall package as a link") catch unreachable, - }; + }); - const patch_params = install_params_ ++ [_]ParamType{ + const patch_params: []const ParamType = &(install_params_ ++ [_]ParamType{ clap.parseParam(" ... 
\"name\" of the package to patch") catch unreachable, clap.parseParam("--commit Install a package containing modifications in `dir`") catch unreachable, clap.parseParam("--patches-dir The directory to put the patch file in (only if --commit is used)") catch unreachable, - }; + }); - const patch_commit_params = install_params_ ++ [_]ParamType{ + const patch_commit_params: []const ParamType = &(install_params_ ++ [_]ParamType{ clap.parseParam(" ... \"dir\" containing changes to a package") catch unreachable, clap.parseParam("--patches-dir The directory to put the patch file") catch unreachable, - }; + }); pub const CommandLineArguments = struct { registry: string = "", @@ -9169,7 +9180,7 @@ pub const PackageManager = struct { } }; - pub fn printHelp(comptime subcommand: Subcommand) void { + pub fn printHelp(subcommand: Subcommand) void { switch (subcommand) { // fall back to HelpCommand.printWithReason Subcommand.install => { @@ -9192,7 +9203,7 @@ pub const PackageManager = struct { Output.flush(); Output.pretty("\n\nFlags:", .{}); Output.flush(); - clap.simpleHelp(&PackageManager.install_params); + clap.simpleHelp(PackageManager.install_params); Output.pretty("\n\n" ++ outro_text ++ "\n", .{}); Output.flush(); }, @@ -9219,7 +9230,7 @@ pub const PackageManager = struct { Output.flush(); Output.pretty("\nFlags:", .{}); Output.flush(); - clap.simpleHelp(&PackageManager.update_params); + clap.simpleHelp(PackageManager.update_params); Output.pretty("\n\n" ++ outro_text ++ "\n", .{}); Output.flush(); }, @@ -9235,7 +9246,7 @@ pub const PackageManager = struct { Output.flush(); Output.pretty("\nFlags:", .{}); Output.flush(); - clap.simpleHelp(&PackageManager.patch_params); + clap.simpleHelp(PackageManager.patch_params); // Output.pretty("\n\n" ++ outro_text ++ "\n", .{}); Output.flush(); }, @@ -9260,7 +9271,7 @@ pub const PackageManager = struct { Output.flush(); Output.pretty("\nFlags:", .{}); Output.flush(); - clap.simpleHelp(&PackageManager.patch_params); + clap.simpleHelp(PackageManager.patch_params); // Output.pretty("\n\n" ++ outro_text ++ "\n", .{}); Output.flush(); }, @@ -9290,7 +9301,7 @@ pub const PackageManager = struct { Output.flush(); Output.pretty("\n\nFlags:", .{}); Output.flush(); - clap.simpleHelp(&PackageManager.add_params); + clap.simpleHelp(PackageManager.add_params); Output.pretty("\n\n" ++ outro_text ++ "\n", .{}); Output.flush(); }, @@ -9312,7 +9323,7 @@ pub const PackageManager = struct { Output.flush(); Output.pretty("\nFlags:", .{}); Output.flush(); - clap.simpleHelp(&PackageManager.remove_params); + clap.simpleHelp(PackageManager.remove_params); Output.pretty("\n\n" ++ outro_text ++ "\n", .{}); Output.flush(); }, @@ -9336,7 +9347,7 @@ pub const PackageManager = struct { Output.flush(); Output.pretty("\nFlags:", .{}); Output.flush(); - clap.simpleHelp(&PackageManager.link_params); + clap.simpleHelp(PackageManager.link_params); Output.pretty("\n\n" ++ outro_text ++ "\n", .{}); Output.flush(); }, @@ -9357,7 +9368,7 @@ pub const PackageManager = struct { Output.flush(); Output.pretty("\nFlags:", .{}); Output.flush(); - clap.simpleHelp(&PackageManager.unlink_params); + clap.simpleHelp(PackageManager.unlink_params); Output.pretty("\n\n" ++ outro_text ++ "\n", .{}); Output.flush(); }, @@ -9367,7 +9378,7 @@ pub const PackageManager = struct { pub fn parse(allocator: std.mem.Allocator, comptime subcommand: Subcommand) !CommandLineArguments { Output.is_verbose = Output.isVerbose(); - const params: []const ParamType = &switch (subcommand) { + const params: []const ParamType = 
switch (subcommand) { .install => install_params, .update => update_params, .pm => pm_params, @@ -9681,9 +9692,12 @@ pub const PackageManager = struct { fn updatePackageJSONAndInstall( ctx: Command.Context, - comptime subcommand: Subcommand, + subcommand: Subcommand, ) !void { - var manager = init(ctx, subcommand) catch |err| brk: { + const cli = switch (subcommand) { + inline else => |cmd| try PackageManager.CommandLineArguments.parse(ctx.allocator, cmd), + }; + var manager = init(ctx, cli, subcommand) catch |err| brk: { if (err == error.MissingPackageJSON) { switch (subcommand) { .update => { @@ -9700,7 +9714,7 @@ pub const PackageManager = struct { }, else => { try attemptToCreatePackageJSON(); - break :brk try PackageManager.init(ctx, subcommand); + break :brk try PackageManager.init(ctx, cli, subcommand); }, } } @@ -9709,7 +9723,7 @@ pub const PackageManager = struct { }; if (manager.options.shouldPrintCommandName()) { - Output.prettyErrorln("bun " ++ @tagName(subcommand) ++ " v" ++ Global.package_json_version_with_sha ++ "\n", .{}); + Output.prettyErrorln("bun {s} v" ++ Global.package_json_version_with_sha ++ "\n", .{@tagName(subcommand)}); Output.flush(); } @@ -9882,21 +9896,19 @@ pub const PackageManager = struct { &[_]UpdateRequest{} else UpdateRequest.parse(ctx.allocator, ctx.log, manager.options.positionals[1..], &update_requests, manager.subcommand); - switch (manager.subcommand) { - inline else => |subcommand| try manager.updatePackageJSONAndInstallWithManagerWithUpdates( - ctx, - updates, - subcommand, - log_level, - ), - } + try manager.updatePackageJSONAndInstallWithManagerWithUpdates( + ctx, + updates, + manager.subcommand, + log_level, + ); } fn updatePackageJSONAndInstallWithManagerWithUpdates( manager: *PackageManager, ctx: Command.Context, updates: []UpdateRequest, - comptime subcommand: Subcommand, + subcommand: Subcommand, comptime log_level: Options.LogLevel, ) !void { if (manager.log.errors > 0) { @@ -9950,17 +9962,17 @@ pub const PackageManager = struct { if (subcommand == .remove) { if (current_package_json.root.data != .e_object) { - Output.errGeneric("package.json is not an Object {{}}, so there's nothing to " ++ @tagName(subcommand) ++ "!", .{}); + Output.errGeneric("package.json is not an Object {{}}, so there's nothing to {s}!", .{@tagName(subcommand)}); Global.crash(); } else if (current_package_json.root.data.e_object.properties.len == 0) { - Output.errGeneric("package.json is empty {{}}, so there's nothing to " ++ @tagName(subcommand) ++ "!", .{}); + Output.errGeneric("package.json is empty {{}}, so there's nothing to {s}!", .{@tagName(subcommand)}); Global.crash(); } else if (current_package_json.root.asProperty("devDependencies") == null and current_package_json.root.asProperty("dependencies") == null and current_package_json.root.asProperty("optionalDependencies") == null and current_package_json.root.asProperty("peerDependencies") == null) { - Output.prettyErrorln("package.json doesn't have dependencies, there's nothing to " ++ @tagName(subcommand) ++ "!", .{}); + Output.prettyErrorln("package.json doesn't have dependencies, there's nothing to {s}!", .{@tagName(subcommand)}); Global.exit(0); } } @@ -11443,8 +11455,9 @@ pub const PackageManager = struct { var package_json_cwd_buf: bun.PathBuffer = undefined; pub var package_json_cwd: string = ""; - pub inline fn install(ctx: Command.Context) !void { - var manager = try init(ctx, .install); + pub fn install(ctx: Command.Context) !void { + const cli = try CommandLineArguments.parse(ctx.allocator, 
.install); + var manager = try init(ctx, cli, .install); // switch to `bun add ` if (manager.options.positionals.len > 1) { diff --git a/src/install/lifecycle_script_runner.zig b/src/install/lifecycle_script_runner.zig index 0d1eeddef3..f9df8bfd63 100644 --- a/src/install/lifecycle_script_runner.zig +++ b/src/install/lifecycle_script_runner.zig @@ -353,15 +353,15 @@ pub const LifecycleScriptSubprocess = struct { }, .signaled => |signal| { this.printOutput(); + const signal_code = bun.SignalCode.from(signal); + Output.prettyErrorln("error: {s} script from \"{s}\" terminated by {}", .{ this.scriptName(), this.package_name, - - bun.SignalCode.from(signal).fmt(Output.enable_ansi_colors_stderr), + signal_code.fmt(Output.enable_ansi_colors_stderr), }); - Global.raiseIgnoringPanicHandler(@intFromEnum(signal)); - return; + Global.raiseIgnoringPanicHandler(signal); }, .err => |err| { Output.prettyErrorln("error: Failed to run {s} script from \"{s}\" due to\n{}", .{ @@ -372,7 +372,6 @@ pub const LifecycleScriptSubprocess = struct { this.deinit(); Output.flush(); Global.exit(1); - return; }, else => { Output.panic("error: Failed to run {s} script from \"{s}\" due to unexpected status\n{any}", .{ diff --git a/src/js/builtins/BunBuiltinNames.h b/src/js/builtins/BunBuiltinNames.h index 226a340ae3..1ea5e53986 100644 --- a/src/js/builtins/BunBuiltinNames.h +++ b/src/js/builtins/BunBuiltinNames.h @@ -14,6 +14,7 @@ #include #include #include +#include "BunBuiltinNames+extras.h" namespace WebCore { @@ -194,7 +195,6 @@ using namespace JSC; macro(requireESM) \ macro(requireMap) \ macro(requireNativeModule) \ - macro(resolve) \ macro(resolveSync) \ macro(resume) \ macro(self) \ @@ -250,6 +250,8 @@ using namespace JSC; macro(writeRequests) \ macro(writing) \ macro(written) \ + BUN_ADDITIONAL_BUILTIN_NAMES(macro) +// --- END of BUN_COMMON_PRIVATE_IDENTIFIERS_EACH_PROPERTY_NAME --- class BunBuiltinNames { public: @@ -268,6 +270,8 @@ public: BUN_COMMON_PRIVATE_IDENTIFIERS_EACH_PROPERTY_NAME(DECLARE_BUILTIN_IDENTIFIER_ACCESSOR) + const JSC::Identifier& resolvePublicName() const { return m_vm.propertyNames->resolve;} + private: JSC::VM& m_vm; BUN_COMMON_PRIVATE_IDENTIFIERS_EACH_PROPERTY_NAME(DECLARE_BUILTIN_NAMES) diff --git a/src/js/builtins/ReadableStreamInternals.ts b/src/js/builtins/ReadableStreamInternals.ts index 2fdc118f34..55eb00fb49 100644 --- a/src/js/builtins/ReadableStreamInternals.ts +++ b/src/js/builtins/ReadableStreamInternals.ts @@ -1328,7 +1328,13 @@ export function readableStreamDefaultControllerCallPullIfNeeded(controller) { export function isReadableStreamLocked(stream) { $assert($isReadableStream(stream)); - return !!$getByIdDirectPrivate(stream, "reader") || stream.$bunNativePtr === -1; + return ( + // Case 1. Is there a reader actively using it? + !!$getByIdDirectPrivate(stream, "reader") || + // Case 2. Has the native reader been released? + // Case 3. Has it been converted into a Node.js NativeReadable? 
+ stream.$bunNativePtr === -1 + ); } export function readableStreamDefaultControllerGetDesiredSize(controller) { diff --git a/src/js/internal-for-testing.ts b/src/js/internal-for-testing.ts index 4f6f42231b..9314943ab7 100644 --- a/src/js/internal-for-testing.ts +++ b/src/js/internal-for-testing.ts @@ -57,6 +57,7 @@ export const crash_handler = $zig("crash_handler.zig", "js_bindings.generate") a panic: () => void; rootError: () => void; outOfMemory: () => void; + raiseIgnoringPanicHandler: () => void; }; export const upgrade_test_helpers = $zig("upgrade_command.zig", "upgrade_js_bindings.generate") as { diff --git a/src/js/internal/errors.ts b/src/js/internal/errors.ts new file mode 100644 index 0000000000..034f33b457 --- /dev/null +++ b/src/js/internal/errors.ts @@ -0,0 +1,4 @@ +export default { + ERR_INVALID_ARG_TYPE: $newCppFunction("NodeError.cpp", "jsFunction_ERR_INVALID_ARG_TYPE", 3), + ERR_OUT_OF_RANGE: $newCppFunction("NodeError.cpp", "jsFunction_ERR_OUT_OF_RANGE", 3), +}; diff --git a/src/js/node/dgram.ts b/src/js/node/dgram.ts index b3fb856096..8762c82b0a 100644 --- a/src/js/node/dgram.ts +++ b/src/js/node/dgram.ts @@ -247,6 +247,7 @@ function Socket(type, listener) { ipv6Only: options && options.ipv6Only, recvBufferSize, sendBufferSize, + unrefOnBind: false, }; if (options?.signal !== undefined) { @@ -399,6 +400,10 @@ Socket.prototype.bind = function (port_, address_ /* , callback */) { }, }).$then( socket => { + if (state.unrefOnBind) { + socket.unref(); + state.unrefOnBind = false; + } state.handle.socket = socket; state.receiving = true; state.bindState = BIND_STATE_BOUND; @@ -934,7 +939,11 @@ Socket.prototype.ref = function () { Socket.prototype.unref = function () { const socket = this[kStateSymbol].handle?.socket; - if (socket) socket.unref(); + if (socket) { + socket.unref(); + } else { + this[kStateSymbol].unrefOnBind = true; + } return this; }; diff --git a/src/js/node/http.ts b/src/js/node/http.ts index 87e1a47338..45c8efefb8 100644 --- a/src/js/node/http.ts +++ b/src/js/node/http.ts @@ -7,10 +7,28 @@ const { getHeader, setHeader, assignHeaders: assignHeadersFast, -} = $cpp("NodeHTTP.cpp", "createNodeHTTPInternalBinding"); + Response, + Request, + Headers, + Blob, + headersTuple, +} = $cpp("NodeHTTP.cpp", "createNodeHTTPInternalBinding") as { + getHeader: (headers: Headers, name: string) => string | undefined; + setHeader: (headers: Headers, name: string, value: string) => void; + assignHeaders: (object: any, req: Request, headersTuple: any) => boolean; + Response: (typeof globalThis)["Response"]; + Request: (typeof globalThis)["Request"]; + Headers: (typeof globalThis)["Headers"]; + Blob: (typeof globalThis)["Blob"]; + headersTuple: any; +}; + +// TODO: make this more robust. 
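+// For example, it matches the rejection produced when fetch() is aborted: isAbortError(new DOMException("The operation was aborted.", "AbortError")) returns true, as does any error whose `name` is "AbortError".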
+function isAbortError(err) { + return err?.name === "AbortError"; +} const ObjectDefineProperty = Object.defineProperty; -const ObjectSetPrototypeOf = Object.setPrototypeOf; const GlobalPromise = globalThis.Promise; const headerCharRegex = /[^\t\x20-\x7e\x80-\xff]/; @@ -274,7 +292,7 @@ function Agent(options = kEmptyObject) { this.protocol = options.protocol || "http:"; } Agent.prototype = {}; -ObjectSetPrototypeOf(Agent.prototype, EventEmitter.prototype); +$setPrototypeDirect.$call(Agent.prototype, EventEmitter.prototype); ObjectDefineProperty(Agent, "globalAgent", { get: function () { @@ -331,21 +349,11 @@ Agent.prototype.destroy = function () { $debug(`${NODE_HTTP_WARNING}\n`, "WARN: Agent.destroy is a no-op"); }; -function emitListeningNextTick(self, onListen, err, hostname, port) { - if (typeof onListen === "function") { - try { - onListen.$apply(self, [err, hostname, port]); - } catch (err) { - self.emit("error", err); - } - } - - self.listening = !err; - - if (err) { - self.emit("error", err); - } else { - self.emit("listening", hostname, port); +function emitListeningNextTick(self, hostname, port) { + if ((self.listening = !!self[serverSymbol])) { + // TODO: remove the arguments + // Node does not pass any arguments. + self.emit("listening", null, hostname, port); } } @@ -431,200 +439,220 @@ function Server(options, callback) { if (callback) this.on("request", callback); return this; } -Object.setPrototypeOf((Server.prototype = {}), EventEmitter.prototype); -Server.prototype.constructor = Server; // Re-add constructor which got lost when setting prototype -Object.setPrototypeOf(Server, EventEmitter); -Server.prototype.ref = function () { - this._unref = false; - this[serverSymbol]?.ref?.(); - return this; -}; +Server.prototype = { + ref() { + this._unref = false; + this[serverSymbol]?.ref?.(); + return this; + }, -Server.prototype.unref = function () { - this._unref = true; - this[serverSymbol]?.unref?.(); - return this; -}; + unref() { + this._unref = true; + this[serverSymbol]?.unref?.(); + return this; + }, -Server.prototype.closeAllConnections = function () { - const server = this[serverSymbol]; - if (!server) { - return; - } - this[serverSymbol] = undefined; - server.stop(true); - this.emit("close"); -}; - -Server.prototype.closeIdleConnections = function () { - // not actually implemented -}; - -Server.prototype.close = function (optionalCallback?) 
{ - const server = this[serverSymbol]; - if (!server) { - if (typeof optionalCallback === "function") process.nextTick(optionalCallback, new Error("Server is not running")); - return; - } - this[serverSymbol] = undefined; - if (typeof optionalCallback === "function") this.once("close", optionalCallback); - server.stop(); - this.emit("close"); -}; - -Server.prototype[Symbol.asyncDispose] = function () { - const { resolve, reject, promise } = Promise.withResolvers(); - this.close(function (err, ...args) { - if (err) reject(err); - else resolve(...args); - }); - return promise; -}; - -Server.prototype.address = function () { - if (!this[serverSymbol]) return null; - return this[serverSymbol].address; -}; - -Server.prototype.listen = function (port, host, backlog, onListen) { - const server = this; - let socketPath; - if (typeof port == "string" && !Number.isSafeInteger(Number(port))) { - socketPath = port; - } - if (typeof host === "function") { - onListen = host; - host = undefined; - } - - if (typeof port === "function") { - onListen = port; - } else if (typeof port === "object") { - port?.signal?.addEventListener("abort", () => { - this.close(); - }); - - host = port?.host; - port = port?.port; - - if (typeof port?.callback === "function") onListen = port?.callback; - } - - if (typeof backlog === "function") { - onListen = backlog; - } - - const ResponseClass = this[optionsSymbol].ServerResponse || ServerResponse; - const RequestClass = this[optionsSymbol].IncomingMessage || IncomingMessage; - let isHTTPS = false; - - try { - const tls = this[tlsSymbol]; - if (tls) { - this.serverName = tls.serverName || host || "localhost"; + closeAllConnections() { + const server = this[serverSymbol]; + if (!server) { + return; } - this[serverSymbol] = Bun.serve({ - tls, - port, - hostname: host, - unix: socketPath, - // Bindings to be used for WS Server - websocket: { - open(ws) { - ws.data.open(ws); - }, - message(ws, message) { - ws.data.message(ws, message); - }, - close(ws, code, reason) { - ws.data.close(ws, code, reason); - }, - drain(ws) { - ws.data.drain(ws); - }, - ping(ws, data) { - ws.data.ping(ws, data); - }, - pong(ws, data) { - ws.data.pong(ws, data); - }, - }, - maxRequestBodySize: Number.MAX_SAFE_INTEGER, - // Be very careful not to access (web) Request object - // properties: - // - request.url - // - request.headers - // - // We want to avoid triggering the getter for these properties because - // that will cause the data to be cloned twice, which costs memory & performance. - fetch(req, _server) { - var pendingResponse; - var pendingError; - var reject = err => { - if (pendingError) return; - pendingError = err; - if (rejectFunction) rejectFunction(err); - }; + this[serverSymbol] = undefined; + server.stop(true); + process.nextTick(emitCloseNT, this); + }, - var reply = function (resp) { - if (pendingResponse) return; - pendingResponse = resp; - if (resolveFunction) resolveFunction(resp); - }; + closeIdleConnections() { + // not actually implemented + }, - const prevIsNextIncomingMessageHTTPS = isNextIncomingMessageHTTPS; - isNextIncomingMessageHTTPS = isHTTPS; - const http_req = new RequestClass(req); - isNextIncomingMessageHTTPS = prevIsNextIncomingMessageHTTPS; + close(optionalCallback?) 
{ + const server = this[serverSymbol]; + if (!server) { + if (typeof optionalCallback === "function") + process.nextTick(optionalCallback, new Error("Server is not running")); + return; + } + this[serverSymbol] = undefined; + if (typeof optionalCallback === "function") this.once("close", optionalCallback); + server.stop(); + process.nextTick(emitCloseNT, this); + }, - const upgrade = http_req.headers.upgrade; - - const http_res = new ResponseClass(http_req, reply); - - http_req.socket[kInternalSocketData] = [_server, http_res, req]; - server.emit("connection", http_req.socket); - - const rejectFn = err => reject(err); - http_req.once("error", rejectFn); - http_res.once("error", rejectFn); - - if (upgrade) { - server.emit("upgrade", http_req, http_req.socket, kEmptyBuffer); - } else { - server.emit("request", http_req, http_res); - } - - if (pendingError) { - throw pendingError; - } - - if (pendingResponse) { - return pendingResponse; - } - - var { promise, resolve: resolveFunction, reject: rejectFunction } = $newPromiseCapability(GlobalPromise); - return promise; - }, + [Symbol.asyncDispose]() { + const { resolve, reject, promise } = Promise.withResolvers(); + this.close(function (err, ...args) { + if (err) reject(err); + else resolve(...args); }); - isHTTPS = this[serverSymbol].protocol === "https"; + return promise; + }, - if (this?._unref) { - this[serverSymbol]?.unref?.(); + address() { + if (!this[serverSymbol]) return null; + return this[serverSymbol].address; + }, + + listen() { + const server = this; + let port, host, onListen; + let socketPath; + let tls = this[tlsSymbol]; + + // This logic must align with: + // - https://github.com/nodejs/node/blob/2eff28fb7a93d3f672f80b582f664a7c701569fb/lib/net.js#L274-L307 + if (arguments.length > 0) { + if (($isObject(arguments[0]) || $isCallable(arguments[0])) && arguments[0] !== null) { + // (options[...][, cb]) + port = arguments[0].port; + host = arguments[0].host; + socketPath = arguments[0].path; + + const otherTLS = arguments[0].tls; + if (otherTLS && $isObject(otherTLS)) { + tls = otherTLS; + } + } else if (typeof arguments[0] === "string" && !(Number(arguments[0]) >= 0)) { + // (path[...][, cb]) + socketPath = arguments[0]; + } else { + // ([port][, host][...][, cb]) + port = arguments[0]; + if (arguments.length > 1 && typeof arguments[1] === "string") { + host = arguments[1]; + } + } } - setTimeout(emitListeningNextTick, 1, this, onListen, null, this[serverSymbol].hostname, this[serverSymbol].port); - } catch (err) { - server.emit("error", err); - } + // Bun defaults to port 3000. + // Node defaults to port 0. 
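+      // e.g. `server.listen(cb)` is treated like `server.listen(0, cb)`: the OS assigns an ephemeral port, later readable via `server.address().port`.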
+ if (port === undefined && !socketPath) { + port = 0; + } - return this; -}; + if ($isCallable(arguments[arguments.length - 1])) { + onListen = arguments[arguments.length - 1]; + } -Server.prototype.setTimeout = function (msecs, callback) { - // TODO: - return this; + const ResponseClass = this[optionsSymbol].ServerResponse || ServerResponse; + const RequestClass = this[optionsSymbol].IncomingMessage || IncomingMessage; + let isHTTPS = false; + + try { + if (tls) { + this.serverName = tls.serverName || host || "localhost"; + } + this[serverSymbol] = Bun.serve({ + tls, + port, + hostname: host, + unix: socketPath, + // Bindings to be used for WS Server + websocket: { + open(ws) { + ws.data.open(ws); + }, + message(ws, message) { + ws.data.message(ws, message); + }, + close(ws, code, reason) { + ws.data.close(ws, code, reason); + }, + drain(ws) { + ws.data.drain(ws); + }, + ping(ws, data) { + ws.data.ping(ws, data); + }, + pong(ws, data) { + ws.data.pong(ws, data); + }, + }, + maxRequestBodySize: Number.MAX_SAFE_INTEGER, + // Be very careful not to access (web) Request object + // properties: + // - request.url + // - request.headers + // + // We want to avoid triggering the getter for these properties because + // that will cause the data to be cloned twice, which costs memory & performance. + fetch(req, _server) { + var pendingResponse; + var pendingError; + var reject = err => { + if (pendingError) return; + pendingError = err; + if (rejectFunction) rejectFunction(err); + }; + + var reply = function (resp) { + if (pendingResponse) return; + pendingResponse = resp; + if (resolveFunction) resolveFunction(resp); + }; + + const prevIsNextIncomingMessageHTTPS = isNextIncomingMessageHTTPS; + isNextIncomingMessageHTTPS = isHTTPS; + const http_req = new RequestClass(req); + isNextIncomingMessageHTTPS = prevIsNextIncomingMessageHTTPS; + + const upgrade = http_req.headers.upgrade; + + const http_res = new ResponseClass(http_req, reply); + + http_req.socket[kInternalSocketData] = [_server, http_res, req]; + server.emit("connection", http_req.socket); + + const rejectFn = err => reject(err); + http_req.once("error", rejectFn); + http_res.once("error", rejectFn); + + if (upgrade) { + server.emit("upgrade", http_req, http_req.socket, kEmptyBuffer); + } else { + server.emit("request", http_req, http_res); + } + + if (pendingError) { + throw pendingError; + } + + if (pendingResponse) { + return pendingResponse; + } + + var { promise, resolve: resolveFunction, reject: rejectFunction } = $newPromiseCapability(GlobalPromise); + return promise; + }, + }); + isHTTPS = this[serverSymbol].protocol === "https"; + + if (this?._unref) { + this[serverSymbol]?.unref?.(); + } + + if ($isCallable(onListen)) { + this.once("listening", onListen); + } + + setTimeout(emitListeningNextTick, 1, this, this[serverSymbol].hostname, this[serverSymbol].port); + } catch (err) { + server.emit("error", err); + } + + return this; + }, + + setTimeout(msecs, callback) { + // TODO: + return this; + }, + + constructor: Server, }; +$setPrototypeDirect.$call(Server.prototype, EventEmitter.prototype); +$setPrototypeDirect.$call(Server, EventEmitter); function assignHeadersSlow(object, req) { const headers = req.headers; @@ -664,10 +692,13 @@ function assignHeadersSlow(object, req) { function assignHeaders(object, req) { // This fast path is an 8% speedup for a "hello world" node:http server, and a 7% speedup for a "hello world" express server - const tuple = assignHeadersFast(req, object); - if (tuple !== null) { - object.headers = 
$getInternalField(tuple, 0); - object.rawHeaders = $getInternalField(tuple, 1); + if (assignHeadersFast(req, object, headersTuple)) { + const headers = $getInternalField(headersTuple, 0); + const rawHeaders = $getInternalField(headersTuple, 1); + $putInternalField(headersTuple, 0, undefined); + $putInternalField(headersTuple, 1, undefined); + object.headers = headers; + object.rawHeaders = rawHeaders; return true; } else { assignHeadersSlow(object, req); @@ -738,133 +769,172 @@ function IncomingMessage(req, defaultIncomingOpts) { this.complete = !!this[noBodySymbol]; } -Object.setPrototypeOf((IncomingMessage.prototype = {}), Readable.prototype); -IncomingMessage.prototype.constructor = IncomingMessage; // Re-add constructor which got lost when setting prototype -Object.setPrototypeOf(IncomingMessage, Readable); - -IncomingMessage.prototype._construct = function (callback) { - // TODO: streaming - if (this[typeSymbol] === "response" || this[noBodySymbol]) { - callback(); - return; - } - - const contentLength = this.headers["content-length"]; - const length = contentLength ? parseInt(contentLength, 10) : 0; - if (length === 0) { - this[noBodySymbol] = true; - callback(); - return; - } - - callback(); -}; - -async function consumeStream(self, reader: ReadableStreamDefaultReader) { - while (true) { - var { done, value } = await reader.readMany(); - if (self[abortedSymbol]) return; - if (done) { - self.complete = true; - self.push(null); - break; - } - for (var v of value) { - self.push(v); - } - } -} - -IncomingMessage.prototype._read = function (size) { - if (this[noBodySymbol]) { - this.complete = true; - this.push(null); - } else if (this[bodyStreamSymbol] == null) { - const reader = this[reqSymbol].body?.getReader() as ReadableStreamDefaultReader; - if (!reader) { - this.complete = true; - this.push(null); +IncomingMessage.prototype = { + constructor: IncomingMessage, + _construct(callback) { + // TODO: streaming + if (this[typeSymbol] === "response" || this[noBodySymbol]) { + callback(); return; } - this[bodyStreamSymbol] = reader; - consumeStream(this, reader); - } -}; -Object.defineProperty(IncomingMessage.prototype, "aborted", { - get() { + const contentLength = this.headers["content-length"]; + const length = contentLength ? parseInt(contentLength, 10) : 0; + if (length === 0) { + this[noBodySymbol] = true; + callback(); + return; + } + + callback(); + }, + _read(size) { + if (this[noBodySymbol]) { + this.complete = true; + this.push(null); + } else if (this[bodyStreamSymbol] == null) { + const reader = this[reqSymbol].body?.getReader() as ReadableStreamDefaultReader; + if (!reader) { + this.complete = true; + this.push(null); + return; + } + this[bodyStreamSymbol] = reader; + consumeStream(this, reader); + } + }, + _destroy(err, cb) { + if (!this.readableEnded || !this.complete) { + this[abortedSymbol] = true; + // IncomingMessage emits 'aborted'. + // Client emits 'abort'. 
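+      // (the client side is handled by ClientRequest.abort() further down in this file, which emits 'abort' on the next tick via emitAbortNextTick)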
+ this.emit("aborted"); + } + + // Suppress "AbortError" from fetch() because we emit this in the 'aborted' event + if (isAbortError(err)) { + err = undefined; + } + + const stream = this[bodyStreamSymbol]; + this[bodyStreamSymbol] = undefined; + const streamState = stream?.$state; + + if (streamState === $streamReadable || streamState === $streamWaiting || streamState === $streamWritable) { + stream?.cancel?.().catch(nop); + } + + const socket = this[fakeSocketSymbol]; + if (socket) { + socket.destroy(err); + } + + if (cb) { + emitErrorNextTick(this, err, cb); + } + }, + get aborted() { return this[abortedSymbol]; }, -}); - -Object.defineProperty(IncomingMessage.prototype, "connection", { - get() { + set aborted(value) { + this[abortedSymbol] = value; + }, + get connection() { return (this[fakeSocketSymbol] ??= new FakeSocket()); }, -}); - -Object.defineProperty(IncomingMessage.prototype, "statusCode", { - get() { + get statusCode() { return this[reqSymbol].status; }, - set(v) { - if (!(v in STATUS_CODES)) return; - this[reqSymbol].status = v; + set statusCode(value) { + if (!(value in STATUS_CODES)) return; + this[reqSymbol].status = value; }, -}); - -Object.defineProperty(IncomingMessage.prototype, "statusMessage", { - get() { + get statusMessage() { return STATUS_CODES[this[reqSymbol].status]; }, - set(v) { - //noop + set statusMessage(value) { + // noop }, -}); - -Object.defineProperty(IncomingMessage.prototype, "httpVersion", { - get() { + get httpVersion() { return "1.1"; }, -}); - -Object.defineProperty(IncomingMessage.prototype, "rawTrailers", { - get() { + set httpVersion(value) { + // noop + }, + get httpVersionMajor() { + return 1; + }, + set httpVersionMajor(value) { + // noop + }, + get httpVersionMinor() { + return 1; + }, + set httpVersionMinor(value) { + // noop + }, + get rawTrailers() { return []; }, -}); - -Object.defineProperty(IncomingMessage.prototype, "httpVersionMajor", { - get() { - return 1; + set rawTrailers(value) { + // noop }, -}); - -Object.defineProperty(IncomingMessage.prototype, "httpVersionMinor", { - get() { - return 1; - }, -}); - -Object.defineProperty(IncomingMessage.prototype, "trailers", { - get() { + get trailers() { return kEmptyObject; }, -}); - -Object.defineProperty(IncomingMessage.prototype, "socket", { - get() { + set trailers(value) { + // noop + }, + setTimeout(msecs, callback) { + // noop + return this; + }, + get socket() { return (this[fakeSocketSymbol] ??= new FakeSocket()); }, - set(val) { - this[fakeSocketSymbol] = val; + set socket(value) { + this[fakeSocketSymbol] = value; }, -}); - -IncomingMessage.prototype.setTimeout = function (msecs, callback) { - // TODO: - return this; }; +$setPrototypeDirect.$call(IncomingMessage.prototype, Readable.prototype); +$setPrototypeDirect.$call(IncomingMessage, Readable); + +async function consumeStream(self, reader: ReadableStreamDefaultReader) { + var done = false, + value, + aborted = false; + try { + while (true) { + const result = reader.readMany(); + if ($isPromise(result)) { + ({ done, value } = await result); + } else { + ({ done, value } = result); + } + + if (self.destroyed || (aborted = self[abortedSymbol])) { + break; + } + for (var v of value) { + self.push(v); + } + + if (self.destroyed || (aborted = self[abortedSymbol]) || done) { + break; + } + } + } catch (err) { + if (aborted || self.destroyed) return; + self.destroy(err); + } finally { + reader?.cancel?.().catch?.(nop); + } + + if (!self.complete) { + self.complete = true; + self.push(null); + } +} const headersSymbol = 
Symbol("headers"); const finishedSymbol = Symbol("finished"); @@ -879,9 +949,9 @@ function OutgoingMessage(options) { this[kAbortController] = null; } -Object.setPrototypeOf((OutgoingMessage.prototype = {}), Writable.prototype); +$setPrototypeDirect.$call((OutgoingMessage.prototype = {}), Writable.prototype); OutgoingMessage.prototype.constructor = OutgoingMessage; // Re-add constructor which got lost when setting prototype -Object.setPrototypeOf(OutgoingMessage, Writable); +$setPrototypeDirect.$call(OutgoingMessage, Writable); // Express "compress" package uses this OutgoingMessage.prototype._implicitHeader = function () {}; @@ -1083,9 +1153,9 @@ function ServerResponse(req, reply) { // https://github.com/nodejs/node/blob/cf8c6994e0f764af02da4fa70bc5962142181bf3/lib/_http_server.js#L192 if (req.method === "HEAD") this._hasBody = false; } -Object.setPrototypeOf((ServerResponse.prototype = {}), OutgoingMessage.prototype); +$setPrototypeDirect.$call((ServerResponse.prototype = {}), OutgoingMessage.prototype); ServerResponse.prototype.constructor = ServerResponse; // Re-add constructor which got lost when setting prototype -Object.setPrototypeOf(ServerResponse, OutgoingMessage); +$setPrototypeDirect.$call(ServerResponse, OutgoingMessage); // Express "compress" package uses this ServerResponse.prototype._implicitHeader = function () { @@ -1376,10 +1446,7 @@ class ClientRequest extends OutgoingMessage { this.destroyed = true; // If request is destroyed we abort the current response this[kAbortController]?.abort?.(); - if (err) { - this.emit("error", err); - } - callback(); + emitErrorNextTick(this, err, callback); } _ensureTls() { @@ -1390,11 +1457,16 @@ class ClientRequest extends OutgoingMessage { _final(callback) { this.#finished = true; this[kAbortController] = new AbortController(); - this[kAbortController].signal.addEventListener("abort", () => { - this.emit("abort"); - this[kClearTimeout](); - this.destroy(); - }); + this[kAbortController].signal.addEventListener( + "abort", + () => { + this[kClearTimeout]?.(); + if (this.destroyed) return; + this.emit("abort"); + this.destroy(); + }, + { once: true }, + ); if (this.#signal?.aborted) { this[kAbortController].abort(); } @@ -1451,6 +1523,10 @@ class ClientRequest extends OutgoingMessage { //@ts-ignore this.#fetchRequest = fetch(url, fetchOptions) .then(response => { + if (this.aborted) { + return; + } + const prevIsHTTPS = isNextIncomingMessageHTTPS; isNextIncomingMessageHTTPS = response.url.startsWith("https:"); var res = (this.#res = new IncomingMessage(response, { @@ -1463,7 +1539,7 @@ class ClientRequest extends OutgoingMessage { .catch(err => { // Node treats AbortError separately. 
// The "abort" listener on the abort controller should have called this - if (err?.name === "AbortError") { + if (isAbortError(err)) { return; } @@ -1485,13 +1561,19 @@ class ClientRequest extends OutgoingMessage { } get aborted() { - return this.#signal?.aborted || !!this[kAbortController]?.signal.aborted; + return this[abortedSymbol] || this.#signal?.aborted || !!this[kAbortController]?.signal.aborted; + } + + set aborted(value) { + this[abortedSymbol] = value; } abort() { if (this.aborted) return; + this[abortedSymbol] = true; + process.nextTick(emitAbortNextTick, this); this[kAbortController]?.abort?.(); - // TODO: Close stream if body streaming + this.destroy(); } constructor(input, options, cb) { @@ -2098,6 +2180,22 @@ function get(url, options, cb) { return req; } +function onError(self, error, cb) { + if (error) { + cb(error); + } else { + cb(); + } +} + +function emitErrorNextTick(self, err, cb) { + process.nextTick(onError, self, err, cb); +} + +function emitAbortNextTick(self) { + self.emit("abort"); +} + var globalAgent = new Agent(); export default { Agent, diff --git a/src/js/node/stream.ts b/src/js/node/stream.ts index ab4a9dffc1..961bdb7228 100644 --- a/src/js/node/stream.ts +++ b/src/js/node/stream.ts @@ -27,6 +27,7 @@ const kPaused = Symbol("kPaused"); // END moved from require_readable const StringDecoder = require("node:string_decoder").StringDecoder; +const transferToNativeReadable = $newCppFunction("ReadableStream.cpp", "jsFunctionTransferToNativeReadableStream", 1); const ObjectSetPrototypeOf = Object.setPrototypeOf; @@ -5720,7 +5721,7 @@ function createNativeStreamReadable(Readable) { ProcessNextTick(() => { this.push(null); }); - return view?.byteLength ?? 0 > 0 ? view : undefined; + return (view?.byteLength ?? 0 > 0) ? view : undefined; } else if ($isTypedArrayView(result)) { if (result.byteLength >= this[highWaterMark] && !this[hasResized] && !isClosed) { this[_adjustHighWaterMark](); @@ -5819,9 +5820,13 @@ function getNativeReadableStream(Readable, stream, options) { $assert(typeof ptr === "object", "Invalid native ptr"); const NativeReadable = getNativeReadableStreamPrototype(type, Readable); - stream.$bunNativePtr = -1; - stream.$bunNativeType = 0; - stream.$disturbed = true; + // https://github.com/oven-sh/bun/pull/12801 + // https://github.com/oven-sh/bun/issues/9555 + // There may be a ReadableStream.Strong handle to the ReadableStream. 
+ // We can't update those handles to point to the NativeReadable from JS + // So we instead mark it as no longer usable, and create a new NativeReadable + transferToNativeReadable(stream); + return new NativeReadable(ptr, options); } diff --git a/src/js/node/util.ts b/src/js/node/util.ts index c237a1915f..eb7db12985 100644 --- a/src/js/node/util.ts +++ b/src/js/node/util.ts @@ -2,6 +2,11 @@ const types = require("node:util/types"); /** @type {import('node-inspect-extracted')} */ const utl = require("internal/util/inspect"); +const { ERR_INVALID_ARG_TYPE, ERR_OUT_OF_RANGE } = require("internal/errors"); + +const internalErrorName = $newZigFunction("node_util_binding.zig", "internalErrorName", 1); + +const NumberIsSafeInteger = Number.isSafeInteger; var cjs_exports = {}; @@ -280,6 +285,12 @@ function styleText(format, text) { return `\u001b[${formatCodes[0]}m${text}\u001b[${formatCodes[1]}m`; } +function getSystemErrorName(err: any) { + if (typeof err !== "number") throw ERR_INVALID_ARG_TYPE("err", "number", err); + if (err >= 0 || !NumberIsSafeInteger(err)) throw ERR_OUT_OF_RANGE("err", "a negative integer", err); + return internalErrorName(err); +} + export default Object.assign(cjs_exports, { format, formatWithOptions, @@ -315,4 +326,5 @@ export default Object.assign(cjs_exports, { TextEncoder, parseArgs, styleText, + getSystemErrorName, }); diff --git a/src/js/node/v8.ts b/src/js/node/v8.ts index ff1cf471fe..d70a7bc73c 100644 --- a/src/js/node/v8.ts +++ b/src/js/node/v8.ts @@ -160,4 +160,6 @@ hideFromStack( DefaultDeserializer, DefaultSerializer, GCProfiler, + DefaultDeserializer, + DefaultSerializer, ); diff --git a/src/js/thirdparty/node-fetch.ts b/src/js/thirdparty/node-fetch.ts index 788ee6600f..d2b5831690 100644 --- a/src/js/thirdparty/node-fetch.ts +++ b/src/js/thirdparty/node-fetch.ts @@ -1,13 +1,13 @@ import type * as s from "stream"; -const { - Headers: WebHeaders, - Request: WebRequest, - Response: WebResponse, - Blob, - File = Blob, - FormData, -} = globalThis as any; +// Users may override the global fetch implementation, so we need to ensure these are the originals. +const bindings = $cpp("NodeFetch.cpp", "createNodeFetchInternalBinding"); +const WebResponse: typeof globalThis.Response = bindings[0]; +const WebRequest: typeof globalThis.Request = bindings[1]; +const Blob: typeof globalThis.Blob = bindings[2]; +const WebHeaders: typeof globalThis.Headers = bindings[3]; +const FormData: typeof globalThis.FormData = bindings[4]; +const File: typeof globalThis.File = bindings[5]; const nativeFetch = Bun.fetch; // node-fetch extends from URLSearchParams in their implementation... 
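// To illustrate the motivation (a sketch, not part of the patch): even if a user assigns `globalThis.Response = class Fake {}`, `new WebResponse("ok") instanceof Fake` is false, because WebResponse above still refers to the engine's original constructor.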
diff --git a/src/js/thirdparty/undici.js b/src/js/thirdparty/undici.js index ec0096ff84..b194df4db5 100644 --- a/src/js/thirdparty/undici.js +++ b/src/js/thirdparty/undici.js @@ -7,19 +7,22 @@ const ObjectCreate = Object.create; const kEmptyObject = ObjectCreate(null); var fetch = Bun.fetch; -var Response = globalThis.Response; -var Headers = globalThis.Headers; -var Request = globalThis.Request; -var URLSearchParams = globalThis.URLSearchParams; -var URL = globalThis.URL; -class File extends Blob {} +const bindings = $cpp("Undici.cpp", "createUndiciInternalBinding"); +const Response = bindings[0]; +const Request = bindings[1]; +const Headers = bindings[2]; +const FormData = bindings[3]; +const File = bindings[4]; +const URL = bindings[5]; +const AbortSignal = bindings[6]; +const URLSearchParams = bindings[7]; + class FileReader extends EventTarget { constructor() { throw new Error("Not implemented yet!"); } } -var FormData = globalThis.FormData; function notImplemented() { throw new Error("Not implemented in bun"); } @@ -301,28 +304,28 @@ Undici.buildConnector = Undici.fetch = fetch; export default { + Agent, + BalancedPool, + Client, + connect, + Dispatcher, fetch, - Response, - Headers, - Request, - URLSearchParams, - URL, File, FileReader, FormData, - request, - stream, - pipeline, - connect, - upgrade, - MockClient, - MockPool, + Headers, MockAgent, + MockClient, mockErrors, - Dispatcher, + MockPool, + pipeline, Pool, - BalancedPool, - Client, - Agent, + request, + Request, + Response, + stream, Undici, + upgrade, + URL, + URLSearchParams, }; diff --git a/src/js_ast.zig b/src/js_ast.zig index 8345e07cdb..323ccbda76 100644 --- a/src/js_ast.zig +++ b/src/js_ast.zig @@ -776,7 +776,7 @@ pub const G = struct { switch (val.data) { .e_arrow, .e_function => {}, else => { - if (!val.canBeConstValue()) { + if (!val.canBeMoved()) { return false; } }, @@ -2774,10 +2774,10 @@ pub const E = struct { pub const Import = struct { expr: ExprNodeIndex, + options: ExprNodeIndex = Expr.empty, import_record_index: u32, - // This will be dynamic at some point. - type_attribute: TypeAttribute = .none, + /// TODO: /// Comments inside "import()" expressions have special meaning for Webpack. /// Preserving comments inside these expressions makes it possible to use /// esbuild as a TypeScript-to-JavaScript frontend for Webpack to improve @@ -2785,30 +2785,43 @@ pub const E = struct { /// because esbuild is not Webpack. But we do preserve them since doing so is /// harmless, easy to maintain, and useful to people. See the Webpack docs for /// more info: https://webpack.js.org/api/module-methods/#magic-comments. 
- /// TODO: - leading_interior_comments: []G.Comment = &([_]G.Comment{}), + // leading_interior_comments: []G.Comment = &([_]G.Comment{}), pub fn isImportRecordNull(this: *const Import) bool { return this.import_record_index == std.math.maxInt(u32); } - pub const TypeAttribute = enum { - none, - json, - toml, - text, - file, + pub fn importRecordTag(import: *const Import) ?ImportRecord.Tag { + const obj = import.options.data.as(.e_object) orelse + return null; + const with = obj.get("with") orelse obj.get("assert") orelse + return null; + const with_obj = with.data.as(.e_object) orelse + return null; + const str = (with_obj.get("type") orelse + return null).data.as(.e_string) orelse + return null; - pub fn tag(this: TypeAttribute) ImportRecord.Tag { - return switch (this) { - .none => .none, - .json => .with_type_json, - .toml => .with_type_toml, - .text => .with_type_text, - .file => .with_type_file, + if (str.eqlComptime("json")) { + return .with_type_json; + } else if (str.eqlComptime("toml")) { + return .with_type_toml; + } else if (str.eqlComptime("text")) { + return .with_type_text; + } else if (str.eqlComptime("file")) { + return .with_type_file; + } else if (str.eqlComptime("sqlite")) { + const embed = brk: { + const embed = with_obj.get("embed") orelse break :brk false; + const embed_str = embed.data.as(.e_string) orelse break :brk false; + break :brk embed_str.eqlComptime("true"); }; + + return if (embed) .with_type_sqlite_embedded else .with_type_sqlite; } - }; + + return null; + } }; }; @@ -3018,6 +3031,10 @@ pub const Stmt = struct { }; } + pub fn allocateExpr(allocator: std.mem.Allocator, expr: Expr) Stmt { + return Stmt.allocate(allocator, S.SExpr, S.SExpr{ .value = expr }, expr.loc); + } + pub const Tag = enum(u6) { s_block, s_break, @@ -3244,10 +3261,15 @@ pub const Expr = struct { else => true, }; } + pub fn canBeConstValue(this: Expr) bool { return this.data.canBeConstValue(); } + pub fn canBeMoved(expr: Expr) bool { + return expr.data.canBeMoved(); + } + pub fn unwrapInlined(expr: Expr) Expr { if (expr.data.as(.e_inlined_enum)) |inlined| return inlined.value; return expr; @@ -5461,9 +5483,8 @@ pub const Expr = struct { .e_import => |el| { const item = bun.create(allocator, E.Import, .{ .expr = try el.expr.deepClone(allocator), + .options = try el.options.deepClone(allocator), .import_record_index = el.import_record_index, - .type_attribute = el.type_attribute, - .leading_interior_comments = el.leading_interior_comments, }); return .{ .e_import = item }; }, @@ -5495,9 +5516,17 @@ pub const Expr = struct { }; } + /// "const values" here refers to expressions that can participate in constant + /// inlining, as they have no side effects on instantiation, and there would be + /// no observable difference if duplicated. This is a subset of canBeMoved() pub fn canBeConstValue(this: Expr.Data) bool { return switch (this) { - .e_number, .e_boolean, .e_null, .e_undefined => true, + .e_number, + .e_boolean, + .e_null, + .e_undefined, + .e_inlined_enum, + => true, .e_string => |str| str.next == null, .e_array => |array| array.was_originally_macro, .e_object => |object| object.was_originally_macro, @@ -5505,6 +5534,39 @@ pub const Expr = struct { }; } + /// Expressions that can be moved are those that do not have side + /// effects on their own. This is used to determine what can be moved + /// outside of a module wrapper (__esm/__commonJS). 
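+        /// For example, `var a = 123` or `var f = () => {}` may be hoisted out of the wrapper, while `var b = sideEffect()` may not, because the call has to run inside the wrapper.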
+ pub fn canBeMoved(data: Expr.Data) bool { + return switch (data) { + .e_class => |class| class.canBeMoved(), + + .e_arrow, + .e_function, + + .e_number, + .e_boolean, + .e_null, + .e_undefined, + // .e_reg_exp, + .e_big_int, + .e_string, + .e_inlined_enum, + .e_import_meta, + .e_utf8_string, + => true, + + .e_template => |template| template.parts.len == 0, + + .e_array => |array| array.was_originally_macro, + .e_object => |object| object.was_originally_macro, + + // TODO: experiment with allowing some e_binary, e_unary, e_if as movable + + else => false, + }; + } + pub fn knownPrimitive(data: Expr.Data) PrimitiveType { return switch (data) { .e_big_int => .bigint, @@ -6045,11 +6107,7 @@ pub const S = struct { pub fn canBeMoved(self: *const ExportDefault) bool { return switch (self.value) { - .expr => |e| switch (e.data) { - .e_class => |class| class.canBeMoved(), - .e_arrow, .e_function => true, - else => e.canBeConstValue(), - }, + .expr => |e| e.canBeMoved(), .stmt => |s| switch (s.data) { .s_class => |class| class.class.canBeMoved(), .s_function => true, diff --git a/src/js_lexer.zig b/src/js_lexer.zig index 681439daed..7e7a41298e 100644 --- a/src/js_lexer.zig +++ b/src/js_lexer.zig @@ -32,7 +32,7 @@ pub const TypeScriptAccessibilityModifier = tables.TypeScriptAccessibilityModifi pub const ChildlessJSXTags = tables.ChildlessJSXTags; fn notimpl() noreturn { - Global.panic("not implemented yet!", .{}); + Output.panic("not implemented yet!", .{}); } pub var emptyJavaScriptString = ([_]u16{0}); diff --git a/src/js_parser.zig b/src/js_parser.zig index 47b46c2e52..5cba46d738 100644 --- a/src/js_parser.zig +++ b/src/js_parser.zig @@ -451,36 +451,29 @@ const VisitArgsOpts = struct { is_unique_formal_parameters: bool = false, }; -const BunJSX = struct { - pub threadlocal var bun_jsx_identifier: E.Identifier = undefined; -}; pub fn ExpressionTransposer( - comptime Kontext: type, - comptime visitor: fn (ptr: *Kontext, arg: Expr, state: anytype) Expr, + comptime ContextType: type, + comptime StateType: type, + comptime visitor: fn (ptr: *ContextType, arg: Expr, state: StateType) Expr, ) type { return struct { - pub const Context = Kontext; + pub const Context = ContextType; pub const This = @This(); + context: *Context, pub fn init(c: *Context) This { - return This{ - .context = c, - }; + return .{ .context = c }; } - pub fn maybeTransposeIf(self: *This, arg: Expr, state: anytype) Expr { + pub fn maybeTransposeIf(self: *This, arg: Expr, state: StateType) Expr { switch (arg.data) { .e_if => |ex| { - return Expr.init( - E.If, - E.If{ - .yes = self.maybeTransposeIf(ex.yes, state), - .no = self.maybeTransposeIf(ex.no, state), - .test_ = ex.test_, - }, - arg.loc, - ); + return Expr.init(E.If, .{ + .yes = self.maybeTransposeIf(ex.yes, state), + .no = self.maybeTransposeIf(ex.no, state), + .test_ = ex.test_, + }, arg.loc); }, else => { return visitor(self.context, arg, state); @@ -488,16 +481,12 @@ pub fn ExpressionTransposer( } } - pub fn transposeKnownToBeIf(self: *This, arg: Expr, state: anytype) Expr { - return Expr.init( - E.If, - E.If{ - .yes = self.maybeTransposeIf(arg.data.e_if.yes, state), - .no = self.maybeTransposeIf(arg.data.e_if.no, state), - .test_ = arg.data.e_if.test_, - }, - arg.loc, - ); + pub fn transposeKnownToBeIf(self: *This, arg: Expr, state: StateType) Expr { + return Expr.init(E.If, .{ + .yes = self.maybeTransposeIf(arg.data.e_if.yes, state), + .no = self.maybeTransposeIf(arg.data.e_if.no, state), + .test_ = arg.data.e_if.test_, + }, arg.loc); } }; } @@ -517,7 +506,8 @@ const 
TransposeState = struct { is_then_catch_target: bool = false, is_require_immediately_assigned_to_decl: bool = false, loc: logger.Loc = logger.Loc.Empty, - type_attribute: E.Import.TypeAttribute = .none, + import_record_tag: ?ImportRecord.Tag = null, + import_options: Expr = Expr.empty, }; var true_args = &[_]Expr{ @@ -2746,7 +2736,7 @@ pub const StmtsKind = enum { }; fn notimpl() noreturn { - Global.panic("Not implemented yet!!", .{}); + Output.panic("Not implemented yet!!", .{}); } const ExprBindingTuple = struct { @@ -5327,9 +5317,9 @@ fn NewParser_( return p.options.bundle and p.source.index.isRuntime(); } - pub fn transposeImport(p: *P, arg: Expr, state: anytype) Expr { + pub fn transposeImport(p: *P, arg: Expr, state: *const TransposeState) Expr { // The argument must be a string - if (@as(Expr.Tag, arg.data) == .e_string) { + if (arg.data.as(.e_string)) |str| { // Ignore calls to import() if the control flow is provably dead here. // We don't want to spend time scanning the required files if they will // never be used. @@ -5337,18 +5327,19 @@ fn NewParser_( return p.newExpr(E.Null{}, arg.loc); } - const import_record_index = p.addImportRecord(.dynamic, arg.loc, arg.data.e_string.slice(p.allocator)); + const import_record_index = p.addImportRecord(.dynamic, arg.loc, str.slice(p.allocator)); - if (state.type_attribute.tag() != .none) { - p.import_records.items[import_record_index].tag = state.type_attribute.tag(); + if (state.import_record_tag) |tag| { + p.import_records.items[import_record_index].tag = tag; } + p.import_records.items[import_record_index].handles_import_errors = (state.is_await_target and p.fn_or_arrow_data_visit.try_body_count != 0) or state.is_then_catch_target; p.import_records_for_current_part.append(p.allocator, import_record_index) catch unreachable; + return p.newExpr(E.Import{ .expr = arg, - .import_record_index = Ref.toInt(import_record_index), - .type_attribute = state.type_attribute, - // .leading_interior_comments = arg.getString(). + .import_record_index = @intCast(import_record_index), + .options = state.import_options, }, state.loc); } @@ -5360,12 +5351,12 @@ fn NewParser_( return p.newExpr(E.Import{ .expr = arg, + .options = state.import_options, .import_record_index = std.math.maxInt(u32), - .type_attribute = state.type_attribute, }, state.loc); } - pub fn transposeRequireResolve(p: *P, arg: Expr, require_resolve_ref: anytype) Expr { + pub fn transposeRequireResolve(p: *P, arg: Expr, require_resolve_ref: Expr) Expr { // The argument must be a string if (arg.data == .e_string) { return p.transposeRequireResolveKnownString(arg); @@ -5402,14 +5393,14 @@ fn NewParser_( return p.newExpr( E.RequireResolveString{ - .import_record_index = Ref.toInt(import_record_index), + .import_record_index = import_record_index, // .leading_interior_comments = arg.getString(). }, arg.loc, ); } - pub fn transposeRequire(p: *P, arg: Expr, state: anytype) Expr { + pub fn transposeRequire(p: *P, arg: Expr, state: *const TransposeState) Expr { if (!p.options.features.allow_runtime) { const args = p.allocator.alloc(Expr, 1) catch bun.outOfMemory(); args[0] = arg; @@ -5437,11 +5428,12 @@ fn NewParser_( const handles_import_errors = p.fn_or_arrow_data_visit.try_body_count != 0; - if ( // For unwrapping CommonJS into ESM to fully work // we must also unwrap requires into imports. 
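// e.g. a top-level `const dep = require("dep")` can then be recorded as an import record for the bundler instead of staying a runtime require() call (an illustrative reading of the condition below).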
- (p.unwrap_all_requires or p.options.features.shouldUnwrapRequire(path.packageName() orelse "")) and + const should_unwrap_require = p.unwrap_all_requires or + if (path.packageName()) |pkg| p.options.features.shouldUnwrapRequire(pkg) else false; + if (should_unwrap_require and // We cannot unwrap a require wrapped in a try/catch because // import statements cannot be wrapped in a try/catch and // require cannot return a promise. @@ -5725,9 +5717,9 @@ fn NewParser_( } } - const ImportTransposer = ExpressionTransposer(P, P.transposeImport); - const RequireTransposer = ExpressionTransposer(P, P.transposeRequire); - const RequireResolveTransposer = ExpressionTransposer(P, P.transposeRequireResolve); + const ImportTransposer = ExpressionTransposer(P, *const TransposeState, P.transposeImport); + const RequireTransposer = ExpressionTransposer(P, *const TransposeState, P.transposeRequire); + const RequireResolveTransposer = ExpressionTransposer(P, Expr, P.transposeRequireResolve); const Binding2ExprWrapper = struct { pub const Namespace = Binding.ToExpr(P, P.wrapIdentifierNamespace); @@ -9482,7 +9474,7 @@ fn NewParser_( return Ref{ .inner_index = inner_index, - .source_index = Ref.toInt(p.source.index.get()), + .source_index = @intCast(p.source.index.get()), .tag = .symbol, }; } @@ -11072,7 +11064,6 @@ fn NewParser_( inline .s_namespace, .s_enum => |ns| { if (ns.is_export) { if (p.ref_to_ts_namespace_member.get(ns.name.ref.?)) |member_data| { - bun.assert(member_data == .namespace); try exported_members.put( p.allocator, p.symbols.items[ns.name.ref.?.inner_index].original_name, @@ -11081,11 +11072,11 @@ fn NewParser_( .loc = ns.name.loc, }, ); - // try p.ref_to_ts_namespace_member.put( - // p.allocator, - // id.ref, - // member_data, - // ); + try p.ref_to_ts_namespace_member.put( + p.allocator, + ns.name.ref.?, + member_data, + ); } } }, @@ -11108,8 +11099,7 @@ fn NewParser_( // them entirely from the output. That can cause the namespace itself // to be considered empty and thus be removed. 
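// e.g. `namespace NS { import A = require("./a"); }` compiles to an empty namespace when `A` is only used in type positions.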
var import_equal_count: usize = 0; - const _stmts: []Stmt = stmts.items; - for (_stmts) |stmt| { + for (stmts.items) |stmt| { switch (stmt.data) { .s_local => |local| { if (local.was_ts_import_equals and !local.is_export) { @@ -12902,13 +12892,19 @@ fn NewParser_( } if (@intFromPtr(p.source.contents.ptr) <= @intFromPtr(name.ptr) and (@intFromPtr(name.ptr) + name.len) <= (@intFromPtr(p.source.contents.ptr) + p.source.contents.len)) { - const start = Ref.toInt(@intFromPtr(name.ptr) - @intFromPtr(p.source.contents.ptr)); - const end = Ref.toInt(name.len); - return Ref.initSourceEnd(.{ .source_index = start, .inner_index = end, .tag = .source_contents_slice }); + return Ref.initSourceEnd(.{ + .source_index = @intCast(@intFromPtr(name.ptr) - @intFromPtr(p.source.contents.ptr)), + .inner_index = @intCast(name.len), + .tag = .source_contents_slice, + }); } else { - const inner_index = Ref.toInt(p.allocated_names.items.len); + const inner_index: u31 = @intCast(p.allocated_names.items.len); try p.allocated_names.append(p.allocator, name); - return Ref.init(inner_index, p.source.index.get(), false); + return Ref.init( + inner_index, + p.source.index.get(), + false, + ); } } @@ -14909,7 +14905,7 @@ fn NewParser_( p.log.level = .verbose; p.log.printForLogLevel(panic_stream.writer()) catch unreachable; - Global.panic(fmt ++ "\n{s}", args ++ .{panic_buffer[0..panic_stream.pos]}); + Output.panic(fmt ++ "\n{s}", args ++ .{panic_buffer[0..panic_stream.pos]}); } pub fn parsePrefix(p: *P, level: Level, errors: ?*DeferredErrors, flags: Expr.EFlags) anyerror!Expr { @@ -15645,40 +15641,17 @@ fn NewParser_( const value = try p.parseExpr(.comma); - var type_attribute = E.Import.TypeAttribute.none; - + var import_options = Expr.empty; if (p.lexer.token == .t_comma) { // "import('./foo.json', )" try p.lexer.next(); if (p.lexer.token != .t_close_paren) { - // for now, we silently strip import assertions // "import('./foo.json', { assert: { type: 'json' } })" - const import_expr = try p.parseExpr(.comma); - if (import_expr.data == .e_object) { - if (import_expr.data.e_object.get("with") orelse import_expr.data.e_object.get("assert")) |with| { - if (with.data == .e_object) { - const with_object = with.data.e_object; - if (with_object.get("type")) |field| { - if (field.data == .e_string) { - const str = field.data.e_string; - if (str.eqlComptime("json")) { - type_attribute = .json; - } else if (str.eqlComptime("toml")) { - type_attribute = .toml; - } else if (str.eqlComptime("text")) { - type_attribute = .text; - } else if (str.eqlComptime("file")) { - type_attribute = .file; - } - } - } - } - } - } + import_options = try p.parseExpr(.comma); if (p.lexer.token == .t_comma) { - // "import('./foo.json', { assert: { type: 'json' } }, , )" + // "import('./foo.json', { assert: { type: 'json' } }, )" try p.lexer.next(); } } @@ -15694,18 +15667,20 @@ fn NewParser_( return p.newExpr(E.Import{ .expr = value, - .leading_interior_comments = comments, + // .leading_interior_comments = comments, .import_record_index = import_record_index, - .type_attribute = type_attribute, + .options = import_options, }, loc); } } + _ = comments; // TODO: leading_interior comments + return p.newExpr(E.Import{ .expr = value, - .type_attribute = type_attribute, - .leading_interior_comments = comments, + // .leading_interior_comments = comments, .import_record_index = std.math.maxInt(u32), + .options = import_options, }, loc); } @@ -16216,7 +16191,7 @@ fn NewParser_( } }, else => { - Global.panic("Unexpected type in export default: {any}", .{s2}); + 
Output.panic("Unexpected type in export default: {any}", .{s2}); }, } }, @@ -17511,7 +17486,7 @@ fn NewParser_( var has_proto = false; for (e_.properties.slice()) |*property| { if (property.kind != .spread) { - property.key = p.visitExpr(property.key orelse Global.panic("Expected property key", .{})); + property.key = p.visitExpr(property.key orelse Output.panic("Expected property key", .{})); const key = property.key.?; // Forbid duplicate "__proto__" properties according to the specification if (!property.flags.contains(.is_computed) and @@ -17568,20 +17543,6 @@ fn NewParser_( } }, .e_import => |e_| { - const state = TransposeState{ - // we must check that the await_target is an e_import or it will crash - // example from next.js where not checking causes a panic: - // ``` - // const { - // normalizeLocalePath, - // } = require('../shared/lib/i18n/normalize-locale-path') as typeof import('../shared/lib/i18n/normalize-locale-path') - // ``` - .is_await_target = if (p.await_target != null) p.await_target.? == .e_import and p.await_target.?.e_import == e_ else false, - .is_then_catch_target = p.then_catch_chain.has_catch and std.meta.activeTag(p.then_catch_chain.next_target) == .e_import and expr.data.e_import == p.then_catch_chain.next_target.e_import, - .loc = e_.expr.loc, - .type_attribute = e_.type_attribute, - }; - // We want to forcefully fold constants inside of imports // even when minification is disabled, so that if we have an // import based on a string template, it does not cause a @@ -17595,7 +17556,32 @@ fn NewParser_( p.should_fold_typescript_constant_expressions = true; e_.expr = p.visitExpr(e_.expr); - return p.import_transposer.maybeTransposeIf(e_.expr, state); + e_.options = p.visitExpr(e_.options); + + // Import transposition is able to duplicate the options structure, so + // only perform it if the expression is side effect free. + // + // TODO: make this more like esbuild by emitting warnings that explain + // why this import was not analyzed. 
(see esbuild 'unsupported-dynamic-import') + if (p.exprCanBeRemovedIfUnused(&e_.options)) { + const state = TransposeState{ + .is_await_target = if (p.await_target) |await_target| + await_target == .e_import and await_target.e_import == e_ + else + false, + + .is_then_catch_target = p.then_catch_chain.has_catch and + p.then_catch_chain.next_target == .e_import and + expr.data.e_import == p.then_catch_chain.next_target.e_import, + + .import_options = e_.options, + + .loc = e_.expr.loc, + .import_record_tag = e_.importRecordTag(), + }; + + return p.import_transposer.maybeTransposeIf(e_.expr, &state); + } }, .e_call => |e_| { p.call_target = e_.target.data; @@ -17607,8 +17593,8 @@ fn NewParser_( }; const target_was_identifier_before_visit = e_.target.data == .e_identifier; - e_.target = p.visitExprInOut(e_.target, ExprIn{ - .has_chain_parent = (e_.optional_chain orelse js_ast.OptionalChain.start) == .continuation, + e_.target = p.visitExprInOut(e_.target, .{ + .has_chain_parent = e_.optional_chain == .continuation, }); // Copy the call side effect flag over if this is a known target @@ -17694,17 +17680,18 @@ fn NewParser_( if (e_.args.len == 1) { const first = e_.args.first_(); const state = TransposeState{ - .is_require_immediately_assigned_to_decl = in.is_immediately_assigned_to_decl and first.data == .e_string, + .is_require_immediately_assigned_to_decl = in.is_immediately_assigned_to_decl and + first.data == .e_string, }; switch (first.data) { .e_string => { // require(FOO) => require(FOO) - return p.transposeRequire(first, state); + return p.transposeRequire(first, &state); }, .e_if => { // require(FOO ? '123' : '456') => FOO ? require('123') : require('456') // This makes static analysis later easier - return p.require_transposer.transposeKnownToBeIf(first, state); + return p.require_transposer.transposeKnownToBeIf(first, &state); }, else => {}, } @@ -18666,7 +18653,15 @@ fn NewParser_( name_loc, E.Identifier{ .ref = ref }, name, - identifier_opts, + .{ + .assign_target = identifier_opts.assign_target, + .is_call_target = identifier_opts.is_call_target, + .is_delete_target = identifier_opts.is_delete_target, + + // If this expression is used as the target of a call expression, make + // sure the value of "this" is preserved. + .was_originally_identifier = false, + }, ); } } @@ -20725,7 +20720,7 @@ fn NewParser_( } }, else => { - Global.panic("Unexpected binding type in namespace. This is a bug. {any}", .{binding}); + Output.panic("Unexpected binding type in namespace. This is a bug. {any}", .{binding}); }, } } @@ -22082,14 +22077,12 @@ fn NewParser_( }, .s_function => |data| { if ( - // Hoist module-level functions when - ((FeatureFlags.unwrap_commonjs_to_esm and p.current_scope == p.module_scope and !data.func.flags.contains(.is_export)) or - - // Manually hoist block-level function declarations to preserve semantics. - // This is only done for function declarations that are not generators - // or async functions, since this is a backwards-compatibility hack from - // Annex B of the JavaScript standard. - !p.current_scope.kindStopsHoisting()) and p.symbols.items[data.func.name.?.ref.?.innerIndex()].kind == .hoisted_function) + // Manually hoist block-level function declarations to preserve semantics. + // This is only done for function declarations that are not generators + // or async functions, since this is a backwards-compatibility hack from + // Annex B of the JavaScript standard. 
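+                // e.g. in sloppy mode, `if (cond) { function f() {} }` also creates a function-scoped `f` (undefined until the block runs), which is why it must hoist.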
+ !p.current_scope.kindStopsHoisting() and + p.symbols.items[data.func.name.?.ref.?.innerIndex()].kind == .hoisted_function) { break :list_getter &before; } @@ -22138,11 +22131,11 @@ // Merge the two identifiers back into a single one p.symbols.items[hoisted_ref.innerIndex()].link = name_ref; } - non_fn_stmts.append(stmt) catch unreachable; + non_fn_stmts.append(stmt) catch bun.outOfMemory(); continue; } - const gpe = fn_stmts.getOrPut(name_ref) catch unreachable; + const gpe = fn_stmts.getOrPut(name_ref) catch bun.outOfMemory(); var index = gpe.value_ptr.*; if (!gpe.found_existing) { index = @as(u32, @intCast(let_decls.items.len)); @@ -22167,7 +22160,7 @@ }, data.func.name.?.loc, ), - }) catch unreachable; + }) catch bun.outOfMemory(); } } @@ -23856,17 +23849,15 @@ } const wrapper_ref: Ref = brk: { - if (p.options.bundle) { + if (p.options.bundle and p.needsWrapperRef(parts)) { break :brk p.newSymbol( .other, std.fmt.allocPrint( p.allocator, "require_{any}", - .{ - p.source.fmtIdentifier(), - }, - ) catch unreachable, - ) catch unreachable; + .{p.source.fmtIdentifier()}, + ) catch bun.outOfMemory(), + ) catch bun.outOfMemory(); } break :brk Ref.None; @@ -23927,6 +23918,53 @@ }; } + /// The bundler will generate wrappers to contain top-level side effects using + /// the '__esm' helper. Example: + /// + /// var init_foo = __esm(() => { + /// someExport = Math.random(); + /// }); + /// + /// This wrapper can be removed if all of the constructs get moved + /// outside of the file. Due to parallelization, we can't retroactively + /// delete the `init_foo` symbol, but instead it must be known far in + /// advance if the symbol is needed or not. + /// + /// The logic in this function must be in sync with the hoisting + /// logic in `LinkerContext.generateCodeForFileInChunkJS` + fn needsWrapperRef(p: *const P, parts: []const js_ast.Part) bool { + bun.assert(p.options.bundle); + for (parts) |part| { + for (part.stmts) |stmt| { + switch (stmt.data) { + .s_function => {}, + .s_class => |class| if (!class.class.canBeMoved()) return true, + .s_local => |local| { + if (local.was_commonjs_export or p.commonjs_named_exports.count() == 0) { + for (local.decls.slice()) |decl| { + if (decl.value) |value| + if (value.data != .e_missing and !value.canBeMoved()) + return true; + } + continue; + } + return true; + }, + .s_export_default => |ed| { + if (!ed.canBeMoved()) + return true; + }, + .s_export_equals => |e| { + if (!e.value.canBeMoved()) + return true; + }, + else => return true, + } + } + } + return false; + } + pub fn init( allocator: Allocator, log: *logger.Log, diff --git a/src/js_printer.zig index 32995d15f3..821177fe04 100644 --- a/src/js_printer.zig +++ b/src/js_printer.zig @@ -1105,7 +1105,14 @@ fn NewPrinter( p.print("="); p.printSpaceBeforeIdentifier(); if (comptime Statement == void) { - p.printRequireOrImportExpr(import.import_record_index, false, &.{}, Level.lowest, ExprFlag.None()); + p.printRequireOrImportExpr( + import.import_record_index, + false, + &.{}, + Expr.empty, + Level.lowest, + ExprFlag.None(), + ); } else { p.print(statement); } @@ -1119,7 +1126,14 @@ p.printSymbol(default.ref.?); if (comptime Statement == void) { p.@"print = "(); - p.printRequireOrImportExpr(import.import_record_index, false, &.{}, Level.lowest, ExprFlag.None()); + p.printRequireOrImportExpr( + import.import_record_index, + false, + &.{}, + Expr.empty, + Level.lowest, + ExprFlag.None(), + ); } else { p.@"print = 
"(); p.print(statement); @@ -1161,7 +1175,7 @@ fn NewPrinter( if (import.star_name_loc == null and import.default_name == null) { if (comptime Statement == void) { - p.printRequireOrImportExpr(import.import_record_index, false, &.{}, Level.lowest, ExprFlag.None()); + p.printRequireOrImportExpr(import.import_record_index, false, &.{}, Expr.empty, Level.lowest, ExprFlag.None()); } else { p.print(statement); } @@ -1916,9 +1930,12 @@ fn NewPrinter( import_record_index: u32, was_unwrapped_require: bool, leading_interior_comments: []G.Comment, + import_options: Expr, level_: Level, flags: ExprFlag.Set, ) void { + _ = leading_interior_comments; // TODO: + var level = level_; const wrap = level.gte(.new) or flags.contains(.forbid_call); if (wrap) p.print("("); @@ -1998,12 +2015,12 @@ fn NewPrinter( } defer if (record.kind == .dynamic) p.printDotThenSuffix(); - // Make sure the comma operator is propertly wrapped - - if (meta.exports_ref.isValid() and level.gte(.comma)) { - p.print("("); - } - defer if (meta.exports_ref.isValid() and level.gte(.comma)) p.print(")"); + // Make sure the comma operator is properly wrapped + const wrap_comma_operator = meta.exports_ref.isValid() and + meta.wrapper_ref.isValid() and + level.gte(.comma); + if (wrap_comma_operator) p.print("("); + defer if (wrap_comma_operator) p.print(")"); // Wrap this with a call to "__toESM()" if this is a CommonJS file const wrap_with_to_esm = record.wrap_with_to_esm; @@ -2014,17 +2031,20 @@ fn NewPrinter( } if (!meta.was_unwrapped_require) { - // Call the wrapper - p.printSpaceBeforeIdentifier(); - p.printSymbol(meta.wrapper_ref); - p.print("()"); + if (meta.wrapper_ref.isValid()) { + p.printSpaceBeforeIdentifier(); + p.printSymbol(meta.wrapper_ref); + p.print("()"); + + if (meta.exports_ref.isValid()) { + p.print(","); + p.printSpace(); + } + } // Return the namespace object if this is an ESM file if (meta.exports_ref.isValid()) { - p.print(","); - p.printSpace(); - // Wrap this with a call to "__toCommonJS()" if this is an ESM file const wrap_with_to_cjs = record.wrap_with_to_commonjs; if (wrap_with_to_cjs) { @@ -2090,14 +2110,14 @@ fn NewPrinter( } // External import() - if (leading_interior_comments.len > 0) { - p.printNewline(); - p.indent(); - for (leading_interior_comments) |comment| { - p.printIndentedComment(comment.text); - } - p.printIndent(); - } + // if (leading_interior_comments.len > 0) { + // p.printNewline(); + // p.indent(); + // for (leading_interior_comments) |comment| { + // p.printIndentedComment(comment.text); + // } + // p.printIndent(); + // } p.addSourceMapping(record.range.loc); p.printSpaceBeforeIdentifier(); @@ -2106,43 +2126,22 @@ fn NewPrinter( p.print("import("); p.printImportRecordPath(record); - switch (record.tag) { - .with_type_sqlite, .with_type_sqlite_embedded => { - // we do not preserve "embed": "true" since it is not necessary - p.printWhitespacer(ws(", { with: { type: \"sqlite\" } }")); - }, - .with_type_text => { - if (comptime is_bun_platform) { - p.printWhitespacer(ws(", { with: { type: \"text\" } }")); - } - }, - .with_type_json => { - // backwards compatibility: previously, we always stripped type json - if (comptime is_bun_platform) { - p.printWhitespacer(ws(", { with: { type: \"json\" } }")); - } - }, - .with_type_toml => { - // backwards compatibility: previously, we always stripped type - if (comptime is_bun_platform) { - p.printWhitespacer(ws(", { with: { type: \"toml\" } }")); - } - }, - .with_type_file => { - // backwards compatibility: previously, we always stripped type - if 
(comptime is_bun_platform) { - p.printWhitespacer(ws(", { with: { type: \"file\" } }")); - } - }, - else => {}, + if (!import_options.isMissing()) { + // since we previously stripped type, it is a breaking change to + // enable this for non-bun platforms + if (is_bun_platform or bun.FeatureFlags.breaking_changes_1_2) { + p.printWhitespacer(ws(", ")); + p.printExpr(import_options, .comma, .{}); + } } + p.print(")"); - if (leading_interior_comments.len > 0) { - p.printNewline(); - p.unindent(); - p.printIndent(); - } + // if (leading_interior_comments.len > 0) { + // p.printNewline(); + // p.unindent(); + // p.printIndent(); + // } return; } @@ -2485,7 +2484,14 @@ fn NewPrinter( }, .e_require_string => |e| { if (!rewrite_esm_to_cjs) { - p.printRequireOrImportExpr(e.import_record_index, e.unwrapped_id != std.math.maxInt(u32), &([_]G.Comment{}), level, flags); + p.printRequireOrImportExpr( + e.import_record_index, + e.unwrapped_id != std.math.maxInt(u32), + &([_]G.Comment{}), + Expr.empty, + level, + flags, + ); } }, .e_require_resolve_string => |e| { @@ -2514,7 +2520,6 @@ fn NewPrinter( } }, .e_import => |e| { - // Handle non-string expressions if (e.isImportRecordNull()) { const wrap = level.gte(.new) or flags.contains(.forbid_call); @@ -2525,47 +2530,45 @@ fn NewPrinter( p.printSpaceBeforeIdentifier(); p.addSourceMapping(expr.loc); p.print("import("); - if (e.leading_interior_comments.len > 0) { - p.printNewline(); - p.indent(); - for (e.leading_interior_comments) |comment| { - p.printIndentedComment(comment.text); - } - p.printIndent(); - } + // TODO: + // if (e.leading_interior_comments.len > 0) { + // p.printNewline(); + // p.indent(); + // for (e.leading_interior_comments) |comment| { + // p.printIndentedComment(comment.text); + // } + // p.printIndent(); + // } p.printExpr(e.expr, .comma, ExprFlag.None()); - if (comptime is_bun_platform) { + if (!e.options.isMissing()) { // since we previously stripped type, it is a breaking change to // enable this for non-bun platforms - switch (e.type_attribute) { - .none => {}, - .text => { - p.printWhitespacer(ws(", { with: { type: \"text\" } }")); - }, - .json => { - p.printWhitespacer(ws(", { with: { type: \"json\" } }")); - }, - .toml => { - p.printWhitespacer(ws(", { with: { type: \"toml\" } }")); - }, - .file => { - p.printWhitespacer(ws(", { with: { type: \"file\" } }")); - }, + if (is_bun_platform or bun.FeatureFlags.breaking_changes_1_2) { + p.printWhitespacer(ws(", ")); + p.printExpr(e.options, .comma, .{}); } } - if (e.leading_interior_comments.len > 0) { - p.printNewline(); - p.unindent(); - p.printIndent(); - } + // TODO: + // if (e.leading_interior_comments.len > 0) { + // p.printNewline(); + // p.unindent(); + // p.printIndent(); + // } p.print(")"); if (wrap) { p.print(")"); } } else { - p.printRequireOrImportExpr(e.import_record_index, false, e.leading_interior_comments, level, flags); + p.printRequireOrImportExpr( + e.import_record_index, + false, + &.{}, // e.leading_interior_comments, + e.options, + level, + flags, + ); } }, .e_dot => |e| { @@ -2768,7 +2771,7 @@ fn NewPrinter( if (e.func.name) |sym| { p.printSpaceBeforeIdentifier(); p.addSourceMapping(sym.loc); - p.printSymbol(sym.ref orelse Global.panic("internal error: expected E.Function's name symbol to have a ref\n{any}", .{e.func})); + p.printSymbol(sym.ref orelse Output.panic("internal error: expected E.Function's name symbol to have a ref\n{any}", .{e.func})); } p.printFunc(e.func); @@ -2789,7 +2792,7 @@ fn NewPrinter( if (e.class_name) |name| { p.print(" "); 
p.addSourceMapping(name.loc); - p.printSymbol(name.ref orelse Global.panic("internal error: expected E.Class's name symbol to have a ref\n{any}", .{e})); + p.printSymbol(name.ref orelse Output.panic("internal error: expected E.Class's name symbol to have a ref\n{any}", .{e})); } p.printClass(e.*); if (wrap) { @@ -3881,7 +3884,7 @@ fn NewPrinter( p.print("}"); }, else => { - Global.panic("Unexpected binding of type {any}", .{binding}); + Output.panic("Unexpected binding of type {any}", .{binding}); }, } } @@ -3910,8 +3913,8 @@ fn NewPrinter( .s_function => |s| { p.printIndent(); p.printSpaceBeforeIdentifier(); - const name = s.func.name orelse Global.panic("Internal error: expected func to have a name ref\n{any}", .{s}); - const nameRef = name.ref orelse Global.panic("Internal error: expected func to have a name\n{any}", .{s}); + const name = s.func.name orelse Output.panic("Internal error: expected func to have a name ref\n{any}", .{s}); + const nameRef = name.ref orelse Output.panic("Internal error: expected func to have a name\n{any}", .{s}); if (s.func.flags.contains(.is_export)) { if (!rewrite_esm_to_cjs) { @@ -4035,7 +4038,7 @@ fn NewPrinter( if (class.class.class_name) |name| { p.print("class "); - p.printSymbol(name.ref orelse Global.panic("Internal error: Expected class to have a name ref\n{any}", .{class})); + p.printSymbol(name.ref orelse Output.panic("Internal error: Expected class to have a name ref\n{any}", .{class})); } else { p.print("class"); } @@ -4045,7 +4048,7 @@ fn NewPrinter( p.printNewline(); }, else => { - Global.panic("Internal error: unexpected export default stmt data {any}", .{s}); + Output.panic("Internal error: unexpected export default stmt data {any}", .{s}); }, } }, @@ -4441,7 +4444,7 @@ fn NewPrinter( p.printIndent(); } p.printSpaceBeforeIdentifier(); - p.printSymbol(s.name.ref orelse Global.panic("Internal error: expected label to have a name {any}", .{s})); + p.printSymbol(s.name.ref orelse Output.panic("Internal error: expected label to have a name {any}", .{s})); p.print(":"); p.printBody(s.stmt); }, @@ -4930,9 +4933,9 @@ fn NewPrinter( const to_print: []const u8 = if (slice.len > 1024) slice[slice.len - 1024 ..] else slice; if (to_print.len > 0) { - Global.panic("\nvoluntary crash while printing:\n{s}\n---This is a bug. Not your fault.\n", .{to_print}); + Output.panic("\nvoluntary crash while printing:\n{s}\n---This is a bug. Not your fault.\n", .{to_print}); } else { - Global.panic("\nvoluntary crash while printing. This is a bug. Not your fault.\n", .{}); + Output.panic("\nvoluntary crash while printing. This is a bug. 
Not your fault.\n", .{}); } }, } @@ -5159,7 +5162,7 @@ fn NewPrinter( // for(;) .s_empty => {}, else => { - Global.panic("Internal error: Unexpected stmt in for loop {any}", .{initSt}); + Output.panic("Internal error: Unexpected stmt in for loop {any}", .{initSt}); }, } } @@ -5668,7 +5671,7 @@ pub fn NewWriter( pub inline fn print(writer: *Self, comptime ValueType: type, str: ValueType) void { if (FeatureFlags.disable_printing_null) { if (str == 0) { - Global.panic("Attempted to print null char", .{}); + Output.panic("Attempted to print null char", .{}); } } diff --git a/src/json_parser.zig b/src/json_parser.zig index 87f7ca1ffe..bf46ea15ed 100644 --- a/src/json_parser.zig +++ b/src/json_parser.zig @@ -1075,7 +1075,7 @@ fn expectPrintedJSON(_contents: string, expected: string) !void { const expr = try ParseJSON(&source, &log, default_allocator); if (log.msgs.items.len > 0) { - Global.panic("--FAIL--\nExpr {s}\nLog: {s}\n--FAIL--", .{ expr, log.msgs.items[0].data.text }); + Output.panic("--FAIL--\nExpr {s}\nLog: {s}\n--FAIL--", .{ expr, log.msgs.items[0].data.text }); } const buffer_writer = try js_printer.BufferWriter.init(default_allocator); diff --git a/src/linker.lds b/src/linker.lds index 27c44da312..9aee7b8227 100644 --- a/src/linker.lds +++ b/src/linker.lds @@ -5,6 +5,7 @@ BUN_1.1 { extern "C++" { v8::*; node::*; + JSC::CallFrame::describeFrame; }; local: *; diff --git a/src/linux_c.zig b/src/linux_c.zig index 5ba1b39b75..46e1b080a0 100644 --- a/src/linux_c.zig +++ b/src/linux_c.zig @@ -293,6 +293,75 @@ pub const SystemErrno = enum(u8) { }; }; +pub const UV_E2BIG: i32 = @intFromEnum(SystemErrno.E2BIG); +pub const UV_EACCES: i32 = @intFromEnum(SystemErrno.EACCES); +pub const UV_EADDRINUSE: i32 = @intFromEnum(SystemErrno.EADDRINUSE); +pub const UV_EADDRNOTAVAIL: i32 = @intFromEnum(SystemErrno.EADDRNOTAVAIL); +pub const UV_EAFNOSUPPORT: i32 = @intFromEnum(SystemErrno.EAFNOSUPPORT); +pub const UV_EAGAIN: i32 = @intFromEnum(SystemErrno.EAGAIN); +pub const UV_EALREADY: i32 = @intFromEnum(SystemErrno.EALREADY); +pub const UV_EBADF: i32 = @intFromEnum(SystemErrno.EBADF); +pub const UV_EBUSY: i32 = @intFromEnum(SystemErrno.EBUSY); +pub const UV_ECANCELED: i32 = @intFromEnum(SystemErrno.ECANCELED); +pub const UV_ECHARSET: i32 = -bun.windows.libuv.UV_ECHARSET; +pub const UV_ECONNABORTED: i32 = @intFromEnum(SystemErrno.ECONNABORTED); +pub const UV_ECONNREFUSED: i32 = @intFromEnum(SystemErrno.ECONNREFUSED); +pub const UV_ECONNRESET: i32 = @intFromEnum(SystemErrno.ECONNRESET); +pub const UV_EDESTADDRREQ: i32 = @intFromEnum(SystemErrno.EDESTADDRREQ); +pub const UV_EEXIST: i32 = @intFromEnum(SystemErrno.EEXIST); +pub const UV_EFAULT: i32 = @intFromEnum(SystemErrno.EFAULT); +pub const UV_EHOSTUNREACH: i32 = @intFromEnum(SystemErrno.EHOSTUNREACH); +pub const UV_EINTR: i32 = @intFromEnum(SystemErrno.EINTR); +pub const UV_EINVAL: i32 = @intFromEnum(SystemErrno.EINVAL); +pub const UV_EIO: i32 = @intFromEnum(SystemErrno.EIO); +pub const UV_EISCONN: i32 = @intFromEnum(SystemErrno.EISCONN); +pub const UV_EISDIR: i32 = @intFromEnum(SystemErrno.EISDIR); +pub const UV_ELOOP: i32 = @intFromEnum(SystemErrno.ELOOP); +pub const UV_EMFILE: i32 = @intFromEnum(SystemErrno.EMFILE); +pub const UV_EMSGSIZE: i32 = @intFromEnum(SystemErrno.EMSGSIZE); +pub const UV_ENAMETOOLONG: i32 = @intFromEnum(SystemErrno.ENAMETOOLONG); +pub const UV_ENETDOWN: i32 = @intFromEnum(SystemErrno.ENETDOWN); +pub const UV_ENETUNREACH: i32 = @intFromEnum(SystemErrno.ENETUNREACH); +pub const UV_ENFILE: i32 = @intFromEnum(SystemErrno.ENFILE); 
+pub const UV_ENOBUFS: i32 = @intFromEnum(SystemErrno.ENOBUFS); +pub const UV_ENODEV: i32 = @intFromEnum(SystemErrno.ENODEV); +pub const UV_ENOENT: i32 = @intFromEnum(SystemErrno.ENOENT); +pub const UV_ENOMEM: i32 = @intFromEnum(SystemErrno.ENOMEM); +pub const UV_ENONET: i32 = @intFromEnum(SystemErrno.ENONET); +pub const UV_ENOSPC: i32 = @intFromEnum(SystemErrno.ENOSPC); +pub const UV_ENOSYS: i32 = @intFromEnum(SystemErrno.ENOSYS); +pub const UV_ENOTCONN: i32 = @intFromEnum(SystemErrno.ENOTCONN); +pub const UV_ENOTDIR: i32 = @intFromEnum(SystemErrno.ENOTDIR); +pub const UV_ENOTEMPTY: i32 = @intFromEnum(SystemErrno.ENOTEMPTY); +pub const UV_ENOTSOCK: i32 = @intFromEnum(SystemErrno.ENOTSOCK); +pub const UV_ENOTSUP: i32 = @intFromEnum(SystemErrno.ENOTSUP); +pub const UV_EPERM: i32 = @intFromEnum(SystemErrno.EPERM); +pub const UV_EPIPE: i32 = @intFromEnum(SystemErrno.EPIPE); +pub const UV_EPROTO: i32 = @intFromEnum(SystemErrno.EPROTO); +pub const UV_EPROTONOSUPPORT: i32 = @intFromEnum(SystemErrno.EPROTONOSUPPORT); +pub const UV_EPROTOTYPE: i32 = @intFromEnum(SystemErrno.EPROTOTYPE); +pub const UV_EROFS: i32 = @intFromEnum(SystemErrno.EROFS); +pub const UV_ESHUTDOWN: i32 = @intFromEnum(SystemErrno.ESHUTDOWN); +pub const UV_ESPIPE: i32 = @intFromEnum(SystemErrno.ESPIPE); +pub const UV_ESRCH: i32 = @intFromEnum(SystemErrno.ESRCH); +pub const UV_ETIMEDOUT: i32 = @intFromEnum(SystemErrno.ETIMEDOUT); +pub const UV_ETXTBSY: i32 = @intFromEnum(SystemErrno.ETXTBSY); +pub const UV_EXDEV: i32 = @intFromEnum(SystemErrno.EXDEV); +pub const UV_EFBIG: i32 = @intFromEnum(SystemErrno.EFBIG); +pub const UV_ENOPROTOOPT: i32 = @intFromEnum(SystemErrno.ENOPROTOOPT); +pub const UV_ERANGE: i32 = @intFromEnum(SystemErrno.ERANGE); +pub const UV_ENXIO: i32 = @intFromEnum(SystemErrno.ENXIO); +pub const UV_EMLINK: i32 = @intFromEnum(SystemErrno.EMLINK); +pub const UV_EHOSTDOWN: i32 = @intFromEnum(SystemErrno.EHOSTDOWN); +pub const UV_EREMOTEIO: i32 = @intFromEnum(SystemErrno.EREMOTEIO); +pub const UV_ENOTTY: i32 = @intFromEnum(SystemErrno.ENOTTY); +pub const UV_EFTYPE: i32 = -bun.windows.libuv.UV_EFTYPE; +pub const UV_EILSEQ: i32 = @intFromEnum(SystemErrno.EILSEQ); +pub const UV_EOVERFLOW: i32 = @intFromEnum(SystemErrno.EOVERFLOW); +pub const UV_ESOCKTNOSUPPORT: i32 = @intFromEnum(SystemErrno.ESOCKTNOSUPPORT); +pub const UV_ENODATA: i32 = @intFromEnum(SystemErrno.ENODATA); +pub const UV_EUNATCH: i32 = @intFromEnum(SystemErrno.EUNATCH); + pub const preallocate_length = 2048 * 1024; pub fn preallocate_file(fd: std.posix.fd_t, offset: std.posix.off_t, len: std.posix.off_t) anyerror!void { // https://gist.github.com/Jarred-Sumner/b37b93399b63cbfd86e908c59a0a37df diff --git a/src/logger.zig b/src/logger.zig index 23c74879f1..70a301c166 100644 --- a/src/logger.zig +++ b/src/logger.zig @@ -549,7 +549,7 @@ pub const Msg = struct { } } - pub fn formatNoWriter(msg: *const Msg, comptime formatterFunc: @TypeOf(Global.panic)) void { + pub fn formatNoWriter(msg: *const Msg, comptime formatterFunc: @TypeOf(Output.panic)) void { formatterFunc("\n\n{s}: {s}\n{s}\n{s}:{}:{} ({d})", .{ msg.kind.string(), msg.data.text, @@ -717,7 +717,7 @@ pub const Log = struct { const count = @as(u16, @intCast(@min(msgs.len, errors_stack.len))); switch (count) { - 0 => return JSC.JSValue.jsUndefined(), + 0 => return .undefined, 1 => { const msg = msgs[0]; return switch (msg.metadata) { diff --git a/src/main.zig b/src/main.zig index 4488bcb900..8aff3b147f 100644 --- a/src/main.zig +++ b/src/main.zig @@ -8,7 +8,7 @@ const Environment = bun.Environment; pub 
const panic = bun.crash_handler.panic; pub const std_options = std.Options{ - .enable_segfault_handler = !bun.crash_handler.enable, + .enable_segfault_handler = false, }; pub const io_mode = .blocking; @@ -72,3 +72,7 @@ pub const overrides = struct { } }; }; + +pub export fn Bun__panic(msg: [*]const u8, len: usize) noreturn { + Output.panic("{s}", .{msg[0..len]}); +} diff --git a/src/napi/napi.zig b/src/napi/napi.zig index c8fc69d109..4b37c56322 100644 --- a/src/napi/napi.zig +++ b/src/napi/napi.zig @@ -648,37 +648,7 @@ pub export fn napi_strict_equals(env: napi_env, lhs: napi_value, rhs: napi_value return .ok; } pub extern fn napi_call_function(env: napi_env, recv: napi_value, func: napi_value, argc: usize, argv: [*c]const napi_value, result: *napi_value) napi_status; -pub export fn napi_new_instance(env: napi_env, constructor: napi_value, argc: usize, argv: [*c]const napi_value, result_: ?*napi_value) napi_status { - log("napi_new_instance", .{}); - JSC.markBinding(@src()); - - if (argc > 0 and argv == null) { - return invalidArg(); - } - - const result = result_ orelse { - return invalidArg(); - }; - - var exception = [_]JSC.C.JSValueRef{null}; - result.* = JSValue.c( - JSC.C.JSObjectCallAsConstructor( - env.ref(), - constructor.asObjectRef(), - argc, - if (argv != null) - @as([*]const JSC.C.JSValueRef, @ptrCast(argv)) - else - null, - &exception, - ), - ); - if (exception[0] != null) { - return genericFailure(); - } - - return .ok; -} +pub extern fn napi_new_instance(env: napi_env, constructor: napi_value, argc: usize, argv: [*c]const napi_value, result_: ?*napi_value) napi_status; pub export fn napi_instanceof(env: napi_env, object: napi_value, constructor: napi_value, result_: ?*bool) napi_status { log("napi_instanceof", .{}); const result = result_ orelse { @@ -750,12 +720,12 @@ pub export fn napi_make_callback(env: napi_env, _: *anyopaque, recv: napi_value, return .function_expected; } - const res = func.callWithThis( + const res = func.call( env, if (recv != .zero) recv else - JSC.JSValue.jsUndefined(), + .undefined, if (arg_count > 0 and args != null) @as([*]const JSC.JSValue, @ptrCast(args.?))[0..arg_count] else @@ -972,7 +942,7 @@ pub export fn napi_resolve_deferred(env: napi_env, deferred: napi_deferred, reso log("napi_resolve_deferred", .{}); var prom = deferred.get(); prom.resolve(env, resolution); - deferred.*.strong.deinit(); + deferred.deinit(); bun.default_allocator.destroy(deferred); return .ok; } @@ -980,7 +950,7 @@ pub export fn napi_reject_deferred(env: napi_env, deferred: napi_deferred, rejec log("napi_reject_deferred", .{}); var prom = deferred.get(); prom.reject(env, rejection); - deferred.*.strong.deinit(); + deferred.deinit(); bun.default_allocator.destroy(deferred); return .ok; } @@ -990,9 +960,8 @@ pub export fn napi_is_promise(_: napi_env, value: napi_value, is_promise_: ?*boo return invalidArg(); }; - if (value.isEmptyOrUndefinedOrNull()) { - is_promise.* = false; - return .ok; + if (value.isEmpty()) { + return invalidArg(); } is_promise.* = value.asAnyPromise() != null; @@ -1214,10 +1183,10 @@ pub export fn napi_fatal_error(location_ptr: ?[*:0]const u8, location_len: usize const location = napiSpan(location_ptr, location_len); if (location.len > 0) { - bun.Global.panic("napi: {s}\n {s}", .{ message, location }); + bun.Output.panic("napi: {s}\n {s}", .{ message, location }); } - bun.Global.panic("napi: {s}", .{message}); + bun.Output.panic("napi: {s}", .{message}); } pub export fn napi_create_buffer(env: napi_env, length: usize, data: ?**anyopaque, result: 
*napi_value) napi_status {
 log("napi_create_buffer: {d}", .{length});
@@ -1336,13 +1305,16 @@ pub export fn napi_get_node_version(_: napi_env, version_: ?**const napi_node_ve
 version.* = &napi_node_version.global;
 return .ok;
 }
-pub export fn napi_get_uv_event_loop(env: napi_env, loop_: ?**JSC.EventLoop) napi_status {
+const napi_event_loop = if (bun.Environment.isWindows) *bun.windows.libuv.Loop else *JSC.EventLoop;
+pub export fn napi_get_uv_event_loop(env: napi_env, loop_: ?*napi_event_loop) napi_status {
 log("napi_get_uv_event_loop", .{});
 const loop = loop_ orelse {
 return invalidArg();
 };
 if (bun.Environment.isWindows) {
- loop.* = @ptrCast(@alignCast(env.bunVM().uvLoop()));
+ // The alignment error reported by the runtime safety check here is incorrect.
+ @setRuntimeSafety(false);
+ loop.* = JSC.VirtualMachine.get().uvLoop();
 } else {
 // there is no uv event loop on posix, we use our event loop handle.
 loop.* = env.bunVM().eventLoop();
 }
@@ -1419,10 +1391,10 @@ pub const ThreadSafeFunction = struct {
 thread_count: usize = 0,
 owning_thread_lock: Lock = Lock.init(),
 event_loop: *JSC.EventLoop,
+ tracker: JSC.AsyncTaskTracker,
 env: napi_env,
- finalizer_task: JSC.AnyTask = undefined,
 finalizer: Finalizer = Finalizer{ .fun = null, .data = null },
 channel: Queue,
@@ -1500,18 +1472,30 @@ pub const ThreadSafeFunction = struct {
 pub fn call(this: *ThreadSafeFunction) void {
 const task = this.channel.tryReadItem() catch null orelse return;
+ const vm = this.event_loop.virtual_machine;
+ const globalObject = this.env;
+
+ this.tracker.willDispatch(globalObject);
+ defer this.tracker.didDispatch(globalObject);
+
 switch (this.callback) {
 .js => |js_function| {
 if (js_function.isEmptyOrUndefinedOrNull()) {
 return;
 }
- const err = js_function.call(this.env, &.{});
+ const err = js_function.call(globalObject, .undefined, &.{});
 if (err.isAnyError()) {
- _ = this.env.bunVM().uncaughtException(this.env, err, false);
+ _ = vm.uncaughtException(globalObject, err, false);
 }
 },
 .c => |cb| {
- cb.napi_threadsafe_function_call_js(this.env, cb.js, this.ctx, task);
+ if (comptime bun.Environment.isDebug) {
+ const str = cb.js.toBunString(globalObject);
+ defer str.deref();
+ log("call() {}", .{str});
+ }
+
+ cb.napi_threadsafe_function_call_js(globalObject, cb.js, this.ctx, task);
 },
 }
 }
@@ -1530,6 +1514,8 @@ pub const ThreadSafeFunction = struct {
 pub fn finalize(opaq: *anyopaque) void {
 var this = bun.cast(*ThreadSafeFunction, opaq);
+ this.unref();
+
 if (this.finalizer.fun) |fun| {
 fun(this.event_loop.global, this.finalizer.data, this.ctx);
 }
@@ -1581,7 +1567,6 @@
 }
 if (mode == .abort or this.thread_count == 0) {
- this.finalizer_task = JSC.AnyTask{ .ctx = this, .callback = finalize };
 this.event_loop.enqueueTaskConcurrent(JSC.ConcurrentTask.fromCallback(this, finalize));
 }
@@ -1615,27 +1600,30 @@ pub export fn napi_create_threadsafe_function(
 func.protect();
 }
+ const vm = env.bunVM();
 var function = bun.default_allocator.create(ThreadSafeFunction) catch return genericFailure();
 function.* = .{
- .event_loop = env.bunVM().eventLoop(),
+ .event_loop = vm.eventLoop(),
 .env = env,
 .callback = if (call_js_cb) |c| .{
 .c = .{
 .napi_threadsafe_function_call_js = c,
- .js = if (func == .zero) JSC.JSValue.jsUndefined() else func,
+ .js = if (func == .zero) .undefined else func.withAsyncContextIfNeeded(env),
 },
 } else .{
- .js = if (func == .zero) JSC.JSValue.jsUndefined() else func,
+ .js = if (func == .zero) .undefined else func.withAsyncContextIfNeeded(env),
 },
 .ctx = context,
 .channel = 
ThreadSafeFunction.Queue.init(max_queue_size, bun.default_allocator), .thread_count = initial_thread_count, .poll_ref = Async.KeepAlive.init(), + .tracker = JSC.AsyncTaskTracker.init(vm), }; function.finalizer = .{ .data = thread_finalize_data, .fun = thread_finalize_cb }; // nodejs by default keeps the event loop alive until the thread-safe function is unref'd function.ref(); + function.tracker.didSchedule(vm.global); result.* = function; return .ok; @@ -1670,14 +1658,12 @@ pub export fn napi_release_threadsafe_function(func: napi_threadsafe_function, m pub export fn napi_unref_threadsafe_function(env: napi_env, func: napi_threadsafe_function) napi_status { log("napi_unref_threadsafe_function", .{}); bun.assert(func.event_loop.global == env); - func.unref(); return .ok; } pub export fn napi_ref_threadsafe_function(env: napi_env, func: napi_threadsafe_function) napi_status { log("napi_ref_threadsafe_function", .{}); bun.assert(func.event_loop.global == env); - func.ref(); return .ok; } diff --git a/src/options.zig b/src/options.zig index 23a43b04a7..b2b60949cb 100644 --- a/src/options.zig +++ b/src/options.zig @@ -1240,7 +1240,7 @@ pub fn definesFromTransformOptions( } } - const resolved_defines = try defines.DefineData.from_input(user_defines, log, allocator); + const resolved_defines = try defines.DefineData.fromInput(user_defines, log, allocator); return try defines.Define.init( allocator, @@ -1545,7 +1545,6 @@ pub const BundleOptions = struct { "react-client", "react-server", "react-refresh", - "__bun-test-unwrap-commonjs__", }; pub inline fn cssImportBehavior(this: *const BundleOptions) Api.CssInJsBehavior { diff --git a/src/renamer.zig b/src/renamer.zig index 3f3b572546..e23d4aba2c 100644 --- a/src/renamer.zig +++ b/src/renamer.zig @@ -35,7 +35,7 @@ pub const NoOpRenamer = struct { if (renamer.symbols.getConst(resolved)) |symbol| { return symbol.original_name; } else { - Global.panic("Invalid symbol {s} in {s}", .{ ref, renamer.source.path.text }); + Output.panic("Invalid symbol {s} in {s}", .{ ref, renamer.source.path.text }); } } diff --git a/src/resolver/package_json.zig b/src/resolver/package_json.zig index c74338ecf7..a5b539403f 100644 --- a/src/resolver/package_json.zig +++ b/src/resolver/package_json.zig @@ -37,7 +37,6 @@ const FolderResolver = @import("../install/resolvers/folder_resolver.zig"); const Architecture = @import("../install/npm.zig").Architecture; const OperatingSystem = @import("../install/npm.zig").OperatingSystem; -pub const SideEffectsMap = std.HashMapUnmanaged(bun.StringHashMapUnowned.Key, void, bun.StringHashMapUnowned.Adapter, 80); pub const DependencyMap = struct { map: HashMap = .{}, source_buf: []const u8 = "", @@ -57,6 +56,8 @@ pub const PackageJSON = struct { production, }; + pub usingnamespace bun.New(@This()); + pub fn generateHash(package_json: *PackageJSON) void { var hashy: [1024]u8 = undefined; @memset(&hashy, 0); @@ -112,11 +113,7 @@ pub const PackageJSON = struct { package_manager_package_id: Install.PackageID = Install.invalid_package_id, dependencies: DependencyMap = .{}, - side_effects: union(enum) { - unspecified: void, - false: void, - map: SideEffectsMap, - } = .{ .unspecified = {} }, + side_effects: SideEffects = .unspecified, // Present if the "browser" field is present. 
This field is intended to be
 // used by bundlers and lets you redirect the paths of certain 3rd-party
@@ -148,6 +145,33 @@ pub const PackageJSON = struct {
 exports: ?ExportsMap = null,
 imports: ?ExportsMap = null,
+ pub const SideEffects = union(enum) {
+ /// Either `package.json` is missing "sideEffects", it is set to true, or it
+ /// has some other unsupported value. Treat all files as having side effects.
+ unspecified: void,
+ /// "sideEffects": false
+ false: void,
+ /// "sideEffects": ["file.js", "other.js"]
+ map: Map,
+ // /// "sideEffects": ["side_effects/*.js"]
+ // glob: TODO,
+
+ pub const Map = std.HashMapUnmanaged(
+ bun.StringHashMapUnowned.Key,
+ void,
+ bun.StringHashMapUnowned.Adapter,
+ 80,
+ );
+
+ pub fn hasSideEffects(side_effects: SideEffects, path: []const u8) bool {
+ return switch (side_effects) {
+ .unspecified => true,
+ .false => false,
+ .map => |map| map.contains(bun.StringHashMapUnowned.Key.init(path)),
+ };
+ }
+ };
+
 pub inline fn isAppPackage(this: *const PackageJSON) bool {
 return this.hash == 0xDEADBEEF;
 }
@@ -776,7 +800,7 @@ pub const PackageJSON = struct {
 } else if (side_effects_field.asArray()) |array_| {
 var array = array_;
 // TODO: switch to only storing hashes
- var map = SideEffectsMap{};
+ var map = SideEffects.Map{};
 map.ensureTotalCapacity(allocator, array.array.items.len) catch unreachable;
 while (array.next()) |item| {
 if (item.asString(allocator)) |name| {
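(A sketch of how these variants map onto `package.json` values, using a
hypothetical package; the commented-out glob form above remains
unimplemented:)

    // field absent or unsupported -> .unspecified: assume every file has side effects
    // "sideEffects": false        -> .false: any unused file may be tree-shaken
    // "sideEffects": ["side.js"]  -> .map: only the listed paths are kept
    {
      "name": "some-pkg",
      "sideEffects": ["side.js"]
    }

diff --git a/src/resolver/resolve_path.zig
index d5ba7a4270..1eb5cac84e 100644
--- a/src/resolver/resolve_path.zig
+++ b/src/resolver/resolve_path.zig
@@ -608,10 +608,10 @@ fn windowsVolumeNameLenT(comptime T: type, path: []const T) struct { usize, usiz
 }
 }
 } else {
- if (bun.strings.indexAnyComptimeT(T, path[3..], comptime strings.literal(T, "/\\"))) |idx| {
+ if (bun.strings.indexAnyComptimeT(T, path[3..], strings.literal(T, "/\\"))) |idx| {
 // TODO: handle input "//abc//def" should be picked up as a unc path
 if (path.len > idx + 4 and !Platform.windows.isSeparatorT(T, path[idx + 4])) {
- if (bun.strings.indexAnyComptimeT(T, path[idx + 4 ..], comptime strings.literal(T, "/\\"))) |idx2| {
+ if (bun.strings.indexAnyComptimeT(T, path[idx + 4 ..], strings.literal(T, "/\\"))) |idx2| {
 return .{ idx + idx2 + 4, idx + 3 };
 } else {
 return .{ path.len, idx + 3 };
@@ -761,7 +761,7 @@ pub fn normalizeStringGenericTZ(
 //
 // since it is theoretically possible to get here in release
 // we will not do this check in release.
- assert(!strings.hasPrefixComptimeType(T, path_, comptime strings.literal(T, ":\\")));
+ assert(!strings.hasPrefixComptimeType(T, path_, strings.literal(T, ":\\")));
 }
 var buf_i: usize = 0;
@@ -776,16 +776,16 @@
 if (isWindows and !options.allow_above_root) {
 if (volLen > 0) {
 if (options.add_nt_prefix) {
- @memcpy(buf[buf_i .. buf_i + 4], comptime strings.literal(T, "\\??\\"));
+ @memcpy(buf[buf_i .. buf_i + 4], strings.literal(T, "\\??\\"));
 buf_i += 4;
 }
 if (path_[1] != ':') {
 // UNC paths
 if (options.add_nt_prefix) {
- @memcpy(buf[buf_i .. buf_i + 4], comptime strings.literal(T, "UNC" ++ sep_str));
+ @memcpy(buf[buf_i .. buf_i + 4], strings.literal(T, "UNC" ++ sep_str));
 buf_i += 2;
 } else {
- @memcpy(buf[buf_i .. buf_i + 2], comptime strings.literal(T, sep_str ++ sep_str));
+ @memcpy(buf[buf_i .. buf_i + 2], strings.literal(T, sep_str ++ sep_str));
 }
 @memcpy(buf[buf_i + 2 .. buf_i + indexOfThirdUNCSlash + 1], path_[2 .. 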
indexOfThirdUNCSlash + 1]); buf[buf_i + indexOfThirdUNCSlash] = options.separator; @@ -827,7 +827,7 @@ pub fn normalizeStringGenericTZ( if (isWindows and options.allow_above_root) { if (path_.len >= 2 and path_[1] == ':') { if (options.add_nt_prefix) { - @memcpy(buf[buf_i .. buf_i + 4], &comptime strings.literalBuf(T, "\\??\\")); + @memcpy(buf[buf_i .. buf_i + 4], &strings.literalBuf(T, "\\??\\")); buf_i += 4; } buf[buf_i] = path_[0]; @@ -884,10 +884,10 @@ pub fn normalizeStringGenericTZ( } } else if (options.allow_above_root) { if (buf_i > buf_start) { - buf[buf_i..][0..3].* = (comptime strings.literal(T, sep_str ++ "..")).*; + buf[buf_i..][0..3].* = (strings.literal(T, sep_str ++ "..")).*; buf_i += 3; } else { - buf[buf_i..][0..2].* = (comptime strings.literal(T, "..")).*; + buf[buf_i..][0..2].* = (strings.literal(T, "..")).*; buf_i += 2; } dotdot = buf_i; @@ -932,7 +932,7 @@ pub fn normalizeStringGenericTZ( const result = if (options.zero_terminate) buf[0..buf_i :0] else buf[0..buf_i]; if (bun.Environment.allow_assert and isWindows) { - assert(!strings.hasPrefixComptimeType(T, result, comptime strings.literal(T, "\\:\\"))); + assert(!strings.hasPrefixComptimeType(T, result, strings.literal(T, "\\:\\"))); } return result; @@ -1616,7 +1616,7 @@ pub fn lastIndexOfSeparatorWindows(slice: []const u8) ?usize { } pub fn lastIndexOfSeparatorWindowsT(comptime T: type, slice: []const T) ?usize { - return std.mem.lastIndexOfAny(T, slice, comptime strings.literal(T, "\\/")); + return std.mem.lastIndexOfAny(T, slice, strings.literal(T, "\\/")); } pub fn lastIndexOfSeparatorPosix(slice: []const u8) ?usize { diff --git a/src/resolver/resolver.zig b/src/resolver/resolver.zig index c08f78cc48..bc95d03e52 100644 --- a/src/resolver/resolver.zig +++ b/src/resolver/resolver.zig @@ -171,9 +171,9 @@ pub const SideEffects = enum { /// known to not have side effects. no_side_effects__pure_data, - // / Same as above but it came from a plugin. We don't want to warn about - // / unused imports to these files since running the plugin is a side effect. - // / Removing the import would not call the plugin which is observable. + // /// Same as above but it came from a plugin. We don't want to warn about + // /// unused imports to these files since running the plugin is a side effect. + // /// Removing the import would not call the plugin which is observable. // no_side_effects__pure_data_from_plugin, }; @@ -481,13 +481,6 @@ pub fn ResolveWatcher(comptime Context: type, comptime onWatch: anytype) type { }; } -fn isExternalModuleLike(import_path: string) bool { - if (strings.startsWith(import_path, ".") or strings.startsWith(import_path, "/") or strings.startsWith(import_path, "..")) { - return false; - } - return true; -} - pub const Resolver = struct { const ThisResolver = @This(); opts: options.BundleOptions, @@ -632,7 +625,7 @@ pub const Resolver = struct { } pub fn isExternalPattern(r: *ThisResolver, import_path: string) bool { - if (r.opts.packages == .external and isExternalModuleLike(import_path)) { + if (r.opts.packages == .external and isPackagePath(import_path)) { return true; } for (r.opts.external.patterns) |pattern| { @@ -807,6 +800,26 @@ pub const Resolver = struct { const tracer = bun.tracy.traceNamed(@src(), "ModuleResolver.resolve"); defer tracer.end(); + // Only setting 'current_action' in debug mode because module resolution + // is done very often, and has a very low crash rate. 
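+ // If a crash does happen mid-resolve, the crash handler can then include
+ // the in-flight import path and source directory in its report.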
+ const prev_action = if (Environment.isDebug) bun.crash_handler.current_action;
+ if (Environment.isDebug) bun.crash_handler.current_action = .{ .resolver = .{
+ .source_dir = source_dir,
+ .import_path = import_path,
+ .kind = kind,
+ } };
+ defer if (Environment.isDebug) {
+ bun.crash_handler.current_action = prev_action;
+ };
+
+ if (Environment.isDebug and bun.CLI.debug_flags.hasResolveBreakpoint(import_path)) {
+ bun.Output.debug("Resolving {s} from {s}", .{
+ import_path,
+ source_dir,
+ });
+ @breakpoint();
+ }
+
 const original_order = r.extension_order;
 defer r.extension_order = original_order;
 r.extension_order = switch (kind) {
@@ -853,8 +866,10 @@
 }
 }
- // Certain types of URLs default to being external for convenience
- if (r.isExternalPattern(import_path) or
+ // Certain types of URLs default to being external for convenience,
+ // but these rules are not applied to the entry point, which is never external (#12734)
+ if (kind != .entry_point and
+ (r.isExternalPattern(import_path) or
 // "fill: url(#filter);"
 (kind.isFromCSS() and strings.startsWith(import_path, "#")) or
@@ -865,7 +880,7 @@
 strings.startsWith(import_path, "https://") or
 // "background: url(//example.com/images/image.png);"
- strings.startsWith(import_path, "//"))
+ strings.startsWith(import_path, "//")))
 {
 if (r.debug_logs) |*debug| {
 debug.addNote("Marking this path as implicitly external");
@@ -1143,13 +1158,13 @@
 pub fn resolveWithoutSymlinks(
 r: *ThisResolver,
 source_dir: string,
- import_path_: string,
+ input_import_path: string,
 kind: ast.ImportKind,
 global_cache: GlobalCache,
 ) Result.Union {
 assert(std.fs.path.isAbsolute(source_dir));
- var import_path = import_path_;
+ var import_path = input_import_path;
 // This implements the module resolution algorithm from node.js, which is
 // described here: https://nodejs.org/api/modules.html#modules_all_together
@@ -1375,7 +1390,10 @@
 }
 // Check for external packages first
- if (r.opts.external.node_modules.count() > 0) {
+ if (r.opts.external.node_modules.count() > 0 and
+ // Imports like "process/" need to resolve to the filesystem, not a builtin
+ !strings.hasSuffixComptime(import_path, "/"))
+ {
 var query = import_path;
 while (true) {
 if (r.opts.external.node_modules.contains(query)) {
@@ -2481,7 +2499,7 @@
 const source = logger.Source.initPathString(key_path.text, entry.contents);
 const file_dir = source.path.sourceDir();
- var result = (try TSConfigJSON.parse(bun.fs_allocator, r.log, source, &r.caches.json)) orelse return null;
+ var result = (try TSConfigJSON.parse(bun.default_allocator, r.log, source, &r.caches.json)) orelse return null;
 if (result.hasBaseURL()) {
@@ -2541,9 +2559,7 @@
 ) orelse return null;
 }
- const _pkg = try bun.default_allocator.create(PackageJSON);
- _pkg.* = pkg;
- return _pkg;
+ return PackageJSON.new(pkg);
 }
 fn dirInfoCached(
diff --git a/src/resolver/tsconfig_json.zig
index f5a61a0dee..22dd62cdc6 100644
--- a/src/resolver/tsconfig_json.zig
+++ b/src/resolver/tsconfig_json.zig
@@ -57,6 +57,7 @@ pub const TSConfigJSON = struct {
 emit_decorator_metadata: bool = false,
+ pub usingnamespace bun.New(@This());
 pub fn hasBaseURL(tsconfig: *const TSConfigJSON) bool {
 return tsconfig.base_url.len > 0;
 }
@@ -323,13 +324,7 @@
 assert(result.base_url.len > 0);
 }
- const _result = 
allocator.create(TSConfigJSON) catch unreachable;
- _result.* = result;
-
- if (Environment.isDebug and has_base_url) {
- assert(_result.base_url.len > 0);
- }
- return _result;
+ return TSConfigJSON.new(result);
 }
 pub fn isValidTSConfigPathPattern(text: string, log: *logger.Log, source: *const logger.Source, loc: logger.Loc, allocator: std.mem.Allocator) bool {
diff --git a/src/runtime.js
index 460a1768f7..f1adb2e6c8 100644
--- a/src/runtime.js
+++ b/src/runtime.js
@@ -1,13 +1,17 @@
+// Since runtime.js loads first in the bundler, Ref.none will point at this
+// value. And since it isn't exported, it will always be tree-shaken away.
+var __INVALID__REF__;
+
 var tagSymbol;
 var cjsRequireSymbol;
+// This ordering is deliberate so that the printer optimizes these into a
+// single destructuring assignment.
 var __create = Object.create;
 var __descs = Object.getOwnPropertyDescriptors;
-var __defProp = Object.defineProperty;
 var __getProtoOf = Object.getPrototypeOf;
+var __defProp = Object.defineProperty;
 var __getOwnPropNames = Object.getOwnPropertyNames;
 var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
-
-// This order is deliberate so that the printer does the {} optimization here
 var __hasOwnProp = Object.prototype.hasOwnProperty;
 export var __markAsModule = target => __defProp(target, "__esModule", { value: true, configurable: true });
@@ -63,22 +67,23 @@ export var __toESM = (mod, isNodeMode, target) => {
 // Converts the module from ESM to CommonJS. This clones the input module
 // object with the addition of a non-enumerable "__esModule" property set
 // to "true", which overwrites any existing export named "__esModule".
+var __moduleCache = /* @__PURE__ */ new WeakMap();
 export var __toCommonJS = /* @__PURE__ */ from => {
- const moduleCache = (__toCommonJS.moduleCache ??= new WeakMap());
- var cached = moduleCache.get(from);
- if (cached) return cached;
- var to = __defProp({}, "__esModule", { value: true });
- var desc = { enumerable: false };
+ var entry = __moduleCache.get(from),
+ desc;
+ if (entry) return entry;
+ entry = __defProp({}, "__esModule", { value: true });
 if ((from && typeof from === "object") || typeof from === "function")
- for (let key of __getOwnPropNames(from))
- if (!__hasOwnProp.call(to, key))
- __defProp(to, key, {
+ __getOwnPropNames(from).map(
+ key =>
+ !__hasOwnProp.call(entry, key) &&
+ __defProp(entry, key, {
 get: () => from[key],
 enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable,
- });
-
- moduleCache.set(from, to);
- return to;
+ }),
+ );
+ __moduleCache.set(from, entry);
+ return entry;
 };
 // lazy require to prevent loading one icon from a design system
diff --git a/src/shell/interpreter.zig
index dfd6b3a8aa..9c8fe9b615 100644
--- a/src/shell/interpreter.zig
+++ b/src/shell/interpreter.zig
@@ -1664,7 +1664,7 @@ pub const Interpreter = struct {
 this.exit_code = exit_code;
 if (this.this_jsvalue != .zero) {
 if (JSC.Codegen.JSShellInterpreter.resolveGetCached(this.this_jsvalue)) |resolve| {
- _ = resolve.call(this.globalThis, &.{ JSValue.jsNumberFromU16(exit_code), this.getBufferedStdout(), this.getBufferedStderr() });
+ _ = resolve.call(this.globalThis, .undefined, &.{ JSValue.jsNumberFromU16(exit_code), this.getBufferedStdout(), this.getBufferedStderr() });
 JSC.Codegen.JSShellInterpreter.resolveSetCached(this.this_jsvalue, this.globalThis, .undefined);
 JSC.Codegen.JSShellInterpreter.rejectSetCached(this.this_jsvalue, this.globalThis, .undefined);
 }
@@ -7298,31 +7298,34 @@ pub const 
Interpreter = struct { return Maybe(void).success; } - const first_arg = args[0][0..std.mem.len(args[0]) :0]; - switch (first_arg[0]) { - '-' => { - switch (this.bltn.parentCmd().base.shell.changePrevCwd(this.bltn.parentCmd().base.interpreter)) { - .result => {}, - .err => |err| { - return this.handleChangeCwdErr(err, this.bltn.parentCmd().base.shell.prevCwdZ()); - }, - } - }, - '~' => { - const homedir = this.bltn.parentCmd().base.shell.getHomedir(); - homedir.deref(); - switch (this.bltn.parentCmd().base.shell.changeCwd(this.bltn.parentCmd().base.interpreter, homedir.slice())) { - .result => {}, - .err => |err| return this.handleChangeCwdErr(err, homedir.slice()), - } - }, - else => { - switch (this.bltn.parentCmd().base.shell.changeCwd(this.bltn.parentCmd().base.interpreter, first_arg)) { - .result => {}, - .err => |err| return this.handleChangeCwdErr(err, first_arg), - } - }, + if (args.len == 1) { + const first_arg = args[0][0..std.mem.len(args[0]) :0]; + switch (first_arg[0]) { + '-' => { + switch (this.bltn.parentCmd().base.shell.changePrevCwd(this.bltn.parentCmd().base.interpreter)) { + .result => {}, + .err => |err| { + return this.handleChangeCwdErr(err, this.bltn.parentCmd().base.shell.prevCwdZ()); + }, + } + }, + '~' => { + const homedir = this.bltn.parentCmd().base.shell.getHomedir(); + homedir.deref(); + switch (this.bltn.parentCmd().base.shell.changeCwd(this.bltn.parentCmd().base.interpreter, homedir.slice())) { + .result => {}, + .err => |err| return this.handleChangeCwdErr(err, homedir.slice()), + } + }, + else => { + switch (this.bltn.parentCmd().base.shell.changeCwd(this.bltn.parentCmd().base.interpreter, first_arg)) { + .result => {}, + .err => |err| return this.handleChangeCwdErr(err, first_arg), + } + }, + } } + this.bltn.done(0); return Maybe(void).success; } diff --git a/src/shell/shell.zig b/src/shell/shell.zig index ecd520cf9c..19cb9f1d8b 100644 --- a/src/shell/shell.zig +++ b/src/shell/shell.zig @@ -4362,7 +4362,7 @@ pub const TestingAPIs = struct { var arguments = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments_.slice()); const string = arguments.nextEat() orelse { globalThis.throw("shellInternals.disabledOnPosix: expected 1 arguments, got 0", .{}); - return JSC.JSValue.jsUndefined(); + return .undefined; }; const bunstr = string.toBunString(globalThis); @@ -4386,7 +4386,7 @@ pub const TestingAPIs = struct { var arguments = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments_.slice()); const string_args = arguments.nextEat() orelse { globalThis.throw("shell_parse: expected 2 arguments, got 0", .{}); - return JSC.JSValue.jsUndefined(); + return .undefined; }; var arena = std.heap.ArenaAllocator.init(bun.default_allocator); @@ -4476,7 +4476,7 @@ pub const TestingAPIs = struct { var arguments = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments_.slice()); const string_args = arguments.nextEat() orelse { globalThis.throw("shell_parse: expected 2 arguments, got 0", .{}); - return JSC.JSValue.jsUndefined(); + return .undefined; }; var arena = bun.ArenaAllocator.init(bun.default_allocator); diff --git a/src/sql/postgres.zig b/src/sql/postgres.zig index e9cec3fc81..d88db48535 100644 --- a/src/sql/postgres.zig +++ b/src/sql/postgres.zig @@ -2882,7 +2882,7 @@ pub const PostgresSQLConnection = struct { if (on_close == .zero) return; const instance = this.globalObject.createErrorInstance("{s}", .{message}); instance.put(this.globalObject, JSC.ZigString.static("code"), String.init(@errorName(err)).toJS(this.globalObject)); - _ = 
on_close.callWithThis( + _ = on_close.call( this.globalObject, this.js_value, &[_]JSValue{ diff --git a/src/string.zig b/src/string.zig index 8e65e83ead..31df35dcdd 100644 --- a/src/string.zig +++ b/src/string.zig @@ -277,7 +277,7 @@ pub const Tag = enum(u8) { /// into a WTF::String. /// Can be in either `utf8` or `utf16le` encodings. ZigString = 2, - /// Static memory that is guarenteed to never be freed. When converted to WTF::String, + /// Static memory that is guaranteed to never be freed. When converted to WTF::String, /// the memory is not cloned, but instead referenced with WTF::ExternalStringImpl. /// Can be in either `utf8` or `utf16le` encodings. StaticZigString = 3, diff --git a/src/string_immutable.zig b/src/string_immutable.zig index 2f1807ecfc..6e0906156c 100644 --- a/src/string_immutable.zig +++ b/src/string_immutable.zig @@ -51,21 +51,19 @@ pub inline fn removeLeadingDotSlash(slice: []const u8) []const u8 { pub const w = toUTF16Literal; pub fn toUTF16Literal(comptime str: []const u8) [:0]const u16 { - return comptime literal(u16, str); + return literal(u16, str); } pub fn literal(comptime T: type, comptime str: []const u8) *const [literalLength(T, str):0]T { - if (!@inComptime()) @compileError("strings.literal() must be called in a comptime context"); - return comptime switch (T) { - u8 => brk: { - var data: [str.len:0]u8 = undefined; - @memcpy(&data, str); - const final = data[0..].*; - break :brk &final; - }, - u16 => return std.unicode.utf8ToUtf16LeStringLiteral(str), - else => @compileError("unsupported type " ++ @typeName(T) ++ " in strings.literal() call."), + const Holder = struct { + pub const value = switch (T) { + u8 => (str[0..str.len].* ++ .{0})[0..str.len :0], + u16 => std.unicode.utf8ToUtf16LeStringLiteral(str), + else => @compileError("unsupported type " ++ @typeName(T) ++ " in strings.literal() call."), + }; }; + + return Holder.value; } fn literalLength(comptime T: type, comptime str: string) usize { @@ -174,7 +172,7 @@ pub inline fn containsAny(in: anytype, target: string) bool { /// - The name ends up being part of a URL, an argument on the command line, and /// a folder name. Therefore, the name can't contain any non-URL-safe /// characters. -pub inline fn isNPMPackageName(target: string) bool { +pub fn isNPMPackageName(target: string) bool { if (target.len == 0) return false; if (target.len > 214) return false; @@ -208,7 +206,7 @@ pub inline fn isNPMPackageName(target: string) bool { return !scoped or slash_index > 0 and slash_index + 1 < target.len; } -pub inline fn indexAnyComptime(target: string, comptime chars: string) ?usize { +pub fn indexAnyComptime(target: string, comptime chars: string) ?usize { for (target, 0..) |parent, i| { inline for (chars) |char| { if (char == parent) return i; @@ -217,7 +215,7 @@ pub inline fn indexAnyComptime(target: string, comptime chars: string) ?usize { return null; } -pub inline fn indexAnyComptimeT(comptime T: type, target: []const T, comptime chars: []const T) ?usize { +pub fn indexAnyComptimeT(comptime T: type, target: []const T, comptime chars: []const T) ?usize { for (target, 0..) |parent, i| { inline for (chars) |char| { if (char == parent) return i; @@ -226,7 +224,7 @@ pub inline fn indexAnyComptimeT(comptime T: type, target: []const T, comptime ch return null; } -pub inline fn indexEqualAny(in: anytype, target: string) ?usize { +pub fn indexEqualAny(in: anytype, target: string) ?usize { for (in, 0..) 
|str, i| if (eqlLong(str, target, true)) return i; return null; } @@ -786,8 +784,9 @@ pub fn hasSuffixComptime(self: string, comptime alt: anytype) bool { return self.len >= alt.len and eqlComptimeCheckLenWithType(u8, self[self.len - alt.len ..], alt, false); } -inline fn eqlComptimeCheckLenU8(a: []const u8, comptime b: []const u8, comptime check_len: bool) bool { +fn eqlComptimeCheckLenU8(a: []const u8, comptime b: []const u8, comptime check_len: bool) bool { @setEvalBranchQuota(9999); + if (comptime check_len) { if (a.len != b.len) return false; } @@ -824,7 +823,7 @@ inline fn eqlComptimeCheckLenU8(a: []const u8, comptime b: []const u8, comptime return true; } -inline fn eqlComptimeCheckLenWithKnownType(comptime Type: type, a: []const Type, comptime b: []const Type, comptime check_len: bool) bool { +fn eqlComptimeCheckLenWithKnownType(comptime Type: type, a: []const Type, comptime b: []const Type, comptime check_len: bool) bool { if (comptime Type != u8) { return eqlComptimeCheckLenU8(std.mem.sliceAsBytes(a), comptime std.mem.sliceAsBytes(b), comptime check_len); } @@ -835,18 +834,18 @@ inline fn eqlComptimeCheckLenWithKnownType(comptime Type: type, a: []const Type, /// /// strings.eqlComptime(input, "hello world"); /// strings.eqlComptime(input, "hai"); -pub inline fn eqlComptimeCheckLenWithType(comptime Type: type, a: []const Type, comptime b: anytype, comptime check_len: bool) bool { +pub fn eqlComptimeCheckLenWithType(comptime Type: type, a: []const Type, comptime b: anytype, comptime check_len: bool) bool { return eqlComptimeCheckLenWithKnownType(comptime Type, a, if (@typeInfo(@TypeOf(b)) != .Pointer) &b else b, comptime check_len); } -pub inline fn eqlCaseInsensitiveASCIIIgnoreLength( +pub fn eqlCaseInsensitiveASCIIIgnoreLength( a: string, b: string, ) bool { return eqlCaseInsensitiveASCII(a, b, false); } -pub inline fn eqlCaseInsensitiveASCIIICheckLength( +pub fn eqlCaseInsensitiveASCIIICheckLength( a: string, b: string, ) bool { @@ -877,7 +876,7 @@ pub fn eqlLong(a_str: string, b_str: string, comptime check_len: bool) bool { return false; } } else { - if (comptime Environment.allow_assert) assert(b_str.len == a_str.len); + if (comptime Environment.allow_assert) assert(b_str.len <= a_str.len); } const end = b_str.ptr + len; diff --git a/src/symbols.dyn b/src/symbols.dyn index 0be0db03ca..2d16df145d 100644 --- a/src/symbols.dyn +++ b/src/symbols.dyn @@ -51,6 +51,7 @@ _napi_define_class; _napi_define_properties; _napi_delete_async_work; + _napi_delete_element; _napi_delete_property; _napi_delete_reference; _napi_detach_arraybuffer; @@ -67,7 +68,6 @@ _napi_get_dataview_info; _napi_get_date_value; _napi_get_element; - _napi_delete_element; _napi_get_global; _napi_get_instance_data; _napi_get_last_error_info; @@ -144,9 +144,15 @@ _napi_unref_threadsafe_function; _napi_unwrap; _napi_wrap; + _node_api_create_external_string_latin1; + _node_api_create_external_string_utf16; _node_api_create_syntax_error; _node_api_symbol_for; _node_api_throw_syntax_error; - _node_api_create_external_string_latin1; - _node_api_create_external_string_utf16; + __ZN2v87Isolate10GetCurrentEv; + __ZN2v87Isolate13TryGetCurrentEv; + __ZN2v87Isolate17GetCurrentContextEv; + __ZN4node25AddEnvironmentCleanupHookEPN2v87IsolateEPFvPvES3_; + __ZN4node28RemoveEnvironmentCleanupHookEPN2v87IsolateEPFvPvES3_; + __ZN3JSC9CallFrame13describeFrameEv; }; \ No newline at end of file diff --git a/src/symbols.txt b/src/symbols.txt index 577035fa93..2e24afa1c4 100644 --- a/src/symbols.txt +++ b/src/symbols.txt @@ -153,3 +153,4 @@ 
__ZN2v87Isolate13TryGetCurrentEv __ZN2v87Isolate17GetCurrentContextEv __ZN4node25AddEnvironmentCleanupHookEPN2v87IsolateEPFvPvES3_ __ZN4node28RemoveEnvironmentCleanupHookEPN2v87IsolateEPFvPvES3_ +__ZN3JSC9CallFrame13describeFrameEv \ No newline at end of file diff --git a/src/sys.zig b/src/sys.zig index ac93527180..eee32ce408 100644 --- a/src/sys.zig +++ b/src/sys.zig @@ -1868,7 +1868,8 @@ pub fn renameatConcurrentlyWithoutFallback( var err = switch (bun.sys.renameat2(from_dir_fd, from, to_dir_fd, to, .{ .exclude = true, })) { - .err => |err| err, + // if ENOENT don't retry + .err => |err| if (err.getErrno() == .NOENT) return .{ .err = err } else err, .result => break :attempt_atomic_rename_and_fallback_to_racy_delete, }; @@ -1893,8 +1894,12 @@ pub fn renameatConcurrentlyWithoutFallback( } // sad path: let's try to delete the folder and then rename it - var to_dir = to_dir_fd.asDir(); - to_dir.deleteTree(to) catch {}; + if (to_dir_fd.isValid()) { + var to_dir = to_dir_fd.asDir(); + to_dir.deleteTree(to) catch {}; + } else { + std.fs.deleteTreeAbsolute(to) catch {}; + } switch (bun.sys.renameat(from_dir_fd, from, to_dir_fd, to)) { .err => |err| { return .{ .err = err }; diff --git a/src/thread_pool.zig b/src/thread_pool.zig index 8d5f859d44..e7e8b8d107 100644 --- a/src/thread_pool.zig +++ b/src/thread_pool.zig @@ -134,14 +134,10 @@ pub const Batch = struct { pub const WaitGroup = struct { mutex: std.Thread.Mutex = .{}, counter: u32 = 0, - event: std.Thread.ResetEvent, + event: std.Thread.ResetEvent = .{}, pub fn init(self: *WaitGroup) void { - self.* = .{ - .mutex = .{}, - .counter = 0, - .event = undefined, - }; + self.* = .{}; } pub fn deinit(self: *WaitGroup) void { diff --git a/src/url.zig b/src/url.zig index 9f4614b3b2..38e3b6aab1 100644 --- a/src/url.zig +++ b/src/url.zig @@ -130,7 +130,7 @@ pub const URL = struct { } pub fn hasValidPort(this: *const URL) bool { - return (this.getPort() orelse 0) > 1; + return (this.getPort() orelse 0) > 0; } pub fn isEmpty(this: *const URL) bool { diff --git a/src/windows_c.zig b/src/windows_c.zig index d465ac0ecf..db6a06b8a3 100644 --- a/src/windows_c.zig +++ b/src/windows_c.zig @@ -958,6 +958,75 @@ pub const SystemErrno = enum(u16) { }; }; +pub const UV_E2BIG = -uv.UV_E2BIG; +pub const UV_EACCES = -uv.UV_EACCES; +pub const UV_EADDRINUSE = -uv.UV_EADDRINUSE; +pub const UV_EADDRNOTAVAIL = -uv.UV_EADDRNOTAVAIL; +pub const UV_EAFNOSUPPORT = -uv.UV_EAFNOSUPPORT; +pub const UV_EAGAIN = -uv.UV_EAGAIN; +pub const UV_EALREADY = -uv.UV_EALREADY; +pub const UV_EBADF = -uv.UV_EBADF; +pub const UV_EBUSY = -uv.UV_EBUSY; +pub const UV_ECANCELED = -uv.UV_ECANCELED; +pub const UV_ECHARSET = -uv.UV_ECHARSET; +pub const UV_ECONNABORTED = -uv.UV_ECONNABORTED; +pub const UV_ECONNREFUSED = -uv.UV_ECONNREFUSED; +pub const UV_ECONNRESET = -uv.UV_ECONNRESET; +pub const UV_EDESTADDRREQ = -uv.UV_EDESTADDRREQ; +pub const UV_EEXIST = -uv.UV_EEXIST; +pub const UV_EFAULT = -uv.UV_EFAULT; +pub const UV_EHOSTUNREACH = -uv.UV_EHOSTUNREACH; +pub const UV_EINTR = -uv.UV_EINTR; +pub const UV_EINVAL = -uv.UV_EINVAL; +pub const UV_EIO = -uv.UV_EIO; +pub const UV_EISCONN = -uv.UV_EISCONN; +pub const UV_EISDIR = -uv.UV_EISDIR; +pub const UV_ELOOP = -uv.UV_ELOOP; +pub const UV_EMFILE = -uv.UV_EMFILE; +pub const UV_EMSGSIZE = -uv.UV_EMSGSIZE; +pub const UV_ENAMETOOLONG = -uv.UV_ENAMETOOLONG; +pub const UV_ENETDOWN = -uv.UV_ENETDOWN; +pub const UV_ENETUNREACH = -uv.UV_ENETUNREACH; +pub const UV_ENFILE = -uv.UV_ENFILE; +pub const UV_ENOBUFS = -uv.UV_ENOBUFS; +pub const UV_ENODEV = 
-uv.UV_ENODEV; +pub const UV_ENOENT = -uv.UV_ENOENT; +pub const UV_ENOMEM = -uv.UV_ENOMEM; +pub const UV_ENONET = -uv.UV_ENONET; +pub const UV_ENOSPC = -uv.UV_ENOSPC; +pub const UV_ENOSYS = -uv.UV_ENOSYS; +pub const UV_ENOTCONN = -uv.UV_ENOTCONN; +pub const UV_ENOTDIR = -uv.UV_ENOTDIR; +pub const UV_ENOTEMPTY = -uv.UV_ENOTEMPTY; +pub const UV_ENOTSOCK = -uv.UV_ENOTSOCK; +pub const UV_ENOTSUP = -uv.UV_ENOTSUP; +pub const UV_EPERM = -uv.UV_EPERM; +pub const UV_EPIPE = -uv.UV_EPIPE; +pub const UV_EPROTO = -uv.UV_EPROTO; +pub const UV_EPROTONOSUPPORT = -uv.UV_EPROTONOSUPPORT; +pub const UV_EPROTOTYPE = -uv.UV_EPROTOTYPE; +pub const UV_EROFS = -uv.UV_EROFS; +pub const UV_ESHUTDOWN = -uv.UV_ESHUTDOWN; +pub const UV_ESPIPE = -uv.UV_ESPIPE; +pub const UV_ESRCH = -uv.UV_ESRCH; +pub const UV_ETIMEDOUT = -uv.UV_ETIMEDOUT; +pub const UV_ETXTBSY = -uv.UV_ETXTBSY; +pub const UV_EXDEV = -uv.UV_EXDEV; +pub const UV_EFBIG = -uv.UV_EFBIG; +pub const UV_ENOPROTOOPT = -uv.UV_ENOPROTOOPT; +pub const UV_ERANGE = -uv.UV_ERANGE; +pub const UV_ENXIO = -uv.UV_ENXIO; +pub const UV_EMLINK = -uv.UV_EMLINK; +pub const UV_EHOSTDOWN = -uv.UV_EHOSTDOWN; +pub const UV_EREMOTEIO = -uv.UV_EREMOTEIO; +pub const UV_ENOTTY = -uv.UV_ENOTTY; +pub const UV_EFTYPE = -uv.UV_EFTYPE; +pub const UV_EILSEQ = -uv.UV_EILSEQ; +pub const UV_EOVERFLOW = -uv.UV_EOVERFLOW; +pub const UV_ESOCKTNOSUPPORT = -uv.UV_ESOCKTNOSUPPORT; +pub const UV_ENODATA = -uv.UV_ENODATA; +pub const UV_EUNATCH = -uv.UV_EUNATCH; + pub const off_t = i64; pub fn preallocate_file(_: posix.fd_t, _: off_t, _: off_t) !void {} diff --git a/src/work_pool.zig b/src/work_pool.zig index 5cbad488ff..b9e1bd1573 100644 --- a/src/work_pool.zig +++ b/src/work_pool.zig @@ -14,13 +14,23 @@ pub fn NewWorkPool(comptime max_threads: ?usize) type { @setCold(true); pool = ThreadPool.init(.{ - .max_threads = max_threads orelse @max(@as(u32, @truncate(std.Thread.getCpuCount() catch 0)), 2), + .max_threads = max_threads orelse @max(2, max_threads: { + if (bun.getenvZ("GOMAXPROCS")) |max_procs| try_override: { + break :max_threads std.fmt.parseInt(u32, max_procs, 10) catch + break :try_override; + } + + break :max_threads @as(u32, @truncate(std.Thread.getCpuCount() catch 0)); + }), .stack_size = ThreadPool.default_thread_stack_size, }); return &pool; } + + /// Initialization of WorkPool is not thread-safe, as it is + /// assumed a single main thread sets everything up. Calling + /// this afterwards is thread-safe. 
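+    /// The pool size also honors a GOMAXPROCS environment variable override
+    /// (falling back to the CPU count when it is unset or unparsable), e.g.
+    /// `GOMAXPROCS=4 bun build ./index.ts`.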
pub inline fn get() *ThreadPool { - // lil racy if (loaded) return &pool; loaded = true; diff --git a/src/zlib.zig b/src/zlib.zig index 7ab99ee6df..b337c9b507 100644 --- a/src/zlib.zig +++ b/src/zlib.zig @@ -291,7 +291,7 @@ pub const ZlibError = error{ const ZlibAllocator = struct { pub fn alloc(_: *anyopaque, items: uInt, len: uInt) callconv(.C) *anyopaque { if (bun.heap_breakdown.enabled) { - const zone = bun.heap_breakdown.getZone(ZlibAllocator); + const zone = bun.heap_breakdown.getZone("zlib"); return zone.malloc_zone_calloc(items, len) orelse bun.outOfMemory(); } @@ -300,7 +300,7 @@ const ZlibAllocator = struct { pub fn free(_: *anyopaque, data: *anyopaque) callconv(.C) void { if (bun.heap_breakdown.enabled) { - const zone = bun.heap_breakdown.getZone(ZlibAllocator); + const zone = bun.heap_breakdown.getZone("zlib"); zone.malloc_zone_free(data); return; } diff --git a/test/bun.lockb b/test/bun.lockb index b3cdc08715..9508f9459a 100755 Binary files a/test/bun.lockb and b/test/bun.lockb differ diff --git a/test/bundler/__snapshots__/bun-build-api.test.ts.snap b/test/bundler/__snapshots__/bun-build-api.test.ts.snap index 32cf05e8c8..e17a2204bc 100644 --- a/test/bundler/__snapshots__/bun-build-api.test.ts.snap +++ b/test/bundler/__snapshots__/bun-build-api.test.ts.snap @@ -11,7 +11,6 @@ var __export = (target, all) => { set: (newValue) => all[name] = () => newValue }); }; -var __esm = (fn, res) => () => (fn && (res = fn(fn = 0)), res); // test/bundler/fixtures/trivial/fn.js var exports_fn = {}; @@ -21,11 +20,9 @@ __export(exports_fn, { function fn(a) { return a + 42; } -var init_fn = __esm(() => { -}); // test/bundler/fixtures/trivial/index.js -var NS = Promise.resolve().then(() => (init_fn(), exports_fn)); +var NS = Promise.resolve().then(() => exports_fn); NS.then(({ fn: fn2 }) => { console.log(fn2(42)); }); @@ -43,7 +40,6 @@ var __export = (target, all) => { set: (newValue) => all[name] = () => newValue }); }; -var __esm = (fn, res) => () => (fn && (res = fn(fn = 0)), res); // test/bundler/fixtures/trivial/fn.js var exports_fn = {}; @@ -53,22 +49,20 @@ __export(exports_fn, { function fn(a) { return a + 42; } -var init_fn = __esm(() => { -}); // test/bundler/fixtures/trivial/index.js -var NS = Promise.resolve().then(() => (init_fn(), exports_fn)); +var NS = Promise.resolve().then(() => exports_fn); NS.then(({ fn: fn2 }) => { console.log(fn2(42)); }); " `; -exports[`Bun.build BuildArtifact properties: hash 1`] = `"cv02d0ez"`; +exports[`Bun.build BuildArtifact properties: hash 1`] = `"r6c8x1cc"`; -exports[`Bun.build BuildArtifact properties + entry.naming: hash 1`] = `"3v155a0d"`; +exports[`Bun.build BuildArtifact properties + entry.naming: hash 1`] = `"vanwb97w"`; -exports[`Bun.build BuildArtifact properties sourcemap: hash index.js 1`] = `"cv02d0ez"`; +exports[`Bun.build BuildArtifact properties sourcemap: hash index.js 1`] = `"r6c8x1cc"`; exports[`Bun.build BuildArtifact properties sourcemap: hash index.js.map 1`] = `"00000000"`; @@ -83,7 +77,6 @@ var __export = (target, all) => { set: (newValue) => all[name] = () => newValue }); }; -var __esm = (fn, res) => () => (fn && (res = fn(fn = 0)), res); // test/bundler/fixtures/trivial/fn.js var exports_fn = {}; @@ -93,11 +86,9 @@ __export(exports_fn, { function fn(a) { return a + 42; } -var init_fn = __esm(() => { -}); // test/bundler/fixtures/trivial/index.js -var NS = Promise.resolve().then(() => (init_fn(), exports_fn)); +var NS = Promise.resolve().then(() => exports_fn); NS.then(({ fn: fn2 }) => { console.log(fn2(42)); }); diff --git 
a/test/bundler/bun-build-api.test.ts b/test/bundler/bun-build-api.test.ts index 5f5e89634b..586361a9bf 100644 --- a/test/bundler/bun-build-api.test.ts +++ b/test/bundler/bun-build-api.test.ts @@ -14,6 +14,43 @@ describe("Bun.build", () => { throw new Error("should have thrown"); }); + // https://github.com/oven-sh/bun/issues/12818 + test("sourcemap + build error crash case", async () => { + const dir = tempDirWithFiles("build", { + "/src/file1.ts": ` + import { A } from './dir'; + console.log(A); + `, + "/src/dir/index.ts": ` + import { B } from "./file3"; + export const A = [B] + `, + "/src/dir/file3.ts": ` + import { C } from "../file1"; // error + export const B = C; + `, + "/src/package.json": ` + { "type": "module" } + `, + "/src/tsconfig.json": ` + { + "extends": "../tsconfig.json", + "compilerOptions": { + "target": "ESNext", + "module": "ESNext", + "types": [] + } + } + `, + }); + const y = await Bun.build({ + entrypoints: [join(dir, "src/file1.ts")], + outdir: join(dir, "out"), + sourcemap: "external", + external: ["@minecraft"], + }); + }); + test("invalid options throws", async () => { expect(() => Bun.build({} as any)).toThrow(); expect(() => diff --git a/test/bundler/bundler_cjs2esm.test.ts b/test/bundler/bundler_cjs2esm.test.ts index 9cfce2d77b..6aff61dfbc 100644 --- a/test/bundler/bundler_cjs2esm.test.ts +++ b/test/bundler/bundler_cjs2esm.test.ts @@ -238,19 +238,19 @@ describe("bundler", () => { "/entry.js": /* js */ ` const react = require("react"); console.log(react.react); - + const react1 = (console.log(require("react").react), require("react")); console.log(react1.react); - + const react2 = (require("react"), console.log(require("react").react)); console.log(react2); - + let x = {}; x.react = require("react"); console.log(x.react.react); - + console.log(require("react").react); - + let y = {}; y[require("react")] = require("react"); console.log(y[require("react")].react); @@ -284,4 +284,27 @@ describe("bundler", () => { stdout: "react\nreact\nreact\nreact\nundefined\nreact\nreact\nreact\nreact\nreact\nreact\n1 react\nreact\nreact", }, }); + itBundled("cjs2esm/ReactSpecificUnwrapping", { + files: { + "/entry.js": /* js */ ` + import { renderToReadableStream } from "react"; + console.log(renderToReadableStream()); + `, + "/node_modules/react/index.js": /* js */ ` + console.log('side effect'); + module.exports = require('./main'); + `, + "/node_modules/react/main.js": /* js */ ` + "use strict"; + var REACT_ELEMENT_TYPE = Symbol.for("pass"); + exports.renderToReadableStream = (e, t) => { + return REACT_ELEMENT_TYPE; + } + `, + }, + run: { + stdout: "side effect\nSymbol(pass)", + }, + minifySyntax: true, + }); }); diff --git a/test/bundler/bundler_edgecase.test.ts b/test/bundler/bundler_edgecase.test.ts index 373b382c15..6bc239e3cd 100644 --- a/test/bundler/bundler_edgecase.test.ts +++ b/test/bundler/bundler_edgecase.test.ts @@ -668,6 +668,29 @@ describe("bundler", () => { "": ['ModuleNotFound resolving "/entry.js" (entry point)'], }, }); + itBundled("edgecase/AssetEntryPoint", { + files: { + "/entry.zig": ` + const std = @import("std"); + + pub fn main() void { + std.debug.print("Hello, world!\\n", .{}); + } + `, + }, + outdir: "/out", + entryPointsRaw: ["./entry.zig"], + runtimeFiles: { + "/exec.js": ` + import assert from 'node:assert'; + import the_path from './out/entry.js'; + assert.strictEqual(the_path, './entry-6dhkdck1.zig'); + `, + }, + run: { + file: "./exec.js", + }, + }); itBundled("edgecase/ExportDefaultUndefined", { files: { "/entry.ts": /* ts */ ` @@ -1199,6 
+1222,19 @@ describe("bundler", () => { stdout: "false", }, }); + itBundled("edgecase/ImportOptionsArgument", { + files: { + "/entry.js": ` + import('ext', { with: { get ''() { KEEP } } }) + .then(function (error) { + console.log(error); + }); + `, + }, + dce: true, + external: ["ext"], + target: "bun", + }); itBundled("edgecase/ConstantFoldingShiftOperations", { files: { "/entry.ts": ` @@ -1334,6 +1370,27 @@ describe("bundler", () => { `, }, }); + itBundled("edgecase/EntrypointWithoutPrefixSlashOrDotIsNotConsideredExternal#12734", { + files: { + "/src/entry.ts": /* ts */ ` + import { helloWorld } from "./second.ts"; + console.log(helloWorld); + `, + "/src/second.ts": /* ts */ ` + export const helloWorld = "Hello World"; + `, + }, + root: "/src", + entryPointsRaw: ["src/entry.ts"], + packages: "external", + target: "bun", + run: { + file: "/src/entry.ts", + stdout: ` + Hello World + `, + }, + }); itBundled("edgecase/IntegerUnderflow#12547", { files: { "/entry.js": ` @@ -1354,6 +1411,362 @@ describe("bundler", () => { api.expectFile("/out.js").toBe(`import{a as c}from"external";\n`); }, }); + itBundled("edgecase/TypeScriptNamespaceSiblingFunction", { + files: { + "/entry.ts": ` + namespace X { + export function Y() { + return 2; + } + export namespace Y { + export const Z = 1; + } + } + console.log(X, X.Y(), X.Y.Z); + `, + }, + run: { + stdout: "{\n Y: [Function: Y],\n} 2 1", + }, + }); + itBundled("edgecase/TypeScriptNamespaceSiblingClass", { + files: { + "/entry.ts": ` + namespace X { + export class Y { + constructor(v) { + this.value = v; + } + + toJSON() { + return this.value; + } + } + export namespace Y { + export const Z = 1; + } + } + console.log(X, new X.Y(2).toJSON(), X.Y.Z); + `, + }, + run: { + stdout: "{\n Y: [class Y],\n} 2 1", + }, + }); + itBundled("edgecase/TypeScriptNamespaceSiblingEnum", { + files: { + "/entry.ts": ` + namespace X { + export enum Y { + A, + B, + } + export namespace Y { + export const Z = 1; + } + } + console.log(JSON.stringify([X, X.Y.A, X.Y.Z])); + `, + }, + run: { + stdout: '[{"Y":{"0":"A","1":"B","A":0,"B":1,"Z":1}},0,1]', + }, + }); + itBundled("edgecase/TypeScriptNamespaceSiblingVariable", { + files: { + "/entry.ts": ` + namespace X { + export let Y = {}; + export namespace Y { + export const Z = 1; + } + } + `, + }, + bundleErrors: { + "/entry.ts": [`"Y" has already been declared`], + }, + }); + // This specifically only happens with 'export { ... } from ...' 
syntax + itBundled("edgecase/EsmSideEffectsFalseWithSideEffectsExportFrom", { + files: { + "/file1.js": ` + import("./file2.js"); + `, + "/file2.js": ` + export { a } from './file3.js'; + `, + "/file3.js": ` + export function a(input) { + return 42; + } + console.log('side effect'); + `, + "/package.json": ` + { + "name": "my-package", + "sideEffects": false + } + `, + }, + run: { + stdout: "side effect", + }, + }); + itBundled("edgecase/EsmSideEffectsFalseWithSideEffectsExportFromCodeSplitting", { + files: { + "/file1.js": ` + import("./file2.js"); + console.log('file1'); + `, + "/file1b.js": ` + import("./file2.js"); + console.log('file2'); + `, + "/file2.js": ` + export { a } from './file3.js'; + `, + "/file3.js": ` + export function a(input) { + return 42; + } + console.log('side effect'); + `, + "/package.json": ` + { + "name": "my-package", + "sideEffects": false + } + `, + }, + splitting: true, + outdir: "out", + entryPoints: ["./file1.js", "./file1b.js"], + run: [ + { + file: "/out/file1.js", + stdout: "file1\nside effect", + }, + { + file: "/out/file1b.js", + stdout: "file2\nside effect", + }, + ], + }); + itBundled("edgecase/RequireSideEffectsFalseWithSideEffectsExportFrom", { + files: { + "/file1.js": ` + require("./file2.js"); + `, + "/file2.js": ` + export { a } from './file3.js'; + `, + "/file3.js": ` + export function a(input) { + return 42; + } + console.log('side effect'); + `, + "/package.json": ` + { + "name": "my-package", + "sideEffects": false + } + `, + }, + run: { + stdout: "side effect", + }, + }); + itBundled("edgecase/SideEffectsFalseWithSideEffectsExportFrom", { + files: { + "/file1.js": ` + import("./file2.js"); + `, + "/file2.js": ` + import * as foo from './file3.js'; + export default foo; + `, + "/file3.js": ` + export function a(input) { + return 42; + } + console.log('side effect'); + `, + "/package.json": ` + { + "name": "my-package", + "sideEffects": false + } + `, + }, + run: { + stdout: "side effect", + }, + }); + itBundled("edgecase/BuiltinWithTrailingSlash", { + files: { + "/entry.js": ` + import * as process from 'process/'; + console.log(JSON.stringify(process)); + `, + "/node_modules/process/index.js": ` + export default { hello: 'world' }; + `, + }, + run: { + stdout: `{"default":{"hello":"world"}}`, + }, + }); + itBundled("edgecase/EsmWrapperClassHoisting", { + files: { + "/entry.ts": ` + async function hi() { + const { default: MyInherited } = await import('./hello'); + const myInstance = new MyInherited(); + console.log(myInstance.greet()) + } + + hi(); + `, + "/hello.ts": ` + const MyReassignedSuper = class MySuper { + greet() { + return 'Hello, world!'; + } + }; + + class MyInherited extends MyReassignedSuper {}; + + export default MyInherited; + `, + }, + run: { + stdout: "Hello, world!", + }, + }); + itBundled("edgecase/EsmWrapperElimination1", { + files: { + "/entry.ts": ` + async function load() { + return import('./hello'); + } + load().then(({ default: def }) => console.log(def())); + `, + "/hello.ts": ` + export var x = 123; + export var y = function() { return x; }; + export function z() { return y(); } + function a() { return z(); } + export default function c() { return a(); } + `, + }, + run: { + stdout: "123", + }, + }); + itBundled("edgecase/TsEnumTreeShakingUseAndInlineClass", { + files: { + "/entry.ts": ` + import { TestEnum } from './enum'; + + class TestClass { + constructor() { + console.log(JSON.stringify(TestEnum)); + } + + testMethod(name: TestEnum) { + return name === TestEnum.A; + } + } + + // This must use wrapper 
class + console.log(new TestClass()); + // This must inline + console.log(TestClass.prototype.testMethod.toString().includes('TestEnum')); + `, + "/enum.ts": ` + export enum TestEnum { + A, + B, + } + `, + }, + dce: true, + run: { + stdout: ` + {"0":"A","1":"B","A":0,"B":1} + TestClass { + testMethod: [Function: testMethod], + } + false + `, + }, + }); + // this test checks that visit order doesn't matter (inline then use, above is use then inline) + itBundled("edgecase/TsEnumTreeShakingUseAndInlineClass2", { + files: { + "/entry.ts": ` + import { TestEnum } from './enum'; + + class TestClass { + testMethod(name: TestEnum) { + return name === TestEnum.A; + } + + constructor() { + console.log(JSON.stringify(TestEnum)); + } + } + + // This must use wrapper class + console.log(new TestClass()); + // This must inline + console.log(TestClass.prototype.testMethod.toString().includes('TestEnum')); + `, + "/enum.ts": ` + export enum TestEnum { + A, + B, + } + `, + }, + dce: true, + run: { + stdout: ` + {"0":"A","1":"B","A":0,"B":1} + TestClass { + testMethod: [Function: testMethod], + } + false + `, + }, + }); + itBundled("edgecase/TsEnumTreeShakingUseAndInlineNamespace", { + files: { + "/entry.ts": ` + import { TestEnum } from './enum'; + + namespace TestClass { + console.log(JSON.stringify(TestEnum)); + console.log((() => TestEnum.A).toString().includes('TestEnum')); + } + `, + "/enum.ts": ` + export enum TestEnum { + A, + B, + } + `, + }, + dce: true, + run: { + stdout: ` + {"0":"A","1":"B","A":0,"B":1} + false + `, + }, + }); // TODO(@paperdave): test every case of this. I had already tested it manually, but it may break later const requireTranspilationListESM = [ diff --git a/test/bundler/expectBundled.ts b/test/bundler/expectBundled.ts index ec6f4b3e6e..195a710cda 100644 --- a/test/bundler/expectBundled.ts +++ b/test/bundler/expectBundled.ts @@ -1259,7 +1259,7 @@ for (const [key, blob] of build.outputs) { const outfiletext = api.readFile(path.relative(root, outfile ?? outputPaths[0])); const regex = /\/\/\s+(.+?)\nvar\s+([a-zA-Z0-9_$]+)\s+=\s+__commonJS/g; const matches = [...outfiletext.matchAll(regex)].map(match => ("/" + match[1]).replaceAll("\\", "/")); - const expectedMatches = (cjs2esm === true ? [] : cjs2esm.unhandled ?? []).map(a => a.replaceAll("\\", "/")); + const expectedMatches = (cjs2esm === true ? [] : (cjs2esm.unhandled ??
[])).map(a => a.replaceAll("\\", "/")); try { expect(matches.sort()).toEqual(expectedMatches.sort()); } catch (error) { diff --git a/test/transpiler/__snapshots__/transpiler.test.js.snap b/test/bundler/transpiler/__snapshots__/transpiler.test.js.snap similarity index 100% rename from test/transpiler/__snapshots__/transpiler.test.js.snap rename to test/bundler/transpiler/__snapshots__/transpiler.test.js.snap diff --git a/test/transpiler/async-transpiler-entry.js b/test/bundler/transpiler/async-transpiler-entry.js similarity index 100% rename from test/transpiler/async-transpiler-entry.js rename to test/bundler/transpiler/async-transpiler-entry.js diff --git a/test/transpiler/async-transpiler-imported.js b/test/bundler/transpiler/async-transpiler-imported.js similarity index 100% rename from test/transpiler/async-transpiler-imported.js rename to test/bundler/transpiler/async-transpiler-imported.js diff --git a/test/transpiler/decorator-export-default-class-fixture-anon.ts b/test/bundler/transpiler/decorator-export-default-class-fixture-anon.ts similarity index 100% rename from test/transpiler/decorator-export-default-class-fixture-anon.ts rename to test/bundler/transpiler/decorator-export-default-class-fixture-anon.ts diff --git a/test/transpiler/decorator-export-default-class-fixture.ts b/test/bundler/transpiler/decorator-export-default-class-fixture.ts similarity index 100% rename from test/transpiler/decorator-export-default-class-fixture.ts rename to test/bundler/transpiler/decorator-export-default-class-fixture.ts diff --git a/test/transpiler/decorator-metadata.test.ts b/test/bundler/transpiler/decorator-metadata.test.ts similarity index 100% rename from test/transpiler/decorator-metadata.test.ts rename to test/bundler/transpiler/decorator-metadata.test.ts diff --git a/test/transpiler/decorators.test.ts b/test/bundler/transpiler/decorators.test.ts similarity index 100% rename from test/transpiler/decorators.test.ts rename to test/bundler/transpiler/decorators.test.ts diff --git a/test/transpiler/export-default-with-static-initializer.js b/test/bundler/transpiler/export-default-with-static-initializer.js similarity index 100% rename from test/transpiler/export-default-with-static-initializer.js rename to test/bundler/transpiler/export-default-with-static-initializer.js diff --git a/test/transpiler/export-default.test.js b/test/bundler/transpiler/export-default.test.js similarity index 100% rename from test/transpiler/export-default.test.js rename to test/bundler/transpiler/export-default.test.js diff --git a/test/transpiler/handlebars.hbs b/test/bundler/transpiler/handlebars.hbs similarity index 100% rename from test/transpiler/handlebars.hbs rename to test/bundler/transpiler/handlebars.hbs diff --git a/test/transpiler/inline.macro.js b/test/bundler/transpiler/inline.macro.js similarity index 100% rename from test/transpiler/inline.macro.js rename to test/bundler/transpiler/inline.macro.js diff --git a/test/transpiler/macro-check.js b/test/bundler/transpiler/macro-check.js similarity index 100% rename from test/transpiler/macro-check.js rename to test/bundler/transpiler/macro-check.js diff --git a/test/transpiler/macro-test.test.ts b/test/bundler/transpiler/macro-test.test.ts similarity index 100% rename from test/transpiler/macro-test.test.ts rename to test/bundler/transpiler/macro-test.test.ts diff --git a/test/transpiler/macro.ts b/test/bundler/transpiler/macro.ts similarity index 100% rename from test/transpiler/macro.ts rename to test/bundler/transpiler/macro.ts diff --git 
a/test/transpiler/property-non-ascii-fixture.js b/test/bundler/transpiler/property-non-ascii-fixture.js similarity index 100% rename from test/transpiler/property-non-ascii-fixture.js rename to test/bundler/transpiler/property-non-ascii-fixture.js diff --git a/test/transpiler/property.test.ts b/test/bundler/transpiler/property.test.ts similarity index 100% rename from test/transpiler/property.test.ts rename to test/bundler/transpiler/property.test.ts diff --git a/test/transpiler/runtime-transpiler-fixture-duplicate-keys.json b/test/bundler/transpiler/runtime-transpiler-fixture-duplicate-keys.json similarity index 100% rename from test/transpiler/runtime-transpiler-fixture-duplicate-keys.json rename to test/bundler/transpiler/runtime-transpiler-fixture-duplicate-keys.json diff --git a/test/transpiler/runtime-transpiler-json-fixture.json b/test/bundler/transpiler/runtime-transpiler-json-fixture.json similarity index 100% rename from test/transpiler/runtime-transpiler-json-fixture.json rename to test/bundler/transpiler/runtime-transpiler-json-fixture.json diff --git a/test/transpiler/runtime-transpiler.test.ts b/test/bundler/transpiler/runtime-transpiler.test.ts similarity index 100% rename from test/transpiler/runtime-transpiler.test.ts rename to test/bundler/transpiler/runtime-transpiler.test.ts diff --git a/test/transpiler/template-literal-fixture-test.js b/test/bundler/transpiler/template-literal-fixture-test.js similarity index 100% rename from test/transpiler/template-literal-fixture-test.js rename to test/bundler/transpiler/template-literal-fixture-test.js diff --git a/test/transpiler/template-literal.test.ts b/test/bundler/transpiler/template-literal.test.ts similarity index 100% rename from test/transpiler/template-literal.test.ts rename to test/bundler/transpiler/template-literal.test.ts diff --git a/test/transpiler/transpiler-stack-overflow.test.ts b/test/bundler/transpiler/transpiler-stack-overflow.test.ts similarity index 100% rename from test/transpiler/transpiler-stack-overflow.test.ts rename to test/bundler/transpiler/transpiler-stack-overflow.test.ts diff --git a/test/transpiler/transpiler.test.js b/test/bundler/transpiler/transpiler.test.js similarity index 99% rename from test/transpiler/transpiler.test.js rename to test/bundler/transpiler/transpiler.test.js index 357925e6ae..581591fb3e 100644 --- a/test/transpiler/transpiler.test.js +++ b/test/bundler/transpiler/transpiler.test.js @@ -617,7 +617,7 @@ describe("Bun.Transpiler", () => { exp("class Foo {}", "class Foo {\n}"); exp("Foo = class {}", "Foo = class {\n}"); exp("Foo = class Bar {}", "Foo = class Bar {\n}"); - exp("function foo() {}", "let foo = function() {\n}"); + exp("function foo() {}", "function foo() {\n}"); exp("foo = function () {}", "foo = function() {\n}"); exp("foo = function bar() {}", "foo = function bar() {\n}"); exp("class Foo { bar() {} }", "class Foo {\n bar() {\n }\n}"); diff --git a/test/transpiler/tsconfig.is-just-a-number.json b/test/bundler/transpiler/tsconfig.is-just-a-number.json similarity index 100% rename from test/transpiler/tsconfig.is-just-a-number.json rename to test/bundler/transpiler/tsconfig.is-just-a-number.json diff --git a/test/transpiler/tsconfig.with-commas.json b/test/bundler/transpiler/tsconfig.with-commas.json similarity index 100% rename from test/transpiler/tsconfig.with-commas.json rename to test/bundler/transpiler/tsconfig.with-commas.json diff --git a/test/transpiler/with-statement-works.js b/test/bundler/transpiler/with-statement-works.js similarity index 100% rename 
from test/transpiler/with-statement-works.js rename to test/bundler/transpiler/with-statement-works.js diff --git a/test/cli/install/bun-install-retry.test.ts b/test/cli/install/bun-install-retry.test.ts new file mode 100644 index 0000000000..87735923fd --- /dev/null +++ b/test/cli/install/bun-install-retry.test.ts @@ -0,0 +1,111 @@ +import { file, spawn } from "bun"; +import { afterAll, afterEach, beforeAll, beforeEach, expect, it, setDefaultTimeout } from "bun:test"; +import { bunExe, bunEnv as env, toHaveBins, toBeValidBin, toBeWorkspaceLink, tmpdirSync } from "harness"; +import { access, mkdir, readlink, rm, writeFile, copyFile, appendFile, readFile } from "fs/promises"; +import { join, relative } from "path"; +import { + dummyAfterAll, + dummyAfterEach, + dummyBeforeAll, + dummyBeforeEach, + dummyRegistry, + package_dir, + readdirSorted, + requested, + root_url, + setHandler, +} from "./dummy.registry"; + +beforeAll(dummyBeforeAll); +afterAll(dummyAfterAll); + +expect.extend({ + toHaveBins, + toBeValidBin, + toBeWorkspaceLink, +}); + +let port: string; +let add_dir: string; +beforeAll(() => { + setDefaultTimeout(1000 * 60 * 5); + port = new URL(root_url).port; +}); + +beforeEach(async () => { + add_dir = tmpdirSync(); + await dummyBeforeEach(); }); +afterEach(async () => { + await dummyAfterEach(); +}); + +it("retries on 500", async () => { + const urls: string[] = []; + setHandler(dummyRegistry(urls, undefined, 4)); + await writeFile( + join(package_dir, "package.json"), + JSON.stringify({ + name: "foo", + version: "0.0.1", + }), + ); + const { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "add", "BaR"], + cwd: package_dir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env, + }); + const err = await new Response(stderr).text(); + expect(err).not.toContain("error:"); + expect(err).toContain("Saved lockfile"); + const out = await new Response(stdout).text(); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + "", + "installed BaR@0.0.2", + "", + "1 package installed", + ]); + expect(await exited).toBe(0); + expect(urls.sort()).toEqual([ + `${root_url}/BaR`, + `${root_url}/BaR`, + `${root_url}/BaR`, + `${root_url}/BaR`, + `${root_url}/BaR`, + `${root_url}/BaR`, + `${root_url}/BaR-0.0.2.tgz`, + `${root_url}/BaR-0.0.2.tgz`, + `${root_url}/BaR-0.0.2.tgz`, + `${root_url}/BaR-0.0.2.tgz`, + `${root_url}/BaR-0.0.2.tgz`, + `${root_url}/BaR-0.0.2.tgz`, + ]); + expect(requested).toBe(12); + await Promise.all([ + (async () => expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([".cache", "BaR"]))(), + (async () => expect(await readdirSorted(join(package_dir, "node_modules", "BaR"))).toEqual(["package.json"]))(), + (async () => + expect(await file(join(package_dir, "node_modules", "BaR", "package.json")).json()).toEqual({ + name: "bar", + version: "0.0.2", + }))(), + (async () => + expect(await file(join(package_dir, "package.json")).text()).toEqual( + JSON.stringify( + { + name: "foo", + version: "0.0.1", + dependencies: { + BaR: "^0.0.2", + }, + }, + null, + 2, + ), + ))(), + (async () => await access(join(package_dir, "bun.lockb")))(), + ]); +}); diff --git a/test/cli/install/bun-install.test.ts b/test/cli/install/bun-install.test.ts index 45c60f1465..b75dcd26e1 100644 --- a/test/cli/install/bun-install.test.ts +++ b/test/cli/install/bun-install.test.ts @@ -435,7 +435,7 @@ it("should handle @scoped authentication", async () => { } expect(await request.text()).toBeEmpty(); urls.push(request.url); - return new Response("Feeling lucky?", {
status: 555 }); + return new Response("Feeling lucky?", { status: 422 }); }); // workaround against `writeFile(..., { flag: "a" })` await writeFile( @@ -454,7 +454,7 @@ foo = { token = "bar" } env, }); const err = await new Response(stderr).text(); - expect(err.split(/\r?\n/)).toContain(`error: GET ${url} - 555`); + expect(err.split(/\r?\n/)).toContain(`error: GET ${url} - 422`); expect(await new Response(stdout).text()).toBeEmpty(); expect(await exited).toBe(1); expect(urls.sort()).toEqual([url]); diff --git a/test/cli/install/dummy.registry.ts b/test/cli/install/dummy.registry.ts index 0858835b5d..b559f06345 100644 --- a/test/cli/install/dummy.registry.ts +++ b/test/cli/install/dummy.registry.ts @@ -24,15 +24,28 @@ let server: Server; export let package_dir: string; export let requested: number; export let root_url: string; - -export function dummyRegistry(urls: string[], info: any = { "0.0.2": {} }) { +export function dummyRegistry(urls: string[], info: any = { "0.0.2": {} }, numberOfTimesTo500PerURL = 0) { + let retryCountsByURL = new Map(); const _handler: Handler = async request => { urls.push(request.url); const url = request.url.replaceAll("%2f", "/"); + let status = 200; + + if (numberOfTimesTo500PerURL > 0) { + let currentCount = retryCountsByURL.get(request.url); + if (currentCount === undefined) { + retryCountsByURL.set(request.url, numberOfTimesTo500PerURL); + status = 500; + } else { + retryCountsByURL.set(request.url, currentCount - 1); + status = currentCount > 0 ? 500 : 200; + } + } + expect(request.method).toBe("GET"); if (url.endsWith(".tgz")) { - return new Response(file(join(import.meta.dir, basename(url).toLowerCase()))); + return new Response(file(join(import.meta.dir, basename(url).toLowerCase())), { status }); } expect(request.headers.get("accept")).toBe( "application/vnd.npm.install-v1+json; q=1.0, application/json; q=0.8, */*", @@ -54,6 +67,7 @@ export function dummyRegistry(urls: string[], info: any = { "0.0.2": {} }) { ...info[version], }; } + return new Response( JSON.stringify({ name, @@ -62,6 +76,9 @@ export function dummyRegistry(urls: string[], info: any = { "0.0.2": {} }) { latest: info.latest ?? 
version, }, }), + { + status: status, + }, ); }; return _handler; diff --git a/test/cli/run/fixture-crash.js b/test/cli/run/fixture-crash.js index 9a56452ac7..c90049da5d 100644 --- a/test/cli/run/fixture-crash.js +++ b/test/cli/run/fixture-crash.js @@ -11,5 +11,5 @@ const approach = process.argv[2]; if (approach in crash_handler) { crash_handler[approach](); } else { - console.error("usage: bun fixture-crash.js "); + console.error("usage: bun fixture-crash.js "); } diff --git a/test/cli/run/run-crash-handler.test.ts b/test/cli/run/run-crash-handler.test.ts index 0cc84f40c7..4923c6f06d 100644 --- a/test/cli/run/run-crash-handler.test.ts +++ b/test/cli/run/run-crash-handler.test.ts @@ -11,58 +11,74 @@ test.if(process.platform === "darwin")("macOS has the assumed image offset", () => { expect(getMachOImageZeroOffset()).toBe(0x100000000); }); +test("raise ignoring panic handler does not trigger the panic handler", async () => { + let sent = false; + + using server = Bun.serve({ + port: 0, + fetch(request, server) { + sent = true; + return new Response("OK"); + }, + }); + + const proc = Bun.spawn({ + cmd: [bunExe(), path.join(import.meta.dir, "fixture-crash.js"), "raiseIgnoringPanicHandler"], + env: mergeWindowEnvs([ + bunEnv, + { + BUN_CRASH_REPORT_URL: server.url.toString(), + BUN_ENABLE_CRASH_REPORTING: "1", + }, + ]), + }); + + await expect(proc.exited).resolves.not.toBe(0); + expect(sent).toBe(false); +}); + describe("automatic crash reporter", () => { - const has_reporting = process.platform !== "linux"; + for (const approach of ["panic", "segfault", "outOfMemory"]) { + test(`${approach} should report`, async () => { + let sent = false; - for (const should_report of has_reporting ? [true, false] : [false]) { - for (const approach of ["panic", "segfault"]) { - // TODO: this dependency injection no worky. fix later - test.todo(`${approach} ${should_report ? "should" : "should not"} report`, async () => { - const temp = tempDirWithFiles("crash-handler-path", { - "curl": ({ root }) => `#!/usr/bin/env bash echo $@ > ${root}/request.out `, - "powershell.cmd": ({ root }) => `echo true > ${root}\\request.out `, - }); + // Self host the crash report backend.
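The "self host the crash report backend" comment above is the heart of this rewrite: instead of shimming `curl`/`powershell` on PATH and checking for a dropped file, the tests point BUN_CRASH_REPORT_URL at a throwaway Bun.serve instance and flip a flag when any request arrives. A compressed TypeScript sketch of that capture pattern, using only APIs that already appear in this diff (the fixture path and `process.execPath` stand in for the harness helpers):

import { expect, test } from "bun:test";

test("crash reporter phones home", async () => {
  let sent = false;
  // Disposable server on an OS-assigned port; closed automatically via `using`.
  using server = Bun.serve({
    port: 0,
    fetch() {
      sent = true; // any request means the crash reporter reached us
      return new Response("OK");
    },
  });

  const proc = Bun.spawn({
    // Illustrative: the real tests spawn bunExe() on fixture-crash.js.
    cmd: [process.execPath, "fixture-crash.js", "panic"],
    env: {
      ...process.env,
      BUN_CRASH_REPORT_URL: server.url.toString(),
      BUN_ENABLE_CRASH_REPORTING: "1",
    },
  });
  expect(await proc.exited).not.toBe(0); // the fixture crashes on purpose
  expect(sent).toBe(true);
});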
+ using server = Bun.serve({ + port: 0, + fetch(request, server) { + expect(request.url).toEndWith("/ack"); + sent = true; + return new Response("OK"); + }, + }); - const env: any = mergeWindowEnvs([ + const proc = Bun.spawn({ + cmd: [bunExe(), path.join(import.meta.dir, "fixture-crash.js"), approach], + env: mergeWindowEnvs([ + bunEnv, { - ...bunEnv, + BUN_CRASH_REPORT_URL: server.url.toString(), + BUN_ENABLE_CRASH_REPORTING: "1", GITHUB_ACTIONS: undefined, CI: undefined, }, - { - PATH: temp + path.delimiter + process.env.PATH, - }, - ]); - - if (!should_report) { - env.DO_NOT_TRACK = "1"; - } - - const result = Bun.spawnSync( - [ - bunExe(), - path.join(import.meta.dir, "fixture-crash.js"), - approach, - "--debug-crash-handler-use-trace-string", - ], - { env }, - ); - - console.log(result.stderr.toString("utf-8")); - try { - expect(result.stderr.toString("utf-8")).toInclude("https://bun.report/"); - } catch (e) { - throw e; - } - - await Bun.sleep(1000); - - const did_report = existsSync(path.join(temp, "request.out")); - expect(did_report).toBe(should_report); + ]), + stdio: ["ignore", "pipe", "pipe"], }); - } + const exitCode = await proc.exited; + const stderr = await Bun.readableStreamToText(proc.stderr); + + console.log(stderr); + + expect(exitCode).not.toBe(0); + expect(stderr).toContain(server.url.toString()); + if (approach !== "outOfMemory") { + expect(stderr).toContain("oh no: Bun has crashed. This indicates a bug in Bun, not your code"); + } else { + expect(stderr.toLowerCase()).toContain("out of memory"); + expect(stderr.toLowerCase()).not.toContain("panic"); + } + expect(sent).toBe(true); + }); } }); diff --git a/test/cli/test/bun-test.test.ts b/test/cli/test/bun-test.test.ts index 784bbfbd50..908da6f667 100644 --- a/test/cli/test/bun-test.test.ts +++ b/test/cli/test/bun-test.test.ts @@ -891,7 +891,7 @@ function createTest(input?: string | (string | { filename: string; contents: str const inputs = Array.isArray(input) ? input : [input ?? ""]; for (const input of inputs) { const contents = typeof input === "string" ? input : input.contents; - const name = typeof input === "string" ? filename ?? `bun-test-${Math.random()}.test.ts` : input.filename; + const name = typeof input === "string" ? (filename ?? 
`bun-test-${Math.random()}.test.ts`) : input.filename; const path = join(cwd, name); try { diff --git a/test/cli/test/coverage.test.ts b/test/cli/test/coverage.test.ts index d5bb7bdb7a..c598cc7269 100644 --- a/test/cli/test/coverage.test.ts +++ b/test/cli/test/coverage.test.ts @@ -54,3 +54,26 @@ export class Y { expect(result.signalCode).toBeUndefined(); expect(readFileSync(path.join(dir, "coverage", "lcov.info"), "utf-8")).toMatchSnapshot(); }); + +test("coverage excludes node_modules directory", () => { + const dir = tempDirWithFiles("cov", { + "node_modules/pi/index.js": ` + export const pi = 3.14; + `, + "demo.test.ts": ` + import { pi } from 'pi'; + console.log(pi); + `, + }); + const result = Bun.spawnSync([bunExe(), "test", "--coverage"], { + cwd: dir, + env: { + ...bunEnv, + }, + stdio: [null, null, "pipe"], + }); + expect(result.stderr.toString("utf-8")).toContain("demo.test.ts"); + expect(result.stderr.toString("utf-8")).not.toContain("node_modules"); + expect(result.exitCode).toBe(0); + expect(result.signalCode).toBeUndefined(); +}); diff --git a/test/harness.ts b/test/harness.ts index 9944544ed3..6710381e75 100644 --- a/test/harness.ts +++ b/test/harness.ts @@ -142,9 +142,11 @@ export function tempDirWithFiles(basename: string, files: DirectoryTree): string const joined = join(base, name); if (name.includes("/")) { const dir = dirname(name); - fs.mkdirSync(join(base, dir), { recursive: true }); + if (dir !== name && dir !== ".") { + fs.mkdirSync(join(base, dir), { recursive: true }); + } } - if (typeof contents === "object" && contents && !Buffer.isBuffer(contents)) { + if (typeof contents === "object" && contents && typeof contents?.byteLength === "undefined") { fs.mkdirSync(joined); makeTree(joined, contents); continue; @@ -357,21 +359,21 @@ expect.extend({ } } }, - toRun(cmds: string[], optionalStdout?: string) { + toRun(cmds: string[], optionalStdout?: string, expectedCode: number = 0) { const result = Bun.spawnSync({ cmd: [bunExe(), ...cmds], env: bunEnv, stdio: ["inherit", "pipe", "inherit"], }); - if (result.exitCode !== 0) { + if (result.exitCode !== expectedCode) { return { pass: false, message: () => `Command ${cmds.join(" ")} failed:` + "\n" + result.stdout.toString("utf-8"), }; } - if (optionalStdout) { + if (optionalStdout != null) { return { pass: result.stdout.toString("utf-8") === optionalStdout, message: () => @@ -384,6 +386,43 @@ expect.extend({ message: () => `Expected ${cmds.join(" ")} to fail`, }; }, + toThrowWithCode(fn: CallableFunction, cls: CallableFunction, code: string) { + try { + fn(); + return { + pass: false, + message: () => `Received function did not throw`, + }; + } catch (e) { + // expect(e).toBeInstanceOf(cls); + if (!(e instanceof cls)) { + return { + pass: false, + message: () => `Expected error to be instanceof ${cls.name}; got ${e.__proto__.constructor.name}`, + }; + } + + // expect(e).toHaveProperty("code"); + if (!("code" in e)) { + return { + pass: false, + message: () => `Expected error to have property 'code'; got ${e}`, + }; + } + + // expect(e.code).toEqual(code); + if (e.code !== code) { + return { + pass: false, + message: () => `Expected error to have code '${code}'; got ${e.code}`, + }; + } + + return { + pass: true, + }; + } + }, }); export function ospath(path: string) { @@ -1030,7 +1069,8 @@ interface BunHarnessTestMatchers { toBeUTF16String(): void; toHaveTestTimedOutAfter(expected: number): void; toBeBinaryType(expected: keyof typeof binaryTypes): void; - toRun(optionalStdout?: string): void; + toRun(optionalStdout?: 
string, expectedCode?: number): void; + toThrowWithCode(cls: CallableFunction, code: string): void; } declare module "bun:test" { diff --git a/test/integration/next-pages/src/pages/index.tsx b/test/integration/next-pages/src/pages/index.tsx index 109f5e5e27..b87da9145d 100644 --- a/test/integration/next-pages/src/pages/index.tsx +++ b/test/integration/next-pages/src/pages/index.tsx @@ -122,7 +122,7 @@ export async function getStaticProps() { bunVersion: process.env.NODE_ENV === "production" ? "[production needs a constant string]" - : process.versions.bun ?? "not in bun", + : (process.versions.bun ?? "not in bun"), }, }; } diff --git a/test/js/bun/dns/resolve-dns.test.ts b/test/js/bun/dns/resolve-dns.test.ts index b237d43a3f..747a2aa3a2 100644 --- a/test/js/bun/dns/resolve-dns.test.ts +++ b/test/js/bun/dns/resolve-dns.test.ts @@ -107,7 +107,7 @@ describe("dns", () => { test.each(malformedHostnames)("'%s'", hostname => { // @ts-expect-error expect(dns.lookup(hostname, { backend })).rejects.toMatchObject({ - code: "DNS_ENOTFOUND", + code: expect.stringMatching(/^(DNS_ENOTFOUND|DNS_ESERVFAIL|DNS_ENOTIMP)$/), name: "DNSException", }); }); diff --git a/test/js/bun/http/bun-server.test.ts b/test/js/bun/http/bun-server.test.ts index 4d4cc571ea..30318d15d7 100644 --- a/test/js/bun/http/bun-server.test.ts +++ b/test/js/bun/http/bun-server.test.ts @@ -1,4 +1,4 @@ -import type { ServerWebSocket, Server } from "bun"; +import type { ServerWebSocket, Server, Socket } from "bun"; import { describe, expect, test } from "bun:test"; import { bunExe, bunEnv, rejectUnauthorizedScope } from "harness"; import path from "path"; @@ -543,3 +543,83 @@ test("should be able to async upgrade using custom protocol", async () => { expect(await promise).toBe(true); }); + +test("should be able to abruptly close an upload request", async () => { + const { promise, resolve } = Promise.withResolvers(); + using server = Bun.serve({ + port: 0, + hostname: "localhost", + maxRequestBodySize: 1024 * 1024 * 1024 * 16, + async fetch(req) { + let total_size = 0; + req.signal.addEventListener("abort", resolve); + + for await (const chunk of req.body as ReadableStream) { + total_size += chunk.length; + if (total_size > 1024 * 1024 * 1024) { + return new Response("too big", { status: 413 }); + } + } + + return new Response("Received " + total_size); + }, + }); + // ~100KB + const chunk = Buffer.alloc(1024 * 100, "a"); + // ~1GB + const MAX_PAYLOAD = 1024 * 1024 * 1024; + const request = Buffer.from( + `POST / HTTP/1.1\r\nHost: ${server.hostname}:${server.port}\r\nContent-Length: ${MAX_PAYLOAD}\r\n\r\n`, + ); + + type SocketInfo = { state: number; pending: Buffer | null }; + function tryWritePending(socket: Socket) { + if (socket.data.pending === null) { + // first write + socket.data.pending = request; + } + const data = socket.data.pending as Buffer; + const written = socket.write(data); + if (written < data.byteLength) { + // partial write: keep the unsent remainder + socket.data.pending = data.slice(written); + return false; + } + + // full write; go to the next state + if (socket.data.state === 0) { + // request sent -> send chunk + socket.data.pending = chunk; + } else { + // chunk sent -> delay shutdown + setTimeout(() => socket.shutdown(), 100); + } + socket.data.state++; + socket.flush(); + return true; + } + + function trySend(socket: Socket) { + while (socket.data.state < 2) { + if (!tryWritePending(socket)) { + return; + } + } + return; + } + await Bun.connect({ + hostname: server.hostname, + port: server.port, + data: { + state: 0, + pending: null, + }
as SocketInfo, + socket: { + open: trySend, + drain: trySend, + data(socket, data) {}, + }, + }); + await promise; + expect().pass(); +}); diff --git a/test/js/bun/shell/exec.test.ts b/test/js/bun/shell/exec.test.ts index 2fb108ddcf..cd6b074161 100644 --- a/test/js/bun/shell/exec.test.ts +++ b/test/js/bun/shell/exec.test.ts @@ -71,6 +71,13 @@ describe("bun exec", () => { } }); + TestBuilder.command`${BUN} exec cd` + .env(bunEnv) + .exitCode(0) + .stderr("") + .stdout("") + .runAsTest("cd with no arguments works"); + test("bun works even when not in PATH", async () => { const val = await $`bun exec 'bun'`.env({ ...bunEnv, PATH: "" }).nothrow(); expect(val.stderr.toString()).not.toContain("bun: command not found: bun"); diff --git a/test/js/bun/test/jest-each.test.ts b/test/js/bun/test/jest-each.test.ts index bb8bd54b4c..0bd0239ac7 100644 --- a/test/js/bun/test/jest-each.test.ts +++ b/test/js/bun/test/jest-each.test.ts @@ -53,3 +53,7 @@ describe.each(["some", "cool", "strings"])("works with describe: %s", s => { done(); }); }); + +describe("does not return zero", () => { + expect(it.each([1, 2])("wat", () => {})).toBeUndefined(); +}); diff --git a/test/js/bun/test/snapshot-tests/snapshots/__snapshots__/snapshot.test.ts.snap b/test/js/bun/test/snapshot-tests/snapshots/__snapshots__/snapshot.test.ts.snap index 51953013d8..47112120bc 100644 --- a/test/js/bun/test/snapshot-tests/snapshots/__snapshots__/snapshot.test.ts.snap +++ b/test/js/bun/test/snapshot-tests/snapshots/__snapshots__/snapshot.test.ts.snap @@ -172,7 +172,7 @@ exports[`most types: Float32Array 1`] = `Float32Array []`; exports[`most types: Float64Array 1`] = `Float64Array []`; -exports[`most types: Function 1`] = `[Function]`; +exports[`most types: Function 1`] = `[Function: test1000000]`; exports[`most types: Int8Array 1`] = `Int8Array []`; diff --git a/test/js/bun/test/test-timers.test.ts b/test/js/bun/test/test-timers.test.ts index 963467dee5..b216469c05 100644 --- a/test/js/bun/test/test-timers.test.ts +++ b/test/js/bun/test/test-timers.test.ts @@ -20,7 +20,9 @@ test("we can go back in time", () => { expect(DateBeforeMocked).not.toBe(Date); expect(DateBeforeMocked.now).not.toBe(Date.now); } - + jest.setSystemTime(new Date("2020-01-01T00:00:00.000Z").getTime()); + expect(new Date().toISOString()).toBe("2020-01-01T00:00:00.000Z"); + expect(Date.now()).toBe(1577836800000); jest.useRealTimers(); const now = new Date(); now.setHours(0, 0, 0, 0); diff --git a/test/js/bun/udp/dgram-unref-hang-fixture.ts b/test/js/bun/udp/dgram-unref-hang-fixture.ts new file mode 100644 index 0000000000..0dcb614d24 --- /dev/null +++ b/test/js/bun/udp/dgram-unref-hang-fixture.ts @@ -0,0 +1,6 @@ +import dgram from "node:dgram"; +const socket = dgram.createSocket({ type: "udp4" }); +socket.unref(); +socket.send("test", 1337, "127.0.0.1", (error, bytes) => { + console.log(error, bytes); +}); diff --git a/test/js/bun/udp/dgram.test.ts b/test/js/bun/udp/dgram.test.ts index c6d486c3c6..5a9301322a 100644 --- a/test/js/bun/udp/dgram.test.ts +++ b/test/js/bun/udp/dgram.test.ts @@ -3,6 +3,7 @@ import { describe, test, expect, it } from "bun:test"; import { nodeDataCases } from "./testdata"; import { disableAggressiveGCScope } from "harness"; +import path from "path"; describe("createSocket()", () => { test("connect", done => { @@ -188,3 +189,9 @@ describe("createSocket()", () => { }); } }); + +describe("unref()", () => { + test("call before bind() does not hang", async () => { + expect([path.join(import.meta.dir, "dgram-unref-hang-fixture.ts")]).toRun(); + }); 
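The unref() regression test above delegates to the harness `toRun` matcher, so a hang in the fixture surfaces as the spawned bun process never exiting rather than as an in-process timeout. Based on the harness.ts hunk earlier in this diff, the matcher reduces to roughly the following sketch (`bunExe`/`bunEnv` are the harness helpers; the fixture path is abbreviated):

import { bunExe, bunEnv } from "harness";

// Approximate expansion of expect([fixture]).toRun() per the harness diff.
const result = Bun.spawnSync({
  cmd: [bunExe(), "dgram-unref-hang-fixture.ts"],
  env: bunEnv,
  stdio: ["inherit", "pipe", "inherit"],
});
// A hang keeps spawnSync from returning; otherwise the matcher just
// compares the exit code (0 by default after this diff's change).
if (result.exitCode !== 0) {
  throw new Error(result.stdout.toString("utf-8"));
}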
+}); diff --git a/test/js/bun/util/bun-cryptohasher.test.ts b/test/js/bun/util/bun-cryptohasher.test.ts index 554dfef6fe..28af94e035 100644 --- a/test/js/bun/util/bun-cryptohasher.test.ts +++ b/test/js/bun/util/bun-cryptohasher.test.ts @@ -6,7 +6,14 @@ test("Bun.file in CryptoHasher is not supported yet", () => { expect(() => new Bun.CryptoHasher("sha1").update(Bun.file(import.meta.path))).toThrow(); expect(() => new Bun.SHA1().update(Bun.file(import.meta.path))).toThrow(); }); - +test("CryptoHasher update should throw when no parameter/null/undefined is passed", () => { + // @ts-expect-error + expect(() => new Bun.CryptoHasher("sha1").update()).toThrow(); + // @ts-expect-error + expect(() => new Bun.CryptoHasher("sha1").update(undefined)).toThrow(); + // @ts-expect-error + expect(() => new Bun.CryptoHasher("sha1").update(null)).toThrow(); +}); describe("Hash is consistent", () => { const sourceInputs = [ Buffer.from([ diff --git a/test/js/bun/util/exotic-global-mutable-prototype.test.ts b/test/js/bun/util/exotic-global-mutable-prototype.test.ts index b6ed071aa1..f51ad39b51 100644 --- a/test/js/bun/util/exotic-global-mutable-prototype.test.ts +++ b/test/js/bun/util/exotic-global-mutable-prototype.test.ts @@ -5,6 +5,13 @@ import { test, expect } from "bun:test"; // Some libraries like `web-worker` override the prototype on `globalThis` to add extra properties. test("Object.setPrototypeOf works on globalThis", () => { const orig = Object.getPrototypeOf(globalThis); + let parent = orig; + while (parent) { + for (const key in parent) { + console.log(key); + } + parent = Object.getPrototypeOf(parent); + } Object.setPrototypeOf( globalThis, Object.create(null, { diff --git a/test/js/bun/util/inspect-error-leak.test.js b/test/js/bun/util/inspect-error-leak.test.js new file mode 100644 index 0000000000..49df1a3315 --- /dev/null +++ b/test/js/bun/util/inspect-error-leak.test.js @@ -0,0 +1,23 @@ +import { test, expect } from "bun:test"; + +const perBatch = 2000; +const repeat = 50; +test("Printing errors does not leak", () => { + function batch() { + for (let i = 0; i < perBatch; i++) { + Bun.inspect(new Error("leak")); + } + Bun.gc(true); + } + + batch(); + const baseline = Math.floor(process.memoryUsage.rss() / 1024); + for (let i = 0; i < repeat; i++) { + batch(); + } + + const after = Math.floor(process.memoryUsage.rss() / 1024); + const diff = ((after - baseline) / 1024) | 0; + console.log(`RSS increased by ${diff} MB`); + expect(diff, `RSS grew by ${diff} MB after ${perBatch * repeat} iterations`).toBeLessThan(10); +}, 10_000); diff --git a/test/js/first_party/undici/undici-primordials.test.ts b/test/js/first_party/undici/undici-primordials.test.ts new file mode 100644 index 0000000000..63c57cf086 --- /dev/null +++ b/test/js/first_party/undici/undici-primordials.test.ts @@ -0,0 +1,36 @@ +import { describe, it, expect, beforeAll, afterAll, afterEach } from "bun:test"; +const { Response, Request, Headers, FormData, File, URL, AbortSignal, URLSearchParams } = globalThis; +afterEach(() => { + globalThis.Response = Response; + globalThis.Request = Request; + globalThis.Headers = Headers; + globalThis.FormData = FormData; + globalThis.File = File; + globalThis.URL = URL; + globalThis.AbortSignal = AbortSignal; + globalThis.URLSearchParams = URLSearchParams; +}); + +it("undici", () => { + globalThis.Response = + globalThis.Request = + globalThis.Headers = + globalThis.FormData = + globalThis.File = + globalThis.URL = + globalThis.AbortSignal = + globalThis.URLSearchParams = + 42; + + const undici = 
require("undici"); + expect(undici).toBeDefined(); + expect(undici.Response).toBe(Response); + expect(undici.Request).toBe(Request); + expect(undici.Headers).toBe(Headers); + expect(undici.FormData).toBe(FormData); + expect(undici.File).toBe(File); + expect(undici.URL).toBe(URL); + expect(undici.URLSearchParams).toBe(URLSearchParams); + + // Note: AbortSignal is not exported. It's just used internally. +}); diff --git a/test/js/node/child_process/child_process.test.ts b/test/js/node/child_process/child_process.test.ts index a124c6f64c..f2ecb17f7a 100644 --- a/test/js/node/child_process/child_process.test.ts +++ b/test/js/node/child_process/child_process.test.ts @@ -223,7 +223,7 @@ describe("spawn()", () => { it("should allow us to spawn in the default shell", async () => { const shellPath: string = await new Promise(resolve => { - const child = spawn("echo", [isWindows ? "$env:SHELL" : "$SHELL"], { shell: true }); + const child = spawn("echo", [isWindows ? "$PSHOME" : "$SHELL"], { shell: true }); child.stdout.on("data", data => { resolve(data.toString().trim()); }); @@ -240,7 +240,7 @@ describe("spawn()", () => { it("should allow us to spawn in a specified shell", async () => { const shell = shellExe(); const shellPath: string = await new Promise(resolve => { - const child = spawn("echo", [isWindows ? "$env:SHELL" : "$SHELL"], { shell }); + const child = spawn("echo", [isWindows ? "$PSHOME" : "$SHELL"], { shell }); child.stdout.on("data", data => { resolve(data.toString().trim()); }); diff --git a/test/js/node/fs/fs-stats-truncate.test.ts b/test/js/node/fs/fs-stats-truncate.test.ts new file mode 100644 index 0000000000..a4cda48275 --- /dev/null +++ b/test/js/node/fs/fs-stats-truncate.test.ts @@ -0,0 +1,43 @@ +// BUN-2C1 +// const value = @field(this, @tagName(field)); +// if (comptime (Big and @typeInfo(@TypeOf(value)) == .Int)) { +// return JSC.JSValue.fromInt64NoTruncate(globalObject, @intCast(value)); +// } +import { Stats, statSync } from "node:fs"; +import { test, expect } from "bun:test"; + +test("fs.stats truncate", async () => { + const stats = new Stats(...Array.from({ length: 14 }, () => Number.MAX_VALUE)); + expect(stats.dev).toBeGreaterThan(0); + expect(stats.mode).toBeGreaterThan(0); + expect(stats.nlink).toBeGreaterThan(0); + expect(stats.uid).toBeGreaterThan(0); + expect(stats.gid).toBeGreaterThan(0); + expect(stats.rdev).toBeGreaterThan(0); + expect(stats.blksize).toBeGreaterThan(0); + expect(stats.ino).toBeGreaterThan(0); + expect(stats.size).toBeGreaterThan(0); + expect(stats.blocks).toBeGreaterThan(0); + expect(stats.atimeMs).toBeGreaterThan(0); + expect(stats.mtimeMs).toBeGreaterThan(0); + expect(stats.ctimeMs).toBeGreaterThan(0); + expect(stats.birthtimeMs).toBeGreaterThan(0); +}); + +test("fs.stats truncate (bigint)", async () => { + const stats = statSync(import.meta.path, { bigint: true }); + expect(stats.dev).toBeTypeOf("bigint"); + expect(stats.mode).toBeTypeOf("bigint"); + expect(stats.nlink).toBeTypeOf("bigint"); + expect(stats.uid).toBeTypeOf("bigint"); + expect(stats.gid).toBeTypeOf("bigint"); + expect(stats.rdev).toBeTypeOf("bigint"); + expect(stats.blksize).toBeTypeOf("bigint"); + expect(stats.ino).toBeTypeOf("bigint"); + expect(stats.size).toBeTypeOf("bigint"); + expect(stats.blocks).toBeTypeOf("bigint"); + expect(stats.atimeMs).toBeTypeOf("bigint"); + expect(stats.mtimeMs).toBeTypeOf("bigint"); + expect(stats.ctimeMs).toBeTypeOf("bigint"); + expect(stats.birthtimeMs).toBeTypeOf("bigint"); +}); diff --git a/test/js/node/http/node-fetch-primordials.test.ts 
b/test/js/node/http/node-fetch-primordials.test.ts new file mode 100644 index 0000000000..2fdb93eca9 --- /dev/null +++ b/test/js/node/http/node-fetch-primordials.test.ts @@ -0,0 +1,30 @@ +import { afterEach, expect, test } from "bun:test"; + +const originalResponse = globalThis.Response; +const originalRequest = globalThis.Request; +const originalHeaders = globalThis.Headers; +afterEach(() => { + globalThis.Response = originalResponse; + globalThis.Request = originalRequest; + globalThis.Headers = originalHeaders; + globalThis.fetch = Bun.fetch; +}); + +test("fetch, Response, Request can be overriden", async () => { + const { Response, Request } = globalThis; + globalThis.Response = class BadResponse {}; + globalThis.Request = class BadRequest {}; + globalThis.fetch = function badFetch() {}; + + const fetch = require("node-fetch").fetch; + + using server = Bun.serve({ + port: 0, + async fetch(req) { + return new Response("Hello, World!"); + }, + }); + + const response = await fetch(server.url); + expect(response).toBeInstanceOf(Response); +}); diff --git a/test/js/node/http/node-fetch.test.js b/test/js/node/http/node-fetch.test.js index 92a3f12e39..a865d0b157 100644 --- a/test/js/node/http/node-fetch.test.js +++ b/test/js/node/http/node-fetch.test.js @@ -3,7 +3,17 @@ import * as iso from "isomorphic-fetch"; import * as vercelFetch from "@vercel/fetch"; import * as stream from "stream"; -import { test, expect } from "bun:test"; +import { test, expect, beforeAll, afterAll, afterEach } from "bun:test"; + +const originalResponse = globalThis.Response; +const originalRequest = globalThis.Request; +const originalHeaders = globalThis.Headers; +afterEach(() => { + globalThis.Response = originalResponse; + globalThis.Request = originalRequest; + globalThis.Headers = originalHeaders; + globalThis.fetch = Bun.fetch; +}); test("node-fetch", () => { expect(Response.prototype).toBeInstanceOf(globalThis.Response); diff --git a/test/js/node/http/node-http-error-in-data-handler-fixture.1.js b/test/js/node/http/node-http-error-in-data-handler-fixture.1.js new file mode 100644 index 0000000000..b33d56f40f --- /dev/null +++ b/test/js/node/http/node-http-error-in-data-handler-fixture.1.js @@ -0,0 +1,35 @@ +const http = require("http"); +const server = http.createServer((req, res) => { + res.end("Hello World\n"); +}); +const { promise, resolve, reject } = Promise.withResolvers(); +process.exitCode = 1; + +server.listen(0, function () { + const port = server.address().port; + http + .request(`http://localhost:${port}`, res => { + res + .on("data", data => { + // base64 the message to ensure we don't confuse source code with the error message + throw new Error(Buffer.from("VGVzdCBwYXNzZWQ=", "base64")); + }) + .on("end", () => { + server.close(); + }); + }) + .on("error", reject) + .end(); +}); + +server.on("close", () => { + resolve(); +}); +server.on("error", err => { + reject(err); +}); + +process.on("uncaughtException", err => { + console.log(err); + process.exit(0); +}); diff --git a/test/js/node/http/node-http-error-in-data-handler-fixture.2.js b/test/js/node/http/node-http-error-in-data-handler-fixture.2.js new file mode 100644 index 0000000000..7fb81dc9f2 --- /dev/null +++ b/test/js/node/http/node-http-error-in-data-handler-fixture.2.js @@ -0,0 +1,36 @@ +const http = require("http"); +const server = http.createServer(async (req, res) => { + res.end("Hello World\n"); +}); +const { promise, resolve, reject } = Promise.withResolvers(); +process.exitCode = 1; + +server.listen(0, function () { + const port = 
server.address().port; + http + .request(`http://localhost:${port}`, res => { + res + .on("data", async data => { + await Bun.sleep(1); + // base64 the message to ensure we don't confuse source code with the error message + throw new Error(Buffer.from("VGVzdCBwYXNzZWQ=", "base64")); + }) + .on("end", () => { + server.close(); + }); + }) + .on("error", reject) + .end(); +}); + +server.on("close", () => { + resolve(); +}); +server.on("error", err => { + reject(err); +}); + +process.on("unhandledRejection", err => { + console.log(err); + process.exit(0); +}); diff --git a/test/js/node/http/node-http-primoridals.test.ts b/test/js/node/http/node-http-primoridals.test.ts new file mode 100644 index 0000000000..0076080135 --- /dev/null +++ b/test/js/node/http/node-http-primoridals.test.ts @@ -0,0 +1,130 @@ +import { test, expect, afterEach } from "bun:test"; + +const Response = globalThis.Response; +const Request = globalThis.Request; +const Headers = globalThis.Headers; +const Blob = globalThis.Blob; + +afterEach(() => { + globalThis.Response = Response; + globalThis.Request = Request; + globalThis.Headers = Headers; + globalThis.Blob = Blob; +}); + +// This test passes by not hanging. +test("Overriding Request, Response, Headers, and Blob should not break node:http server", async () => { + const Response = globalThis.Response; + const Request = globalThis.Request; + const Headers = globalThis.Headers; + const Blob = globalThis.Blob; + + globalThis.Response = class MyResponse { + get body() { + throw new Error("body getter should not be called"); + } + + get headers() { + throw new Error("headers getter should not be called"); + } + + get status() { + throw new Error("status getter should not be called"); + } + + get statusText() { + throw new Error("statusText getter should not be called"); + } + + get ok() { + throw new Error("ok getter should not be called"); + } + + get url() { + throw new Error("url getter should not be called"); + } + + get type() { + throw new Error("type getter should not be called"); + } + }; + globalThis.Request = class MyRequest {}; + globalThis.Headers = class MyHeaders { + entries() { + throw new Error("entries should not be called"); + } + + get() { + throw new Error("get should not be called"); + } + + has() { + throw new Error("has should not be called"); + } + + keys() { + throw new Error("keys should not be called"); + } + + values() { + throw new Error("values should not be called"); + } + + forEach() { + throw new Error("forEach should not be called"); + } + + [Symbol.iterator]() { + throw new Error("[Symbol.iterator] should not be called"); + } + + [Symbol.toStringTag]() { + throw new Error("[Symbol.toStringTag] should not be called"); + } + + append() { + throw new Error("append should not be called"); + } + }; + globalThis.Blob = class MyBlob {}; + + const http = require("http"); + const server = http.createServer((req, res) => { + res.end("Hello World\n"); + }); + const { promise, resolve, reject } = Promise.withResolvers(); + + server.listen(0, () => { + const { port } = server.address(); + // client request + const req = http + .request(`http://localhost:${port}`, res => { + res + .on("data", data => { + expect(data.toString()).toBe("Hello World\n"); + }) + .on("end", () => { + server.close(); + console.log("closing time"); + }); + }) + .on("error", reject) + .end(); + }); + + server.on("close", () => { + resolve(); + }); + server.on("error", err => { + reject(err); + }); + + try { + await promise; + } finally { + globalThis.Response = Response; + 
globalThis.Request = Request;
+    globalThis.Headers = Headers;
+    globalThis.Blob = Blob;
+  }
+});
diff --git a/test/js/node/http/node-http.test.ts b/test/js/node/http/node-http.test.ts
index d151e47415..883f30ba7b 100644
--- a/test/js/node/http/node-http.test.ts
+++ b/test/js/node/http/node-http.test.ts
@@ -25,7 +25,7 @@ import { unlinkSync } from "node:fs";
 import { PassThrough } from "node:stream";
 const { describe, expect, it, beforeAll, afterAll, createDoneDotAll, mock } = createTest(import.meta.path);
 import { bunExe } from "bun:harness";
-import { bunEnv, disableAggressiveGCScope, tmpdirSync } from "harness";
+import { bunEnv, disableAggressiveGCScope, tmpdirSync, randomPort } from "harness";
 import * as stream from "node:stream";
 import * as zlib from "node:zlib";
@@ -158,6 +158,28 @@ describe("node:http", () => {
     server.close();
   });
 
+  it("should use the provided port", async () => {
+    const server = http.createServer(() => {});
+    const random_port = randomPort();
+    server.listen(random_port);
+    const { port } = server.address();
+    expect(port).toEqual(random_port);
+    server.close();
+  });
+
+  it("should assign a random port when undefined", async () => {
+    const server1 = http.createServer(() => {});
+    const server2 = http.createServer(() => {});
+    server1.listen(undefined);
+    server2.listen(undefined);
+    const { port: port1 } = server1.address();
+    const { port: port2 } = server2.address();
+    expect(port1).not.toEqual(port2);
+    expect(port1).toBeWithin(1024, 65535);
+    server1.close();
+    server2.close();
+  });
+
   it("option method should be uppercase (#7250)", async () => {
     try {
       var server = createServer((req, res) => {
@@ -1921,7 +1943,7 @@ it("should emit events in the right order", async () => {
 
 it("destroy should end download", async () => {
   // just simulate some file that will take forever to download
-  const payload = Buffer.from("X".repeat(16 * 1024));
+  const payload = Buffer.from("X".repeat(128 * 1024));
 
   using server = Bun.serve({
     port: 0,
@@ -1936,24 +1958,33 @@
       });
     },
   });
-  {
-    let chunks = 0;
-    const { promise, resolve } = Promise.withResolvers();
+  async function run() {
+    let receivedByteLength = 0;
+    let { promise, resolve } = Promise.withResolvers();
     const req = request(server.url, res => {
-      res.on("data", () => {
-        process.nextTick(resolve);
-        chunks++;
+      res.on("data", data => {
+        receivedByteLength += data.length;
+        if (resolve) {
+          resolve();
+          resolve = null;
+        }
       });
     });
     req.end();
-    // wait for the first chunk
     await promise;
-    // should stop the download
     req.destroy();
-    await Bun.sleep(200);
-    expect(chunks).toBeLessThanOrEqual(3);
+    await Bun.sleep(10);
+    const initialByteLength = receivedByteLength;
+    expect(receivedByteLength).toBeLessThanOrEqual(payload.length * 3);
+    await Bun.sleep(10);
+    expect(initialByteLength).toBe(receivedByteLength);
+    await Bun.sleep(10);
   }
+
+  const runCount = 50;
+  const runs = Array.from({ length: runCount }, run);
+  await Promise.all(runs);
 });
 
 it("can send brotli from Server and receive with fetch", async () => {
@@ -2197,3 +2228,27 @@ it("should mark complete true", async () => {
     server.close();
   }
 });
+
+it("should propagate exception in sync data handler", async () => {
+  const { exitCode, stdout } = Bun.spawnSync({
+    cmd: [bunExe(), "run", path.join(import.meta.dir, "node-http-error-in-data-handler-fixture.1.js")],
+    stdout: "pipe",
+    stderr: "inherit",
+    env: bunEnv,
+  });
+
+  expect(stdout.toString()).toContain("Test passed");
+  expect(exitCode).toBe(0);
+});
+
+it("should propagate exception in async data handler", async () => {
+  const { exitCode, stdout } = Bun.spawnSync({
+    cmd: [bunExe(), "run", path.join(import.meta.dir, "node-http-error-in-data-handler-fixture.2.js")],
+    stdout: "pipe",
+    stderr: "inherit",
+    env: bunEnv,
+  });
+
+  expect(stdout.toString()).toContain("Test passed");
+  expect(exitCode).toBe(0);
+});
diff --git a/test/js/node/path/basename.test.js b/test/js/node/path/basename.test.js
index 7d53a9909c..c94cd91132 100644
--- a/test/js/node/path/basename.test.js
+++ b/test/js/node/path/basename.test.js
@@ -45,6 +45,7 @@ describe("path.dirname", () => {
     assert.strictEqual(path.win32.basename("basename.ext\\"), "basename.ext");
     assert.strictEqual(path.win32.basename("basename.ext\\\\"), "basename.ext");
     assert.strictEqual(path.win32.basename("foo"), "foo");
+    assert.strictEqual(path.win32.basename("foo", undefined), "foo");
     assert.strictEqual(path.win32.basename("aaa\\bbb", "\\bbb"), "bbb");
     assert.strictEqual(path.win32.basename("aaa\\bbb", "a\\bbb"), "bbb");
     assert.strictEqual(path.win32.basename("aaa\\bbb", "bbb"), "bbb");
@@ -72,6 +73,7 @@ describe("path.dirname", () => {
     assert.strictEqual(path.posix.basename("basename.ext\\"), "basename.ext\\");
     assert.strictEqual(path.posix.basename("basename.ext\\\\"), "basename.ext\\\\");
     assert.strictEqual(path.posix.basename("foo"), "foo");
+    assert.strictEqual(path.posix.basename("foo", undefined), "foo");
   });
 
   test("posix with control characters", () => {
diff --git a/test/js/node/process/process.test.js b/test/js/node/process/process.test.js
index cb8b466fb5..825b5bde45 100644
--- a/test/js/node/process/process.test.js
+++ b/test/js/node/process/process.test.js
@@ -1,9 +1,53 @@
 import { spawnSync, which } from "bun";
 import { describe, expect, it } from "bun:test";
-import { existsSync, readFileSync } from "fs";
+import { existsSync, readFileSync, writeFileSync } from "fs";
 import { bunEnv, bunExe, isWindows, tmpdirSync } from "harness";
 import { basename, join, resolve } from "path";
 
+expect.extend({
+  toRunInlineFixture(input) {
+    const script = input[0];
+    const optionalStdout = input[1];
+    const expectedCode = input[2];
+    const x = tmpdirSync();
+    const path = join(x, "index.js");
+    writeFileSync(path, script);
+
+    // return expect([path]).toRun(optionalStdout, expectedCode);
+    const cmds = [path];
+    const result = Bun.spawnSync({
+      cmd: [bunExe(), ...cmds],
+      env: bunEnv,
+      stdio: ["inherit", "pipe", "pipe"],
+    });
+
+    if (result.exitCode !== expectedCode) {
+      return {
+        pass: false,
+        message: () =>
+          `Command ${cmds.join(" ")} failed: ${result.exitCode} != ${expectedCode}:` +
+          "\n" +
+          result.stdout.toString("utf-8") +
+          "\n" +
+          result.stderr.toString("utf-8"),
+      };
+    }
+
+    if (optionalStdout != null) {
+      return {
+        pass: result.stdout.toString("utf-8") === optionalStdout,
+        message: () =>
+          `Expected ${cmds.join(" ")} to output ${optionalStdout} but got ${result.stdout.toString("utf-8")}`,
+      };
+    }
+
+    return {
+      pass: true,
+      message: () => `Expected ${cmds.join(" ")} to fail`,
+    };
+  },
+});
+
 const process_sleep = join(import.meta.dir, "process-sleep.js");
 
 it("process", () => {
@@ -622,7 +666,7 @@ it("aborts when the uncaughtException handler throws", async () => {
   const proc = Bun.spawn([bunExe(), join(import.meta.dir, "process-onUncaughtExceptionAbort.js")], {
     stderr: "pipe",
   });
-  expect(await proc.exited).toBe(1);
+  expect(await proc.exited).toBe(7);
   expect(await new Response(proc.stderr).text()).toContain("bar");
 });
 
@@ -655,3 +699,289 @@ it("process.execArgv", async () => {
     expect(result, `bun ${cmd}`).toEqual({ execArgv, argv });
   }
 });
+
+describe("process.exitCode", () => {
+  it("normal", () => {
+    expect([
+      `
+      process.on("exit", (code) => console.log("exit", code, process.exitCode));
+      process.on("beforeExit", (code) => console.log("beforeExit", code, process.exitCode));
+      `,
+      "beforeExit 0 undefined\nexit 0 undefined\n",
+      0,
+    ]).toRunInlineFixture();
+  });
+
+  it("setter", () => {
+    expect([
+      `
+      process.on("exit", (code) => console.log("exit", code, process.exitCode));
+      process.on("beforeExit", (code) => console.log("beforeExit", code, process.exitCode));
+
+      process.exitCode = 0;
+      `,
+      "beforeExit 0 0\nexit 0 0\n",
+      0,
+    ]).toRunInlineFixture();
+  });
+
+  it("setter non-zero", () => {
+    expect([
+      `
+      process.on("exit", (code) => console.log("exit", code, process.exitCode));
+      process.on("beforeExit", (code) => console.log("beforeExit", code, process.exitCode));
+
+      process.exitCode = 3;
+      `,
+      "beforeExit 3 3\nexit 3 3\n",
+      3,
+    ]).toRunInlineFixture();
+  });
+
+  it("exit", () => {
+    expect([
+      `
+      process.on("exit", (code) => console.log("exit", code, process.exitCode));
+      process.on("beforeExit", (code) => console.log("beforeExit", code, process.exitCode));
+
+      process.exit(0);
+      `,
+      "exit 0 0\n",
+      0,
+    ]).toRunInlineFixture();
+  });
+
+  it("exit non-zero", () => {
+    expect([
+      `
+      process.on("exit", (code) => console.log("exit", code, process.exitCode));
+      process.on("beforeExit", (code) => console.log("beforeExit", code, process.exitCode));
+
+      process.exit(3);
+      `,
+      "exit 3 3\n",
+      3,
+    ]).toRunInlineFixture();
+  });
+
+  it("property access on undefined", () => {
+    expect([
+      `
+      process.on("exit", (code) => console.log("exit", code, process.exitCode));
+      process.on("beforeExit", (code) => console.log("beforeExit", code, process.exitCode));
+
+      const x = {};
+      x.y.z();
+      `,
+      "exit 1 1\n",
+      1,
+    ]).toRunInlineFixture();
+  });
+
+  it("thrown Error", () => {
+    expect([
+      `
+      process.on("exit", (code) => console.log("exit", code, process.exitCode));
+      process.on("beforeExit", (code) => console.log("beforeExit", code, process.exitCode));
+
+      throw new Error("oops");
+      `,
+      "exit 1 1\n",
+      1,
+    ]).toRunInlineFixture();
+  });
+
+  it("unhandled rejected promise", () => {
+    expect([
+      `
+      process.on("exit", (code) => console.log("exit", code, process.exitCode));
+      process.on("beforeExit", (code) => console.log("beforeExit", code, process.exitCode));
+
+      await Promise.reject();
+      `,
+      "exit 1 1\n",
+      1,
+    ]).toRunInlineFixture();
+  });
+
+  it("exitsOnExitCodeSet", () => {
+    expect([
+      `
+      const assert = require('assert');
+      process.exitCode = 42;
+      process.on('exit', (code) => {
+        assert.strictEqual(process.exitCode, 42);
+        assert.strictEqual(code, 42);
+      });
+      `,
+      "",
+      42,
+    ]).toRunInlineFixture();
+  });
+
+  it("changesCodeViaExit", () => {
+    expect([
+      `
+      const assert = require('assert');
+      process.exitCode = 99;
+      process.on('exit', (code) => {
+        assert.strictEqual(process.exitCode, 42);
+        assert.strictEqual(code, 42);
+      });
+      process.exit(42);
+      `,
+      "",
+      42,
+    ]).toRunInlineFixture();
+  });
+
+  it("changesCodeZeroExit", () => {
+    expect([
+      `
+      const assert = require('assert');
+      process.exitCode = 99;
+      process.on('exit', (code) => {
+        assert.strictEqual(process.exitCode, 0);
+        assert.strictEqual(code, 0);
+      });
+      process.exit(0);
+      `,
+      "",
+      0,
+    ]).toRunInlineFixture();
+  });
+
+  it("exitWithOneOnUncaught", () => {
+    expect([
+      `
+      process.exitCode = 99;
+      process.on('exit', (code) => {
+        // cannot use assert here: a failed assertion would itself be an uncaughtException, forcing exit code 1 and making this test useless
+        if (code !== 1 || process.exitCode !== 1) {
+          console.log('wrong code! expected 1 for uncaughtException');
+          process.exit(99);
+        }
+      });
+      throw new Error('ok');
+      `,
+      "",
+      1,
+    ]).toRunInlineFixture();
+  });
+
+  it("changeCodeInsideExit", () => {
+    expect([
+      `
+      const assert = require('assert');
+      process.exitCode = 95;
+      process.on('exit', (code) => {
+        assert.strictEqual(process.exitCode, 95);
+        assert.strictEqual(code, 95);
+        process.exitCode = 99;
+      });
+      `,
+      "",
+      99,
+    ]).toRunInlineFixture();
+  });
+
+  it.todoIf(isWindows)("zeroExitWithUncaughtHandler", () => {
+    expect([
+      `
+      process.on('exit', (code) => {
+        if (code !== 0) {
+          console.log('wrong code! expected 0; got', code);
+          process.exit(99);
+        }
+        if (process.exitCode !== undefined) {
+          console.log('wrong exitCode! expected undefined; got', process.exitCode);
+          process.exit(99);
+        }
+      });
+      process.on('uncaughtException', () => { });
+      throw new Error('ok');
+      `,
+      "",
+      0,
+    ]).toRunInlineFixture();
+  });
+
+  it.todoIf(isWindows)("changeCodeInUncaughtHandler", () => {
+    expect([
+      `
+      process.on('exit', (code) => {
+        if (code !== 97) {
+          console.log('wrong code! expected 97; got', code);
+          process.exit(99);
+        }
+        if (process.exitCode !== 97) {
+          console.log('wrong exitCode! expected 97; got', process.exitCode);
+          process.exit(99);
+        }
+      });
+      process.on('uncaughtException', () => {
+        process.exitCode = 97;
+      });
+      throw new Error('ok');
+      `,
+      "",
+      97,
+    ]).toRunInlineFixture();
+  });
+
+  it("changeCodeInExitWithUncaught", () => {
+    expect([
+      `
+      const assert = require('assert');
+      process.on('exit', (code) => {
+        assert.strictEqual(process.exitCode, 1);
+        assert.strictEqual(code, 1);
+        process.exitCode = 98;
+      });
+      throw new Error('ok');
+      `,
+      "",
+      98,
+    ]).toRunInlineFixture();
+  });
+
+  it("exitWithZeroInExitWithUncaught", () => {
+    expect([
+      `
+      const assert = require('assert');
+      process.on('exit', (code) => {
+        assert.strictEqual(process.exitCode, 1);
+        assert.strictEqual(code, 1);
+        process.exitCode = 0;
+      });
+      throw new Error('ok');
+      `,
+      "",
+      0,
+    ]).toRunInlineFixture();
+  });
+
+  it("exitWithThrowInUncaughtHandler", () => {
+    expect([
+      `
+      process.on('uncaughtException', () => {
+        throw new Error('ok')
+      });
+      throw new Error('bad');
+      `,
+      "",
+      7,
+    ]).toRunInlineFixture();
+  });
+
+  it.todo("exitWithUndefinedFatalException", () => {
+    expect([
+      `
+      process._fatalException = undefined;
+      throw new Error('ok');
+      `,
+      "",
+      6,
+    ]).toRunInlineFixture();
+  });
+});
diff --git a/test/js/node/tls/node-tls-connect.test.ts b/test/js/node/tls/node-tls-connect.test.ts
index 68416e10e6..123d2015f3 100644
--- a/test/js/node/tls/node-tls-connect.test.ts
+++ b/test/js/node/tls/node-tls-connect.test.ts
@@ -150,9 +150,10 @@ it("should have peer certificate", async () => {
   expect(infoAccess["OCSP - URI"]).toBeDefined();
   expect(infoAccess["CA Issuers - URI"]).toBeDefined();
   expect(cert.ca).toBeFalse();
-  expect(cert.bits).toBe(2048);
-  expect(typeof cert.modulus).toBe("string");
-  expect(typeof cert.exponent).toBe("string");
+  expect(cert.bits).toBeInteger();
+  // These can change:
+  // expect(typeof cert.modulus).toBe("string");
+  // expect(typeof cert.exponent).toBe("string");
   expect(cert.pubkey).toBeInstanceOf(Buffer);
   expect(typeof cert.valid_from).toBe("string");
   expect(typeof cert.valid_to).toBe("string");
diff --git a/test/js/node/util/util.test.js b/test/js/node/util/util.test.js
index 4c1c51511e..a1f63ca801 100644
--- a/test/js/node/util/util.test.js
+++ b/test/js/node/util/util.test.js
@@ -24,6 +24,7 @@ import { expect, describe, it } from "bun:test";
 import util from "util";
 import assert from "assert";
+import "harness";
 
 // const context = require('vm').runInNewContext; // TODO: Use a vm polyfill
 const strictEqual = (...args) => {
@@ -357,4 +358,46 @@ describe("util", () => {
 
     assert.strictEqual(util.styleText("red", "test"), "\u001b[31mtest\u001b[39m");
   });
+
+  describe("getSystemErrorName", () => {
+    for (const item of ["test", {}, []]) {
+      it(`throws when passing: ${item}`, () => {
+        expect(() => util.getSystemErrorName(item)).toThrowWithCode(TypeError, "ERR_INVALID_ARG_TYPE");
+      });
+    }
+
+    for (const item of [0, 1, Infinity, -Infinity, NaN]) {
+      it(`throws when passing: ${item}`, () => {
+        expect(() => util.getSystemErrorName(item)).toThrowWithCode(RangeError, "ERR_OUT_OF_RANGE");
+      });
+    }
+
+    const proc = Bun.spawnSync({
+      cmd: [
+        "node",
+        "-e",
+        "console.log(JSON.stringify([...require('node:util').getSystemErrorMap().entries()].map((v) => [v[0], v[1][0]])));",
+      ],
+      stdio: ["ignore", "pipe", "pipe"],
+    });
+    for (const [code, name] of JSON.parse(proc.stdout.toString())) {
+      it(`getSystemErrorName(${code}) should be ${name}`, () => {
+        expect(util.getSystemErrorName(code)).toBe(name);
+      });
+    }
+
+    it("getSystemErrorName(-4096) should be unknown", () => {
+      expect(util.getSystemErrorName(-4096)).toBe("Unknown system error -4096");
+    });
+
+    // These are the Windows/fallback codes; they should match Node in either returning the correct name or 'Unknown system error'.
+    // e.g. on Linux, getSystemErrorName(-4034) should return unknown and not 'ERANGE', since errno defines ERANGE as -34 on that platform.
+    for (let i = -4095; i <= -4023; i++) {
+      it(`negative space: getSystemErrorName(${i}) is correct`, () => {
+        const cmd = ["node", "-e", `console.log(JSON.stringify(util.getSystemErrorName(${i})));`];
+        const stdio = ["ignore", "pipe", "pipe"];
+        expect(util.getSystemErrorName(i)).toEqual(JSON.parse(Bun.spawnSync({ cmd, stdio }).stdout.toString()));
+      });
+    }
+  });
 });
diff --git a/test/js/node/zlib/zlib.test.js b/test/js/node/zlib/zlib.test.js
index 35bd2ffdc5..40f9670856 100644
--- a/test/js/node/zlib/zlib.test.js
+++ b/test/js/node/zlib/zlib.test.js
@@ -9,28 +9,34 @@ import { tmpdirSync } from "harness";
 import * as stream from "node:stream";
 
 describe("zlib", () => {
-  it("should be able to deflate and inflate", () => {
-    const data = new TextEncoder().encode("Hello World!".repeat(1));
-    const compressed = deflateSync(data);
-    const decompressed = inflateSync(compressed);
-    expect(decompressed.join("")).toBe(data.join(""));
-  });
+  for (let library of ["zlib", "libdeflate"]) {
+    for (let outputLibrary of ["zlib", "libdeflate"]) {
+      describe(`${library} -> ${outputLibrary}`, () => {
+        it("should be able to deflate and inflate", () => {
+          const data = new TextEncoder().encode("Hello World!".repeat(1));
+          const compressed = deflateSync(data, { library });
+          const decompressed = inflateSync(compressed, { library: outputLibrary });
+          expect(decompressed.join("")).toBe(data.join(""));
+        });
 
-  it("should be able to gzip and gunzip", () => {
-    const data = new TextEncoder().encode("Hello World!".repeat(1));
-    const compressed = gzipSync(data);
-    const decompressed = gunzipSync(compressed);
-    expect(decompressed.join("")).toBe(data.join(""));
-  });
+        it("should be able to gzip and gunzip", () => {
+          const data = new TextEncoder().encode("Hello World!".repeat(1));
+          const compressed = gzipSync(data, { library });
+          const decompressed = gunzipSync(compressed, { library: outputLibrary });
+          expect(decompressed.join("")).toBe(data.join(""));
+        });
+      });
+    }
+  }
 
   it("should throw on invalid raw deflate data", () => {
     const data = new TextEncoder().encode("Hello World!".repeat(1));
-    expect(() => inflateSync(data)).toThrow(new Error("invalid stored block lengths"));
+    expect(() => inflateSync(data, { library: "zlib" })).toThrow(new Error("invalid stored block lengths"));
   });
 
   it("should throw on invalid gzip data", () => {
     const data = new TextEncoder().encode("Hello World!".repeat(1));
-    expect(() => gunzipSync(data)).toThrow(new Error("incorrect header check"));
+    expect(() => gunzipSync(data, { library: "zlib" })).toThrow(new Error("incorrect header check"));
   });
 });
diff --git a/test/js/third_party/remix/remix-build/server/index.js b/test/js/third_party/remix/remix-build/server/index.js
new file mode 100644
index 0000000000..4e62ccc331
--- /dev/null
+++ b/test/js/third_party/remix/remix-build/server/index.js
@@ -0,0 +1,261 @@
+import { jsx, jsxs } from "react/jsx-runtime";
+import { PassThrough } from "node:stream";
+import { createReadableStreamFromReadable } from "@remix-run/node";
+import { RemixServer, Outlet, Meta, Links, ScrollRestoration, Scripts } from "@remix-run/react";
+import { isbot } from "isbot";
+import { renderToPipeableStream } from "react-dom/server";
+const ABORT_DELAY = 5e3;
+function handleRequest(request, responseStatusCode, responseHeaders, remixContext, loadContext) {
+  return isbot(request.headers.get("user-agent") || "")
+    ? handleBotRequest(request, responseStatusCode, responseHeaders, remixContext)
+    : handleBrowserRequest(request, responseStatusCode, responseHeaders, remixContext);
+}
+function handleBotRequest(request, responseStatusCode, responseHeaders, remixContext) {
+  return new Promise((resolve, reject) => {
+    let shellRendered = false;
+    const { pipe, abort } = renderToPipeableStream(
+      /* @__PURE__ */ jsx(RemixServer, {
+        context: remixContext,
+        url: request.url,
+        abortDelay: ABORT_DELAY,
+      }),
+      {
+        onAllReady() {
+          shellRendered = true;
+          const body = new PassThrough();
+          const stream = createReadableStreamFromReadable(body);
+          responseHeaders.set("Content-Type", "text/html");
+          resolve(
+            new Response(stream, {
+              headers: responseHeaders,
+              status: responseStatusCode,
+            }),
+          );
+          pipe(body);
+        },
+        onShellError(error) {
+          reject(error);
+        },
+        onError(error) {
+          responseStatusCode = 500;
+          if (shellRendered) {
+            console.error(error);
+          }
+        },
+      },
+    );
+    setTimeout(abort, ABORT_DELAY);
+  });
+}
+function handleBrowserRequest(request, responseStatusCode, responseHeaders, remixContext) {
+  return new Promise((resolve, reject) => {
+    let shellRendered = false;
+    const { pipe, abort } = renderToPipeableStream(
+      /* @__PURE__ */ jsx(RemixServer, {
+        context: remixContext,
+        url: request.url,
+        abortDelay: ABORT_DELAY,
+      }),
+      {
+        onShellReady() {
+          shellRendered = true;
+          const body = new PassThrough();
+          const stream = createReadableStreamFromReadable(body);
+          responseHeaders.set("Content-Type", "text/html");
+          resolve(
+            new Response(stream, {
+              headers: responseHeaders,
+              status: responseStatusCode,
+            }),
+          );
+          pipe(body);
+        },
+        onShellError(error) {
+          reject(error);
+        },
+        onError(error) {
+          responseStatusCode = 500;
+          if (shellRendered) {
+            console.error(error);
+          }
+        },
+      },
+    );
+    setTimeout(abort, ABORT_DELAY);
+  });
+}
+const entryServer = /* @__PURE__ */ Object.freeze(
+  /* @__PURE__ */ Object.defineProperty(
+    {
+      __proto__: null,
+      default: handleRequest,
+    },
+    Symbol.toStringTag,
+    { value: "Module" },
+  ),
+);
+function Layout({ children }) {
+  return /* @__PURE__ */ jsxs("html", {
+    lang: "en",
+    children: [
+      /* @__PURE__ */ jsxs("head", {
+        children: [
+          /* @__PURE__ */ jsx("meta", { charSet: "utf-8" }),
+          /* @__PURE__ */ jsx("meta", {
+            name: "viewport",
+            content: "width=device-width, initial-scale=1",
+          }),
+          /* @__PURE__ */ jsx(Meta, {}),
+          /* @__PURE__ */ jsx(Links, {}),
+        ],
+      }),
+      /* @__PURE__ */ jsxs("body", {
+        children: [children, /* @__PURE__ */ jsx(ScrollRestoration, {}), /* @__PURE__ */ jsx(Scripts, {})],
+      }),
+    ],
+  });
+}
+function App() {
+  return /* @__PURE__ */ jsx(Outlet, {});
+}
+const route0 = /* @__PURE__ */ Object.freeze(
+  /* @__PURE__ */ Object.defineProperty(
+    {
+      __proto__: null,
+      Layout,
+      default: App,
+    },
+    Symbol.toStringTag,
+    { value: "Module" },
+  ),
+);
+const meta = () => {
+  return [{ title: "New Remix App" }, { name: "description", content: "Welcome to Remix!" }];
+};
+function Index() {
+  return /* @__PURE__ */ jsxs("div", {
+    className: "font-sans p-4",
+    children: [
+      /* @__PURE__ */ jsx("h1", {
+        className: "text-3xl",
+        children: "Welcome to Remix",
+      }),
+      /* @__PURE__ */ jsxs("ul", {
+        className: "list-disc mt-4 pl-6 space-y-2",
+        children: [
+          /* @__PURE__ */ jsx("li", {
+            children: /* @__PURE__ */ jsx("a", {
+              className: "text-blue-700 underline visited:text-purple-900",
+              target: "_blank",
+              href: "https://remix.run/start/quickstart",
+              rel: "noreferrer",
+              children: "5m Quick Start",
+            }),
+          }),
+          /* @__PURE__ */ jsx("li", {
+            children: /* @__PURE__ */ jsx("a", {
+              className: "text-blue-700 underline visited:text-purple-900",
+              target: "_blank",
+              href: "https://remix.run/start/tutorial",
+              rel: "noreferrer",
+              children: "30m Tutorial",
+            }),
+          }),
+          /* @__PURE__ */ jsx("li", {
+            children: /* @__PURE__ */ jsx("a", {
+              className: "text-blue-700 underline visited:text-purple-900",
+              target: "_blank",
+              href: "https://remix.run/docs",
+              rel: "noreferrer",
+              children: "Remix Docs",
+            }),
+          }),
+        ],
+      }),
+    ],
+  });
+}
+const route1 = /* @__PURE__ */ Object.freeze(
+  /* @__PURE__ */ Object.defineProperty(
+    {
+      __proto__: null,
+      default: Index,
+      meta,
+    },
+    Symbol.toStringTag,
+    { value: "Module" },
+  ),
+);
+const serverManifest = {
+  entry: {
+    module: "/assets/entry.client-ER-smVHW.js",
+    imports: ["/assets/jsx-runtime-56DGgGmo.js", "/assets/components-BI_hnQlH.js"],
+    css: [],
+  },
+  routes: {
+    root: {
+      id: "root",
+      parentId: void 0,
+      path: "",
+      index: void 0,
+      caseSensitive: void 0,
+      hasAction: false,
+      hasLoader: false,
+      hasClientAction: false,
+      hasClientLoader: false,
+      hasErrorBoundary: false,
+      module: "/assets/root-CBMuz_vA.js",
+      imports: ["/assets/jsx-runtime-56DGgGmo.js", "/assets/components-BI_hnQlH.js"],
+      css: ["/assets/root-BFUH26ow.css"],
+    },
+    "routes/_index": {
+      id: "routes/_index",
+      parentId: "root",
+      path: void 0,
+      index: true,
+      caseSensitive: void 0,
+      hasAction: false,
+      hasLoader: false,
+      hasClientAction: false,
+      hasClientLoader: false,
+      hasErrorBoundary: false,
+      module: "/assets/_index-B6hwyHK-.js",
+      imports: ["/assets/jsx-runtime-56DGgGmo.js"],
+      css: [],
+    },
+  },
+  url: "/assets/manifest-c2e02a52.js",
+  version: "c2e02a52",
+};
+const mode = "production";
+const assetsBuildDirectory = "build/client";
+const basename = "/";
+const future = {
+  v3_fetcherPersist: true,
+  v3_relativeSplatPath: true,
+  v3_throwAbortReason: true,
+  unstable_singleFetch: false,
+  unstable_fogOfWar: false,
+};
+const isSpaMode = false;
+const publicPath = "/";
+const entry = { module: entryServer };
+const routes = {
+  root: {
+    id: "root",
+    parentId: void 0,
+    path: "",
+    index: void 0,
+    caseSensitive: void 0,
+    module: route0,
+  },
+  "routes/_index": {
+    id: "routes/_index",
+    parentId: "root",
+    path: void 0,
+    index: true,
+    caseSensitive: void 0,
+    module: route1,
+  },
+};
+export { serverManifest as assets, assetsBuildDirectory, basename, entry, future, isSpaMode, mode, publicPath, routes };
diff --git a/test/js/third_party/remix/remix.test.ts b/test/js/third_party/remix/remix.test.ts
new file mode 100644
index 0000000000..16667e0f11
--- /dev/null
+++ b/test/js/third_party/remix/remix.test.ts
@@ -0,0 +1,44 @@
+import { describe, it, expect, test } from "bun:test";
+test("remix works", async () => {
+  process.env.PORT = "0";
+  process.exitCode = 1;
+  process.env.NODE_ENV = "production";
+  process.env.HOST = "localhost";
+  process.argv = [process.argv[0], ".", require("path").join(__dirname, "remix-build", "server", "index.js")];
+  const http = require("node:http");
+  const originalListen = http.Server.prototype.listen;
+  let { promise, resolve, reject } = Promise.withResolvers();
+  http.Server.prototype.listen = function listen(...args) {
+    setTimeout(() => {
+      resolve(this.address());
+    }, 10);
+    return originalListen.apply(this, args);
+  };
+
+  require("@remix-run/serve/dist/cli.js");
+
+  // Wait long enough for the server's setTimeout to run.
+  await Bun.sleep(10);
+
+  const port = (await promise).port;
+
+  ({ promise, resolve, reject } = Promise.withResolvers());
+  let chunks = [];
+  const req = http
+    .request(`http://localhost:${port}`, res => {
+      res
+        .on("data", data => {
+          chunks.push(data);
+        })
+        .on("end", () => {
+          resolve();
+        })
+        .on("error", reject);
+    })
+    .end();
+
+  await promise;
+  const data = Buffer.concat(chunks).toString();
+  expect(data).toContain("Remix Docs");
+  process.exitCode = 0;
+});
diff --git a/test/js/web/console/console-log.expected.txt b/test/js/web/console/console-log.expected.txt
index 167512e7bd..86cff95206 100644
--- a/test/js/web/console/console-log.expected.txt
+++ b/test/js/web/console/console-log.expected.txt
@@ -284,3 +284,4 @@ Hello NaN % 1
 Hello NaN %j 1
 Hello \5 6,
 Hello %i 5 6
+%d 1
diff --git a/test/js/web/console/console-log.js b/test/js/web/console/console-log.js
index 46357f219d..32a817bc78 100644
--- a/test/js/web/console/console-log.js
+++ b/test/js/web/console/console-log.js
@@ -261,3 +261,6 @@ console.log("Hello %i %", [1, 2, 3, 4], 1);
 console.log("Hello %i %j", [1, 2, 3, 4], 1);
 console.log("Hello \\%i %i,", 5, 6);
 console.log("Hello %%i %i", 5, 6);
+
+// doesn't go out of bounds when printing
+console.log("%%d", 1);
diff --git a/test/js/web/fetch/fetch-preconnect.test.ts b/test/js/web/fetch/fetch-preconnect.test.ts
new file mode 100644
index 0000000000..1259b0cd45
--- /dev/null
+++ b/test/js/web/fetch/fetch-preconnect.test.ts
@@ -0,0 +1,119 @@
+import { describe, it, expect } from "bun:test";
+import "harness";
+import { isWindows } from "harness";
+
+// TODO: on Windows, these tests fail.
+// This feature is mostly meant for serverless JS environments, so we can no-op it on Windows.
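+// fetch.preconnect(url) dials the host ahead of time; the tests below verify that a later fetch() completes over that pre-opened socket.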
+describe.todoIf(isWindows)("fetch.preconnect", () => {
+  it("fetch.preconnect works", async () => {
+    const { promise, resolve } = Promise.withResolvers();
+    using listener = Bun.listen({
+      port: 0,
+      hostname: "localhost",
+      socket: {
+        open(socket) {
+          resolve(socket);
+        },
+        data() {},
+        close() {},
+      },
+    });
+    fetch.preconnect(`http://localhost:${listener.port}`);
+    const socket = await promise;
+    const fetchPromise = fetch(`http://localhost:${listener.port}`);
+    await Bun.sleep(64);
+    socket.write("HTTP/1.1 200 OK\r\nContent-Length: 0\r\n\r\n");
+    socket.end();
+
+    const response = await fetchPromise;
+    expect(response.status).toBe(200);
+  });
+
+  describe("doesn't break the request when", () => {
+    for (let endOrTerminate of ["end", "terminate", "shutdown"]) {
+      describe(endOrTerminate, () => {
+        for (let at of ["before", "middle", "after"]) {
+          it(at, async () => {
+            let { promise, resolve } = Promise.withResolvers();
+            using listener = Bun.listen({
+              port: 0,
+              hostname: "localhost",
+              socket: {
+                open(socket) {
+                  resolve(socket);
+                },
+                data() {},
+                close() {},
+              },
+            });
+            fetch.preconnect(`http://localhost:${listener.port}`);
+            let socket = await promise;
+            ({ promise, resolve } = Promise.withResolvers());
+            if (at === "before") {
+              await Bun.sleep(16);
+              socket[endOrTerminate]();
+              if (endOrTerminate === "shutdown") {
+                await Bun.sleep(0);
+                socket.end();
+              }
+            }
+            const fetchPromise = fetch(`http://localhost:${listener.port}`);
+            if (at === "middle") {
+              socket[endOrTerminate]();
+              if (endOrTerminate === "shutdown") {
+                socket.end();
+              }
+              await Bun.sleep(16);
+            }
+
+            if (at === "after") {
+              await Bun.sleep(16);
+              socket[endOrTerminate]();
+              if (endOrTerminate === "shutdown") {
+                socket.end();
+              }
+            }
+            socket = await promise;
+            socket.write("HTTP/1.1 200 OK\r\nContent-Length: 0\r\n\r\n");
+            socket.end();
+
+            const response = await fetchPromise;
+            expect(response.status).toBe(200);
+          });
+        }
+      });
+    }
+  });
+
+  it("--fetch-preconnect works", async () => {
+    const { promise, resolve } = Promise.withResolvers();
+    using listener = Bun.listen({
+      port: 0,
+      hostname: "localhost",
+      socket: {
+        open(socket) {
+          socket.write("HTTP/1.1 200 OK\r\nContent-Length: 0\r\n\r\n");
+          socket.end();
+          resolve();
+        },
+        data() {},
+        close() {},
+      },
+    });
+
+    // Do --fetch-preconnect, but don't actually send a request.
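+    // The open() handler above replies with a canned 200 and resolves the promise, which proves the CLI flag actually dialed the listener.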
+    expect([`--fetch-preconnect=http://localhost:${listener.port}`, "--eval", "Bun.sleep(64)"]).toRun();
+
+    await promise;
+  });
+
+  it("fetch.preconnect validates the URL", async () => {
+    expect(() => fetch.preconnect("http://localhost:0")).toThrow();
+    expect(() => fetch.preconnect("")).toThrow();
+    expect(() => fetch.preconnect(" ")).toThrow();
+    expect(() => fetch.preconnect("unix:///tmp/foo")).toThrow();
+    expect(() => fetch.preconnect("http://:0")).toThrow();
+  });
+});
diff --git a/test/js/web/fetch/fetch-redirect.test.ts b/test/js/web/fetch/fetch-redirect.test.ts
new file mode 100644
index 0000000000..f02fb5fc5c
--- /dev/null
+++ b/test/js/web/fetch/fetch-redirect.test.ts
@@ -0,0 +1,32 @@
+import { describe, it, expect } from "bun:test";
+
+// https://github.com/oven-sh/bun/issues/12701
+it("fetch() preserves body on redirect", async () => {
+  using server = Bun.serve({
+    port: 0,
+
+    async fetch(req) {
+      const { pathname } = new URL(req.url);
+      if (pathname === "/redirect") {
+        return new Response(null, {
+          status: 308,
+          headers: {
+            Location: "/redirect2",
+          },
+        });
+      }
+      if (pathname === "/redirect2") {
+        return new Response(req.body, { status: 200 });
+      }
+      return new Response("you shouldnt see this?", { status: 200 });
+    },
+  });
+
+  const res = await fetch(new URL("/redirect", server.url), {
+    method: "POST",
+    body: "hello",
+  });
+
+  expect(res.status).toBe(200);
+  expect(await res.text()).toBe("hello");
+});
diff --git a/test/js/web/fetch/fetch.stream.test.ts b/test/js/web/fetch/fetch.stream.test.ts
index eac19feffc..7d7908f6a3 100644
--- a/test/js/web/fetch/fetch.stream.test.ts
+++ b/test/js/web/fetch/fetch.stream.test.ts
@@ -652,24 +652,28 @@ describe("fetch() with streaming", () => {
     }
   }
 
-  type CompressionType = "no" | "gzip" | "deflate" | "br" | "deflate_with_headers";
-  type TestType = { headers: Record<string, string>; compression: CompressionType; skip?: boolean };
-  const types: Array<TestType> = [
+  const types = [
     { headers: {}, compression: "no" },
     { headers: { "Content-Encoding": "gzip" }, compression: "gzip" },
+    { headers: { "Content-Encoding": "gzip" }, compression: "gzip-libdeflate" },
     { headers: { "Content-Encoding": "deflate" }, compression: "deflate" },
+    { headers: { "Content-Encoding": "deflate" }, compression: "deflate-libdeflate" },
     { headers: { "Content-Encoding": "deflate" }, compression: "deflate_with_headers" },
-    // { headers: { "Content-Encoding": "br" }, compression: "br", skip: true }, // not implemented yet
-  ];
+    { headers: { "Content-Encoding": "br" }, compression: "br" },
+  ] as const;
 
-  function compress(compression: CompressionType, data: Uint8Array) {
+  function compress(compression, data: Uint8Array) {
     switch (compression) {
+      case "gzip-libdeflate":
       case "gzip":
-        return Bun.gzipSync(data);
+        return Bun.gzipSync(data, { library: compression === "gzip-libdeflate" ? "libdeflate" : "zlib" });
+      case "deflate-libdeflate":
       case "deflate":
-        return Bun.deflateSync(data);
+        return Bun.deflateSync(data, { library: compression === "deflate-libdeflate" ? "libdeflate" : "zlib" });
       case "deflate_with_headers":
         return zlib.deflateSync(data);
+      case "br":
+        return zlib.brotliCompressSync(data);
       default:
        return data;
     }
@@ -1186,7 +1190,14 @@ describe("fetch() with streaming", () => {
       gcTick(false);
       expect(buffer.toString("utf8")).toBe("unreachable");
     } catch (err) {
-      expect((err as Error).name).toBe("ZlibError");
+      if (compression === "br") {
+        expect((err as Error).name).toBe("BrotliDecompressionError");
+      } else if (compression === "deflate-libdeflate") {
+        // Since the compressed data is different, the error ends up different.
+        expect((err as Error).name).toBe("ShortRead");
+      } else {
+        expect((err as Error).name).toBe("ZlibError");
+      }
     }
   }
 });
diff --git a/test/js/web/workers/worker.test.ts b/test/js/web/workers/worker.test.ts
index f99ecb735b..609eb37dfc 100644
--- a/test/js/web/workers/worker.test.ts
+++ b/test/js/web/workers/worker.test.ts
@@ -45,6 +45,10 @@ describe("web worker", () => {
   test("worker-env", done => {
     const worker = new Worker(new URL("worker-fixture-env.js", import.meta.url).href, {
       env: {
+        // Verify that we use putDirectMayBeIndex instead of putDirect
+        [0]: "123",
+        [1]: "234",
+
         hello: "world",
         another_key: 123 as any,
       },
@@ -57,6 +61,8 @@ describe("web worker", () => {
     try {
       expect(e.data).toEqual({
         env: {
+          [0]: "123",
+          [1]: "234",
           hello: "world",
           another_key: "123",
         },
diff --git a/test/napi/napi-app/main.cpp b/test/napi/napi-app/main.cpp
index f443ee79d7..61f7c564c7 100644
--- a/test/napi/napi-app/main.cpp
+++ b/test/napi/napi-app/main.cpp
@@ -69,6 +69,33 @@ static napi_value test_issue_11949(const Napi::CallbackInfo &info) {
   return result;
 }
 
+static void callback_1(napi_env env, napi_value js_callback, void *context,
+                       void *data) {}
+
+napi_value test_napi_threadsafe_function_does_not_hang_after_finalize(
+    const Napi::CallbackInfo &info) {
+
+  Napi::Env env = info.Env();
+  napi_status status;
+
+  napi_value resource_name;
+  status = napi_create_string_utf8(env, "simple", 6, &resource_name);
+  assert(status == napi_ok);
+
+  napi_threadsafe_function cb;
+  status = napi_create_threadsafe_function(env, nullptr, nullptr, resource_name,
+                                           0, 1, nullptr, nullptr, nullptr,
+                                           &callback_1, &cb);
+  assert(status == napi_ok);
+
+  status = napi_release_threadsafe_function(cb, napi_tsfn_release);
+  assert(status == napi_ok);
+
+  printf("success!");
+
+  return ok(env);
+}
+
 napi_value test_napi_get_value_string_utf8_with_buffer(const Napi::CallbackInfo &info) {
   Napi::Env env = info.Env();
 
@@ -123,10 +150,8 @@ Napi::Object InitAll(Napi::Env env, Napi::Object exports1) {
 
   Napi::Object exports = Init2(env, exports1);
 
-  node::AddEnvironmentCleanupHook(
-      isolate, [](void *) {}, isolate);
-  node::RemoveEnvironmentCleanupHook(
-      isolate, [](void *) {}, isolate);
+  node::AddEnvironmentCleanupHook(isolate, [](void *) {}, isolate);
+  node::RemoveEnvironmentCleanupHook(isolate, [](void *) {}, isolate);
 
   exports.Set("test_issue_7685", Napi::Function::New(env, test_issue_7685));
   exports.Set("test_issue_11949", Napi::Function::New(env, test_issue_11949));
@@ -134,6 +159,12 @@ Napi::Object InitAll(Napi::Env env, Napi::Object exports1) {
   exports.Set(
       "test_napi_get_value_string_utf8_with_buffer",
       Napi::Function::New(env, test_napi_get_value_string_utf8_with_buffer));
+
+  exports.Set(
+      "test_napi_threadsafe_function_does_not_hang_after_finalize",
+      Napi::Function::New(
+          env, test_napi_threadsafe_function_does_not_hang_after_finalize));
+
   return exports;
 }
diff --git a/test/napi/napi.test.ts b/test/napi/napi.test.ts
index 2c8a64c262..90e800446b 100644
--- a/test/napi/napi.test.ts
+++ b/test/napi/napi.test.ts
@@ -59,6 +59,11 @@ describe("napi", () => {
     });
   });
 
+  it("threadsafe function does not hang on finalize", () => {
+    const result = checkSameOutput("test_napi_threadsafe_function_does_not_hang_after_finalize", []);
+    expect(result).toBe("success!");
+  });
+
   it("#1288", async () => {
     const result = checkSameOutput("self", []);
     expect(result).toBe("hello world!");
diff --git a/test/package.json b/test/package.json
index 3008ded2ec..96c4bd14c1 100644
--- a/test/package.json
+++ b/test/package.json
@@ -11,6 +11,8 @@
     "@grpc/proto-loader": "0.7.10",
     "@napi-rs/canvas": "0.1.47",
     "@prisma/client": "5.8.0",
+    "@remix-run/react": "2.10.3",
+    "@remix-run/serve": "2.10.3",
     "@resvg/resvg-js": "2.4.1",
     "@swc/core": "1.3.38",
     "@types/ws": "8.5.10",
@@ -23,6 +25,7 @@
     "express": "4.18.2",
     "fast-glob": "3.3.1",
     "iconv-lite": "0.6.3",
+    "isbot": "5.1.13",
     "jest-extended": "4.0.0",
     "jsonwebtoken": "9.0.2",
     "jws": "4.0.0",
@@ -61,5 +64,8 @@
   "private": true,
   "scripts": {
     "typecheck": "tsc --noEmit"
+  },
+  "resolutions": {
+    "react": "../node_modules/react"
   }
 }
diff --git a/test/regression/issue/012360.test.ts b/test/regression/issue/012360.test.ts
new file mode 100644
index 0000000000..e50a2b87fd
--- /dev/null
+++ b/test/regression/issue/012360.test.ts
@@ -0,0 +1,40 @@
+// https://github.com/oven-sh/bun/issues/12360
+import { test, expect } from "bun:test";
+import { fileURLToPath, pathToFileURL } from "bun";
+import { tmpdirSync, isWindows } from "harness";
+import { join } from "path";
+
+export async function validatePath(path: URL): Promise<URL | string> {
+  const filePath = fileURLToPath(path);
+
+  if (await Bun.file(filePath).exists()) {
+    return pathToFileURL(filePath);
+  } else {
+    return "";
+  }
+}
+
+test("validate executable given in the config using `validatePath`: invalid value", async () => {
+  const dir = tmpdirSync();
+
+  const filePath = join(dir, "./sample.exe");
+
+  const newFilePath = await validatePath(pathToFileURL(filePath));
+
+  expect(newFilePath).toBe("");
+});
+
+test("validate executable given in the config using `validatePath`: expected real implementation", async () => {
+  const dir = tmpdirSync();
+  const editorPath: URL | string = pathToFileURL(join(dir, "./metaeditor64.exe"));
+  const terminalPath: URL | string = pathToFileURL(join(dir, "./terminal64.exe"));
+
+  await Bun.write(isWindows ? editorPath.pathname.slice(1) : editorPath.pathname, "im a editor");
+  await Bun.write(isWindows ? terminalPath.pathname.slice(1) : terminalPath.pathname, "im a terminal");
+
+  const newEditorPath = await validatePath(editorPath);
+  const newTerminalPath = await validatePath(terminalPath);
+
+  expect(newEditorPath.pathname).toBe(editorPath.pathname);
+  expect(newTerminalPath.pathname).toBe(terminalPath.pathname);
+});
diff --git a/test/regression/issue/09555.test.ts b/test/regression/issue/09555.test.ts
new file mode 100644
index 0000000000..be5e2123f3
--- /dev/null
+++ b/test/regression/issue/09555.test.ts
@@ -0,0 +1,162 @@
+import { Readable } from "stream";
+import { test, describe, expect } from "bun:test";
+import { tempDirWithFiles } from "harness";
+import { join } from "path";
+describe("#09555", () => {
+  test("fetch() Response body", async () => {
+    const full = crypto.getRandomValues(new Uint8Array(1024 * 3));
+    const sha = Bun.hash(full);
+    using server = Bun.serve({
+      port: 0,
+      async fetch() {
+        const chunks = [full.slice(0, 1024), full.slice(1024, 1024 * 2), full.slice(1024 * 2)];
+
+        return new Response(
+          new ReadableStream({
+            async pull(controller) {
+              if (chunks.length === 0) {
+                controller.close();
+                return;
+              }
+              controller.enqueue(chunks.shift());
+              await Bun.sleep(100);
+            },
+          }),
+        );
+      },
+    });
+
+    let total = 0;
+    const res = await fetch(server.url.href);
+    const stream = Readable.fromWeb(res.body);
+    let chunks = [];
+    for await (const chunk of stream) {
+      total += chunk.length;
+      chunks.push(chunk);
+    }
+
+    const out = Bun.hash(Buffer.concat(chunks));
+    expect(out).toBe(sha);
+    expect(total).toBe(1024 * 3);
+  });
+
+  test("Bun.serve() Request body streaming", async () => {
+    const full = crypto.getRandomValues(new Uint8Array(1024 * 3));
+    const sha = Bun.CryptoHasher.hash("sha256", full, "base64");
+    using server = Bun.serve({
+      port: 0,
+      async fetch(req) {
+        const readable = Readable.fromWeb(req.body);
+        let chunks = [];
+
+        for await (const chunk of readable) {
+          chunks.push(chunk);
+        }
+
+        const out = Bun.CryptoHasher.hash("sha256", Buffer.concat(chunks), "base64");
+        return new Response(out);
+      },
+    });
+
+    const { promise, resolve } = Promise.withResolvers();
+    const chunks = [];
+    await Bun.connect({
+      hostname: server.url.hostname,
+      port: server.url.port,
+
+      socket: {
+        async open(socket) {
+          socket.write(
+            "POST / HTTP/1.1\r\n" +
+              "Connection: close\r\n" +
+              "Content-Length: " +
+              full.length +
+              "\r\n" +
+              "Host: " +
+              server.url.hostname +
+              "\r\n\r\n",
+          );
+          const chunks = [full.slice(0, 1024), full.slice(1024, 1024 * 2), full.slice(1024 * 2)];
+
+          for (const chunk of chunks) {
+            socket.write(chunk);
+            await Bun.sleep(100);
+          }
+        },
+        drain() {},
+        data(socket, received) {
+          chunks.push(received);
+        },
+        close() {
+          resolve(Buffer.concat(chunks).toString());
+        },
+      },
+    });
+    const outHTTPResponse = (await promise).toString();
+    const out = outHTTPResponse.split("\r\n\r\n")[1];
+    expect(out).toEqual(sha);
+  });
+
+  test("Bun.serve() Request body buffered", async () => {
+    const full = crypto.getRandomValues(new Uint8Array(1024 * 3));
+    const sha = Bun.CryptoHasher.hash("sha256", full, "base64");
+    using server = Bun.serve({
+      port: 0,
+      async fetch(req) {
+        const readable = Readable.fromWeb(req.body);
+        let chunks = [];
+
+        for await (const chunk of readable) {
+          chunks.push(chunk);
+        }
+
+        const out = Bun.CryptoHasher.hash("sha256", Buffer.concat(chunks), "base64");
+        return new Response(out);
+      },
+    });
+
+    const outHTTPResponse = await fetch(server.url.href, {
+      method: "POST",
+      body: full,
+    });
+    const out = await outHTTPResponse.text();
+    expect(out).toEqual(sha);
+  });
+
+  test("Bun.file() NativeReadable", async () => {
+    const full = crypto.getRandomValues(new Uint8Array(1024 * 3));
+    const sha = Bun.CryptoHasher.hash("sha256", full, "base64");
+    const dir = tempDirWithFiles("09555", {
+      "/file.blob": full,
+    });
+    await Bun.write(join(dir, "file.blob"), full);
+    const web = Bun.file(join(dir, "file.blob")).stream();
+    const stream = Readable.fromWeb(web);
+
+    const chunks = [];
+    let total = 0;
+    for await (const chunk of stream) {
+      chunks.push(chunk);
+      total += chunk.length;
+    }
+
+    const out = Bun.CryptoHasher.hash("sha256", Buffer.concat(chunks), "base64");
+    expect(out).toEqual(sha);
+    expect(total).toBe(1024 * 3);
+  });
+
+  test("Readable.fromWeb consumes the ReadableStream", async () => {
+    const bytes = new Blob([crypto.getRandomValues(new Uint8Array(1024 * 3)), new ArrayBuffer(1024 * 1024 * 10)]);
+    const response = new Response(bytes);
+
+    const web = response.body;
+    expect(response.bodyUsed).toBe(false);
+    const stream = Readable.fromWeb(web);
+    expect(response.bodyUsed).toBe(true);
+    expect(() => response.body?.getReader()).toThrow();
+    const methods = ["arrayBuffer", "blob", "formData", "json", "text"];
+    for (const method of methods) {
+      expect(() => response[method]()).toThrow();
+    }
+  });
+});
diff --git a/test/regression/issue/12650.test.js b/test/regression/issue/12650.test.js
new file mode 100644
index 0000000000..eab73865d2
--- /dev/null
+++ b/test/regression/issue/12650.test.js
@@ -0,0 +1,23 @@
+import { expect, describe, it } from "bun:test";
+
+// Custom class for testing
+class CustomException extends Error {
+  constructor(message) {
+    super(message);
+    this.name = "CustomException";
+  }
+}
+
+describe("Test expect.toThrow(expect.any())", () => {
+  it("should throw an error", () => {
+    expect(() => {
+      throw new CustomException("Custom error message");
+    }).toThrow(expect.any(Error));
+  });
+
+  it("should throw a CustomException", () => {
+    expect(() => {
+      throw new CustomException("Custom error message");
+    }).toThrow(expect.any(CustomException));
+  });
+});
diff --git a/test/snippets/bundled-entry-point.js b/test/snippets/bundled-entry-point.js
index a996f86327..77e119729e 100644
--- a/test/snippets/bundled-entry-point.js
+++ b/test/snippets/bundled-entry-point.js
@@ -1,6 +1,6 @@
 import "react";
 
-var hello = 123 ? null ?? "world" : "ok";
+var hello = 123 ? (null ?? "world") : "ok";
 
 export function test() {
   return testDone(import.meta.url);