diff --git a/.buildkite/ci.md b/.buildkite/ci.md deleted file mode 100644 index 57dde54e69..0000000000 --- a/.buildkite/ci.md +++ /dev/null @@ -1,63 +0,0 @@ -## CI - -How does CI work? - -### Building - -Bun is built on macOS, Linux, and Windows. The process is split into the following steps, the first 3 of which are able to run in parallel: - -#### 1. `build-deps` - -Builds the static libaries in `src/deps` and outputs a directory: `build/bun-deps`. - -- on Windows, this runs the script: [`scripts/all-dependencies.ps1`](scripts/all-dependencies.ps1) -- on macOS and Linux, this runs the script: [`scripts/all-dependencies.sh`](scripts/all-dependencies.sh) - -#### 2. `build-zig` - -Builds the Zig object file: `build/bun-zig.o`. Since `zig build` supports cross-compiling, this step is run on macOS aarch64 since we have observed it to be the fastest. - -- on macOS and Linux, this runs the script: [`scripts/build-bun-zig.sh`](scripts/build-bun-zig.sh) - -#### 3. `build-cpp` - -Builds the C++ object file: `build/bun-cpp-objects.a`. - -- on Windows, this runs the script: [`scripts/build-bun-cpp.ps1`](scripts/build-bun-cpp.ps1) -- on macOS and Linux, this runs the script: [`scripts/build-bun-cpp.sh`](scripts/build-bun-cpp.sh) - -#### 4. `link` / `build-bun` - -After the `build-deps`, `build-zig`, and `build-cpp` steps have completed, this step links the Zig object file and C++ object file into a single binary: `bun--.zip`. - -- on Windows, this runs the script: [`scripts/buildkite-link-bun.ps1`](scripts/buildkite-link-bun.ps1) -- on macOS and Linux, this runs the script: [`scripts/buildkite-link-bun.sh`](scripts/buildkite-link-bun.sh) - -To speed up the build, thare are two options: - -- `--fast`: This disables the LTO (link-time optimization) step. -- without `--fast`: This runs the LTO step, which is the default. The binaries that are release to Github are always built with LTO. - -### Testing - -### FAQ - -> How do I add a new CI agent? - -> How do I add/modify system dependencies? - -> How do I SSH into a CI agent? - -### Known issues - -These are things that we know about, but haven't fixed or optimized yet. - -- There is no `scripts/build-bun-zig.ps1` for Windows. - -- The `build-deps` step does not cache in CI, so it re-builds each time (though it does use ccache). It attempts to check the `BUN_DEPS_CACHE_DIR` environment variable, but for some reason it doesn't work. - -- Windows and Linux machines sometimes take up to 1-2 minutes to start tests. This is because robobun is listening for when the job is scheduled to provision the VM. Instead, it can start provisioning during the link step, or keep a pool of idle VMs around (but it's unclear how more expensive this is). - -- There are a limited number of macOS VMs. This is because they are expensive and manually provisioned, mostly through MacStadium. If wait times are too long we can just provision more, or buy some. - -- To prevent idle machines, robobun periodically checks for idle machines and terminates them. Before doing this, it checks to see if the machine is connected as an agent to Buildkite. However, sometimes the machine picks up a job in-between this time, and the job is terminated. diff --git a/.buildkite/ci.mjs b/.buildkite/ci.mjs new file mode 100644 index 0000000000..5eb781d307 --- /dev/null +++ b/.buildkite/ci.mjs @@ -0,0 +1,453 @@ +#!/usr/bin/env node + +/** + * Build and test Bun on macOS, Linux, and Windows. 
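+ * Invoked by .buildkite/scripts/prepare-build.sh via `node .buildkite/ci.mjs`; writes the generated pipeline to .buildkite/ci.yml and, when running on Buildkite, uploads it with `buildkite-agent pipeline upload`.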
+ * @link https://buildkite.com/docs/pipelines/defining-steps + */ + +import { writeFileSync } from "node:fs"; +import { join } from "node:path"; +import { + getCanaryRevision, + getChangedFiles, + getCommit, + getCommitMessage, + getLastSuccessfulBuild, + getMainBranch, + getRepositoryOwner, + getTargetBranch, + isBuildkite, + isFork, + isMainBranch, + isMergeQueue, + printEnvironment, + spawnSafe, +} from "../scripts/utils.mjs"; + +function toYaml(obj, indent = 0) { + const spaces = " ".repeat(indent); + let result = ""; + + for (const [key, value] of Object.entries(obj)) { + if (value === undefined) { + continue; + } + + if (value === null) { + result += `${spaces}${key}: null\n`; + continue; + } + + if (Array.isArray(value)) { + result += `${spaces}${key}:\n`; + value.forEach(item => { + if (typeof item === "object" && item !== null) { + result += `${spaces}- \n${toYaml(item, indent + 2) + .split("\n") + .map(line => `${spaces} ${line}`) + .join("\n")}\n`; + } else { + result += `${spaces}- ${item}\n`; + } + }); + continue; + } + + if (typeof value === "object") { + result += `${spaces}${key}:\n${toYaml(value, indent + 2)}`; + continue; + } + + if ( + typeof value === "string" && + (value.includes(":") || value.includes("#") || value.includes("'") || value.includes('"') || value.includes("\n")) + ) { + result += `${spaces}${key}: "${value.replace(/"/g, '\\"')}"\n`; + continue; + } + + result += `${spaces}${key}: ${value}\n`; + } + + return result; +} + +function getPipeline(buildId) { + /** + * Helpers + */ + + const getKey = platform => { + const { os, arch, abi, baseline } = platform; + + if (abi) { + if (baseline) { + return `${os}-${arch}-${abi}-baseline`; + } + return `${os}-${arch}-${abi}`; + } + if (baseline) { + return `${os}-${arch}-baseline`; + } + + return `${os}-${arch}`; + }; + + const getLabel = platform => { + const { os, arch, abi, baseline, release } = platform; + let label = release ? `:${os}: ${release} ${arch}` : `:${os}: ${arch}`; + if (abi) { + label += `-${abi}`; + } + if (baseline) { + label += `-baseline`; + } + return label; + }; + + // https://buildkite.com/docs/pipelines/command-step#retry-attributes + const getRetry = (limit = 3) => { + return { + automatic: [ + { exit_status: 1, limit: 1 }, + { exit_status: -1, limit }, + { exit_status: 255, limit }, + { signal_reason: "agent_stop", limit }, + ], + }; + }; + + // https://buildkite.com/docs/pipelines/managing-priorities + const getPriority = () => { + if (isFork()) { + return -1; + } + if (isMainBranch()) { + return 2; + } + if (isMergeQueue()) { + return 1; + } + return 0; + }; + + /** + * Steps + */ + + const getBuildVendorStep = platform => { + const { os, arch, abi, baseline } = platform; + + return { + key: `${getKey(platform)}-build-vendor`, + label: `build-vendor`, + agents: { + os, + arch, + abi, + queue: abi ? `build-${os}-${abi}` : `build-${os}`, + }, + retry: getRetry(), + cancel_on_build_failing: isMergeQueue(), + env: { + ENABLE_BASELINE: baseline ? "ON" : "OFF", + }, + command: "bun run build:ci --target dependencies", + }; + }; + + const getBuildCppStep = platform => { + const { os, arch, abi, baseline } = platform; + + return { + key: `${getKey(platform)}-build-cpp`, + label: `build-cpp`, + agents: { + os, + arch, + abi, + queue: abi ? `build-${os}-${abi}` : `build-${os}`, + }, + retry: getRetry(), + cancel_on_build_failing: isMergeQueue(), + env: { + BUN_CPP_ONLY: "ON", + ENABLE_BASELINE: baseline ? 
"ON" : "OFF", + }, + command: "bun run build:ci --target bun", + }; + }; + + const getBuildZigStep = platform => { + const { os, arch, abi, baseline } = platform; + const toolchain = getKey(platform); + + return { + key: `${getKey(platform)}-build-zig`, + label: `build-zig`, + agents: { + queue: "build-zig", + }, + retry: getRetry(), + cancel_on_build_failing: isMergeQueue(), + env: { + ENABLE_BASELINE: baseline ? "ON" : "OFF", + }, + command: `bun run build:ci --target bun-zig --toolchain ${toolchain}`, + }; + }; + + const getBuildBunStep = platform => { + const { os, arch, abi, baseline } = platform; + + return { + key: `${getKey(platform)}-build-bun`, + label: `build-bun`, + depends_on: [ + `${getKey(platform)}-build-vendor`, + `${getKey(platform)}-build-cpp`, + `${getKey(platform)}-build-zig`, + ], + agents: { + os, + arch, + abi, + queue: `build-${os}`, + }, + retry: getRetry(), + cancel_on_build_failing: isMergeQueue(), + env: { + BUN_LINK_ONLY: "ON", + ENABLE_BASELINE: baseline ? "ON" : "OFF", + }, + command: "bun run build:ci --target bun", + }; + }; + + const getTestBunStep = platform => { + const { os, arch, abi, distro, release } = platform; + + let name; + if (os === "darwin" || os === "windows") { + name = getLabel({ ...platform, release }); + } else { + name = getLabel({ ...platform, os: distro, release }); + } + + let agents; + if (os === "darwin") { + agents = { os, arch, abi, queue: `test-darwin` }; + } else if (os === "windows") { + agents = { os, arch, abi, robobun: true }; + } else { + agents = { os, arch, abi, distro, release, robobun: true }; + } + + let command; + if (os === "windows") { + command = `node .\\scripts\\runner.node.mjs --step ${getKey(platform)}-build-bun`; + } else { + command = `./scripts/runner.node.mjs --step ${getKey(platform)}-build-bun`; + } + + let parallelism; + if (os === "darwin") { + parallelism = 2; + } else { + parallelism = 10; + } + + let depends; + let env; + if (buildId) { + env = { + BUILDKITE_ARTIFACT_BUILD_ID: buildId, + }; + } else { + depends = [`${getKey(platform)}-build-bun`]; + } + + let retry; + if (os !== "windows") { + // When the runner fails on Windows, Buildkite only detects an exit code of 1. + // Because of this, we don't know if the run was fatal, or soft-failed. 
+ retry = getRetry(); + } + + return { + key: `${getKey(platform)}-${distro}-${release.replace(/\./g, "")}-test-bun`, + label: `${name} - test-bun`, + depends_on: depends, + agents, + retry, + cancel_on_build_failing: isMergeQueue(), + soft_fail: isMainBranch(), + parallelism, + command, + env, + }; + }; + + /** + * Config + */ + + const buildPlatforms = [ + { os: "darwin", arch: "aarch64" }, + { os: "darwin", arch: "x64" }, + { os: "linux", arch: "aarch64" }, + // { os: "linux", arch: "aarch64", abi: "musl" }, // TODO: + { os: "linux", arch: "x64" }, + { os: "linux", arch: "x64", baseline: true }, + // { os: "linux", arch: "x64", abi: "musl" }, // TODO: + { os: "windows", arch: "x64" }, + { os: "windows", arch: "x64", baseline: true }, + ]; + + const testPlatforms = [ + { os: "darwin", arch: "aarch64", distro: "sonoma", release: "14" }, + { os: "darwin", arch: "aarch64", distro: "ventura", release: "13" }, + { os: "darwin", arch: "x64", distro: "sonoma", release: "14" }, + { os: "darwin", arch: "x64", distro: "ventura", release: "13" }, + { os: "linux", arch: "aarch64", distro: "debian", release: "12" }, + { os: "linux", arch: "aarch64", distro: "ubuntu", release: "22.04" }, + { os: "linux", arch: "aarch64", distro: "ubuntu", release: "20.04" }, + // { os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "edge" }, // TODO: + { os: "linux", arch: "x64", distro: "debian", release: "12" }, + { os: "linux", arch: "x64", distro: "ubuntu", release: "22.04" }, + { os: "linux", arch: "x64", distro: "ubuntu", release: "20.04" }, + { os: "linux", arch: "x64", distro: "debian", release: "12", baseline: true }, + { os: "linux", arch: "x64", distro: "ubuntu", release: "22.04", baseline: true }, + { os: "linux", arch: "x64", distro: "ubuntu", release: "20.04", baseline: true }, + // { os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "edge" }, // TODO: + { os: "windows", arch: "x64", distro: "server", release: "2019" }, + { os: "windows", arch: "x64", distro: "server", release: "2019", baseline: true }, + ]; + + return { + priority: getPriority(), + steps: [ + ...buildPlatforms.map(platform => { + const { os, arch, baseline } = platform; + + let steps = [ + ...testPlatforms + .filter(platform => platform.os === os && platform.arch === arch && baseline === platform.baseline) + .map(platform => getTestBunStep(platform)), + ]; + + if (!buildId) { + steps.unshift( + getBuildVendorStep(platform), + getBuildCppStep(platform), + getBuildZigStep(platform), + getBuildBunStep(platform), + ); + } + + return { + key: getKey(platform), + group: getLabel(platform), + steps, + }; + }), + ], + }; +} + +async function main() { + printEnvironment(); + + console.log("Checking last successful build..."); + const lastBuild = await getLastSuccessfulBuild(); + if (lastBuild) { + const { id, path, commit_id: commit } = lastBuild; + console.log(" - Build ID:", id); + console.log(" - Build URL:", new URL(path, "https://buildkite.com/").toString()); + console.log(" - Commit:", commit); + } else { + console.log(" - No build found"); + } + + console.log("Checking changed files..."); + const baseRef = isFork() ? 
`${getRepositoryOwner()}:${getCommit()}` : getCommit(); + console.log(" - Base Ref:", baseRef); + const headRef = lastBuild?.commit_id || getTargetBranch() || getMainBranch(); + console.log(" - Head Ref:", headRef); + + const changedFiles = await getChangedFiles(undefined, baseRef, headRef); + if (changedFiles) { + if (changedFiles.length) { + changedFiles.forEach(filename => console.log(` - ${filename}`)); + } else { + console.log(" - No changed files"); + } + } + + const isDocumentationFile = filename => /^(\.vscode|\.github|bench|docs|examples)|\.(md)$/i.test(filename); + const isTestFile = filename => /^test/i.test(filename) || /runner\.node\.mjs$/i.test(filename); + + console.log("Checking if CI should be skipped..."); + { + const message = getCommitMessage(); + const match = /\[(skip ci|no ci|ci skip|ci no)\]/i.exec(message); + if (match) { + const [, reason] = match; + console.log(" - Yes, because commit message contains:", reason); + return; + } + } + if (changedFiles && changedFiles.every(filename => isDocumentationFile(filename))) { + console.log(" - Yes, because all changed files are documentation"); + return; + } + + console.log("Checking if build should be skipped..."); + let skipBuild; + { + const message = getCommitMessage(); + const match = /\[(only tests?|tests? only|skip build|no build|build skip|build no)\]/i.exec(message); + if (match) { + const [, reason] = match; + console.log(" - Yes, because commit message contains:", reason); + skipBuild = true; + } + } + if (changedFiles && changedFiles.every(filename => isTestFile(filename) || isDocumentationFile(filename))) { + console.log(" - Yes, because all changed files are tests or documentation"); + skipBuild = true; + } + + console.log("Checking if build is a named release..."); + let buildRelease; + { + const message = getCommitMessage(); + const match = /\[(release|release build|build release)\]/i.exec(message); + if (match) { + const [, reason] = match; + console.log(" - Yes, because commit message contains:", reason); + buildRelease = true; + } + } + + console.log("Generating pipeline..."); + const pipeline = getPipeline(lastBuild && skipBuild ? lastBuild.id : undefined); + const content = toYaml(pipeline); + const contentPath = join(process.cwd(), ".buildkite", "ci.yml"); + writeFileSync(contentPath, content); + + console.log("Generated pipeline:"); + console.log(" - Path:", contentPath); + console.log(" - Size:", (content.length / 1024).toFixed(), "KB"); + + if (isBuildkite) { + console.log("Setting canary revision..."); + const canaryRevision = buildRelease ? 0 : await getCanaryRevision(); + await spawnSafe(["buildkite-agent", "meta-data", "set", "canary", `${canaryRevision}`]); + + console.log("Uploading pipeline..."); + await spawnSafe(["buildkite-agent", "pipeline", "upload", contentPath]); + } +} + +await main(); diff --git a/.buildkite/ci.yml b/.buildkite/ci.yml deleted file mode 100644 index 742b19c8ec..0000000000 --- a/.buildkite/ci.yml +++ /dev/null @@ -1,782 +0,0 @@ -# Build and test Bun on macOS, Linux, and Windows. -# https://buildkite.com/docs/pipelines/defining-steps -# -# If a step has the `robobun: true` label, robobun will listen -# to webhooks from Buildkite and provision a VM to run the step. -# -# Changes to this file will be automatically uploaded on the next run -# for a particular commit. -# -# Future tests machines to be added: -# - macOS 12 -# - Windows Server 2016 & 2019 -# - Amazon Linux 2 & 2023 -# - CentOS / RHEL / Fedora / other Linux distros -# - Docker containers -# - Rasberry Pi? 
-steps: - # macOS aarch64 - - key: "darwin-aarch64" - group: ":darwin: aarch64" - steps: - - key: "darwin-aarch64-build-deps" - label: ":darwin: aarch64 - build-deps" - agents: - queue: "build-darwin" - os: "darwin" - arch: "aarch64" - command: - - "./.buildkite/scripts/build-deps.sh" - - - key: "darwin-aarch64-build-zig" - label: ":darwin: aarch64 - build-zig" - agents: - queue: "build-darwin" - os: "darwin" - arch: "aarch64" - command: - - "./.buildkite/scripts/build-zig.sh darwin aarch64" - - - key: "darwin-aarch64-build-cpp" - label: ":darwin: aarch64 - build-cpp" - agents: - queue: "build-darwin" - os: "darwin" - arch: "aarch64" - command: - - "./.buildkite/scripts/build-cpp.sh" - - - key: "darwin-aarch64-build-bun" - label: ":darwin: aarch64 - build-bun" - depends_on: - - "darwin-aarch64-build-deps" - - "darwin-aarch64-build-zig" - - "darwin-aarch64-build-cpp" - agents: - queue: "build-darwin" - os: "darwin" - arch: "aarch64" - command: - - "./.buildkite/scripts/build-bun.sh" - - - key: "darwin-aarch64-test-macos-14" - label: ":darwin: 14 aarch64 - test-bun" - if: "build.branch != 'main'" - parallelism: 3 - soft_fail: - - exit_status: 2 - retry: - automatic: - - exit_status: 1 - limit: 1 - - exit_status: -1 - limit: 3 - - exit_status: 255 - limit: 3 - - signal_reason: agent_stop - limit: 3 - - signal: SIGTERM - limit: 3 - depends_on: - - "darwin-aarch64-build-bun" - agents: - queue: "test-darwin" - os: "darwin" - arch: "aarch64" - release: "14" - command: - - "./scripts/runner.node.mjs --step darwin-aarch64-build-bun" - - - key: "darwin-aarch64-test-macos-13" - label: ":darwin: 13 aarch64 - test-bun" - if: "build.branch != 'main'" - parallelism: 3 - soft_fail: - - exit_status: 2 - retry: - automatic: - - exit_status: 1 - limit: 1 - - exit_status: -1 - limit: 3 - - exit_status: 255 - limit: 3 - - signal_reason: agent_stop - limit: 3 - - signal: SIGTERM - limit: 3 - depends_on: - - "darwin-aarch64-build-bun" - agents: - queue: "test-darwin" - os: "darwin" - arch: "aarch64" - release: "13" - command: - - "./scripts/runner.node.mjs --step darwin-aarch64-build-bun" - - # macOS x64 - - key: "darwin-x64" - group: ":darwin: x64" - steps: - - key: "darwin-x64-build-deps" - label: ":darwin: x64 - build-deps" - agents: - queue: "build-darwin" - os: "darwin" - arch: "x64" - command: - - "./.buildkite/scripts/build-deps.sh" - - - key: "darwin-x64-build-zig" - label: ":darwin: x64 - build-zig" - agents: - queue: "build-darwin" - os: "darwin" - arch: "aarch64" - command: - - "./.buildkite/scripts/build-zig.sh darwin x64" - - - key: "darwin-x64-build-cpp" - label: ":darwin: x64 - build-cpp" - agents: - queue: "build-darwin" - os: "darwin" - arch: "x64" - command: - - "./.buildkite/scripts/build-cpp.sh" - - - key: "darwin-x64-build-bun" - label: ":darwin: x64 - build-bun" - depends_on: - - "darwin-x64-build-deps" - - "darwin-x64-build-zig" - - "darwin-x64-build-cpp" - agents: - queue: "build-darwin" - os: "darwin" - arch: "x64" - command: - - "./.buildkite/scripts/build-bun.sh" - - - key: "darwin-x64-test-macos-14" - label: ":darwin: 14 x64 - test-bun" - if: "build.branch != 'main'" - parallelism: 2 - soft_fail: - - exit_status: 2 - retry: - automatic: - - exit_status: 1 - limit: 1 - - exit_status: -1 - limit: 3 - - exit_status: 255 - limit: 3 - - signal_reason: agent_stop - limit: 3 - - signal: SIGTERM - limit: 3 - depends_on: - - "darwin-x64-build-bun" - agents: - queue: "test-darwin" - os: "darwin" - arch: "x64" - release: "14" - command: - - "./scripts/runner.node.mjs --step darwin-x64-build-bun" 
- - - key: "darwin-x64-test-macos-13" - label: ":darwin: 13 x64 - test-bun" - if: "build.branch != 'main'" - parallelism: 2 - soft_fail: - - exit_status: 2 - retry: - automatic: - - exit_status: 1 - limit: 1 - - exit_status: -1 - limit: 3 - - exit_status: 255 - limit: 3 - - signal_reason: agent_stop - limit: 3 - - signal: SIGTERM - limit: 3 - depends_on: - - "darwin-x64-build-bun" - agents: - queue: "test-darwin" - os: "darwin" - arch: "x64" - release: "13" - command: - - "./scripts/runner.node.mjs --step darwin-x64-build-bun" - - # Linux aarch64 - - key: "linux-aarch64" - group: ":linux: aarch64" - steps: - - key: "linux-aarch64-build-deps" - label: ":linux: aarch64 - build-deps" - agents: - queue: "build-linux" - os: "linux" - arch: "aarch64" - command: - - "./.buildkite/scripts/build-deps.sh" - - - key: "linux-aarch64-build-zig" - label: ":linux: aarch64 - build-zig" - agents: - queue: "build-darwin" - os: "darwin" - arch: "aarch64" - command: - - "./.buildkite/scripts/build-zig.sh linux aarch64" - - - key: "linux-aarch64-build-cpp" - label: ":linux: aarch64 - build-cpp" - agents: - queue: "build-linux" - os: "linux" - arch: "aarch64" - command: - - "./.buildkite/scripts/build-cpp.sh" - - - key: "linux-aarch64-build-bun" - label: ":linux: aarch64 - build-bun" - depends_on: - - "linux-aarch64-build-deps" - - "linux-aarch64-build-zig" - - "linux-aarch64-build-cpp" - agents: - queue: "build-linux" - os: "linux" - arch: "aarch64" - command: - - "./.buildkite/scripts/build-bun.sh" - - - key: "linux-aarch64-test-debian-12" - label: ":debian: 12 aarch64 - test-bun" - if: "build.branch != 'main'" - parallelism: 5 - soft_fail: - - exit_status: 2 - retry: - automatic: - - exit_status: 1 - limit: 1 - - exit_status: -1 - limit: 3 - - exit_status: 255 - limit: 3 - - signal_reason: agent_stop - limit: 3 - - signal: SIGTERM - limit: 3 - depends_on: - - "linux-aarch64-build-bun" - agents: - robobun: "true" - os: "linux" - arch: "aarch64" - distro: "debian" - release: "12" - command: - - "./scripts/runner.node.mjs --step linux-aarch64-build-bun" - - - key: "linux-aarch64-test-ubuntu-2204" - label: ":ubuntu: 22.04 aarch64 - test-bun" - if: "build.branch != 'main'" - parallelism: 5 - soft_fail: - - exit_status: 2 - retry: - automatic: - - exit_status: 1 - limit: 1 - - exit_status: -1 - limit: 3 - - exit_status: 255 - limit: 3 - - signal_reason: agent_stop - limit: 3 - - signal: SIGTERM - limit: 3 - depends_on: - - "linux-aarch64-build-bun" - agents: - robobun: "true" - os: "linux" - arch: "aarch64" - distro: "ubuntu" - release: "22.04" - command: - - "./scripts/runner.node.mjs --step linux-aarch64-build-bun" - - - key: "linux-aarch64-test-ubuntu-2004" - label: ":ubuntu: 20.04 aarch64 - test-bun" - if: "build.branch != 'main'" - parallelism: 5 - soft_fail: - - exit_status: 2 - retry: - automatic: - - exit_status: 1 - limit: 1 - - exit_status: -1 - limit: 3 - - exit_status: 255 - limit: 3 - - signal_reason: agent_stop - limit: 3 - - signal: SIGTERM - limit: 3 - depends_on: - - "linux-aarch64-build-bun" - agents: - robobun: "true" - os: "linux" - arch: "aarch64" - distro: "ubuntu" - release: "20.04" - command: - - "./scripts/runner.node.mjs --step linux-aarch64-build-bun" - - # Linux x64 - - key: "linux-x64" - group: ":linux: x64" - steps: - - key: "linux-x64-build-deps" - label: ":linux: x64 - build-deps" - agents: - queue: "build-linux" - os: "linux" - arch: "x64" - command: - - "./.buildkite/scripts/build-deps.sh" - - - key: "linux-x64-build-zig" - label: ":linux: x64 - build-zig" - agents: - queue: 
"build-darwin" - os: "darwin" - arch: "aarch64" - command: - - "./.buildkite/scripts/build-zig.sh linux x64" - - - key: "linux-x64-build-cpp" - label: ":linux: x64 - build-cpp" - agents: - queue: "build-linux" - os: "linux" - arch: "x64" - command: - - "./.buildkite/scripts/build-cpp.sh" - - - key: "linux-x64-build-bun" - label: ":linux: x64 - build-bun" - depends_on: - - "linux-x64-build-deps" - - "linux-x64-build-zig" - - "linux-x64-build-cpp" - agents: - queue: "build-linux" - os: "linux" - arch: "x64" - command: - - "./.buildkite/scripts/build-bun.sh" - - - key: "linux-x64-test-debian-12" - label: ":debian: 12 x64 - test-bun" - if: "build.branch != 'main'" - parallelism: 5 - soft_fail: - - exit_status: 2 - retry: - automatic: - - exit_status: 1 - limit: 1 - - exit_status: -1 - limit: 3 - - exit_status: 255 - limit: 3 - - signal_reason: agent_stop - limit: 3 - - signal: SIGTERM - limit: 3 - depends_on: - - "linux-x64-build-bun" - agents: - robobun: "true" - os: "linux" - arch: "x64" - distro: "debian" - release: "12" - command: - - "./scripts/runner.node.mjs --step linux-x64-build-bun" - - - key: "linux-x64-test-ubuntu-2204" - label: ":ubuntu: 22.04 x64 - test-bun" - if: "build.branch != 'main'" - parallelism: 5 - soft_fail: - - exit_status: 2 - retry: - automatic: - - exit_status: 1 - limit: 1 - - exit_status: -1 - limit: 3 - - exit_status: 255 - limit: 3 - - signal_reason: agent_stop - limit: 3 - - signal: SIGTERM - limit: 3 - depends_on: - - "linux-x64-build-bun" - agents: - robobun: "true" - os: "linux" - arch: "x64" - distro: "ubuntu" - release: "22.04" - command: - - "./scripts/runner.node.mjs --step linux-x64-build-bun" - - - key: "linux-x64-test-ubuntu-2004" - label: ":ubuntu: 20.04 x64 - test-bun" - if: "build.branch != 'main'" - parallelism: 5 - soft_fail: - - exit_status: 2 - retry: - automatic: - - exit_status: 1 - limit: 1 - - exit_status: -1 - limit: 3 - - exit_status: 255 - limit: 3 - - signal_reason: agent_stop - limit: 3 - - signal: SIGTERM - limit: 3 - depends_on: - - "linux-x64-build-bun" - agents: - robobun: "true" - os: "linux" - arch: "x64" - distro: "ubuntu" - release: "20.04" - command: - - "./scripts/runner.node.mjs --step linux-x64-build-bun" - - # Linux x64-baseline - - key: "linux-x64-baseline" - group: ":linux: x64-baseline" - steps: - - key: "linux-x64-baseline-build-deps" - label: ":linux: x64-baseline - build-deps" - agents: - queue: "build-linux" - os: "linux" - arch: "x64" - command: - - "./.buildkite/scripts/build-deps.sh" - - - key: "linux-x64-baseline-build-zig" - label: ":linux: x64-baseline - build-zig" - agents: - queue: "build-darwin" - os: "darwin" - arch: "aarch64" - command: - - "./.buildkite/scripts/build-zig.sh linux x64" - - - key: "linux-x64-baseline-build-cpp" - label: ":linux: x64-baseline - build-cpp" - agents: - queue: "build-linux" - os: "linux" - arch: "x64" - command: - - "./.buildkite/scripts/build-cpp.sh" - - - key: "linux-x64-baseline-build-bun" - label: ":linux: x64-baseline - build-bun" - depends_on: - - "linux-x64-baseline-build-deps" - - "linux-x64-baseline-build-zig" - - "linux-x64-baseline-build-cpp" - agents: - queue: "build-linux" - os: "linux" - arch: "x64" - command: - - "./.buildkite/scripts/build-bun.sh" - - - key: "linux-x64-baseline-test-debian-12" - label: ":debian: 12 x64-baseline - test-bun" - if: "build.branch != 'main'" - parallelism: 5 - soft_fail: - - exit_status: 2 - retry: - automatic: - - exit_status: 1 - limit: 1 - - exit_status: -1 - limit: 3 - - exit_status: 255 - limit: 3 - - signal_reason: agent_stop 
- limit: 3 - - signal: SIGTERM - limit: 3 - depends_on: - - "linux-x64-baseline-build-bun" - agents: - robobun: "true" - os: "linux" - arch: "x64" - distro: "debian" - release: "12" - command: - - "./scripts/runner.node.mjs --step linux-x64-baseline-build-bun" - - - key: "linux-x64-baseline-test-ubuntu-2204" - label: ":ubuntu: 22.04 x64-baseline - test-bun" - if: "build.branch != 'main'" - parallelism: 5 - soft_fail: - - exit_status: 2 - retry: - automatic: - - exit_status: 1 - limit: 1 - - exit_status: -1 - limit: 3 - - exit_status: 255 - limit: 3 - - signal_reason: agent_stop - limit: 3 - - signal: SIGTERM - limit: 3 - depends_on: - - "linux-x64-baseline-build-bun" - agents: - robobun: "true" - os: "linux" - arch: "x64" - distro: "ubuntu" - release: "22.04" - command: - - "./scripts/runner.node.mjs --step linux-x64-baseline-build-bun" - - - key: "linux-x64-baseline-test-ubuntu-2004" - label: ":ubuntu: 20.04 x64-baseline - test-bun" - if: "build.branch != 'main'" - parallelism: 5 - soft_fail: - - exit_status: 2 - retry: - automatic: - - exit_status: 1 - limit: 1 - - exit_status: -1 - limit: 3 - - exit_status: 255 - limit: 3 - - signal_reason: agent_stop - limit: 3 - - signal: SIGTERM - limit: 3 - depends_on: - - "linux-x64-baseline-build-bun" - agents: - robobun: "true" - os: "linux" - arch: "x64" - distro: "ubuntu" - release: "20.04" - command: - - "./scripts/runner.node.mjs --step linux-x64-baseline-build-bun" - - # Windows x64 - - key: "windows-x64" - group: ":windows: x64" - steps: - - key: "windows-x64-build-deps" - label: ":windows: x64 - build-deps" - agents: - queue: "build-windows" - os: "windows" - arch: "x64" - artifact_paths: - - "build\\bun-deps\\*.lib" - env: - CCACHE_DISABLE: "1" - command: - - ".\\scripts\\all-dependencies.ps1" - - - key: "windows-x64-build-zig" - label: ":windows: x64 - build-zig" - agents: - queue: "build-darwin" - os: "darwin" # cross-compile on Linux or Darwin - arch: "aarch64" - command: - - "./.buildkite/scripts/build-zig.sh windows x64" - - - key: "windows-x64-build-cpp" - label: ":windows: x64 - build-cpp" - agents: - queue: "build-windows" - os: "windows" - arch: "x64" - artifact_paths: - # HACK: See scripts/build-bun-cpp.ps1 - # - "build\\bun-cpp-objects.a" - - "build\\bun-cpp-objects.a.*" - env: - CCACHE_DISABLE: "1" - command: - - ".\\scripts\\build-bun-cpp.ps1" - - - key: "windows-x64-build-bun" - label: ":windows: x64 - build-bun" - depends_on: - - "windows-x64-build-deps" - - "windows-x64-build-zig" - - "windows-x64-build-cpp" - agents: - queue: "build-windows" - os: "windows" - arch: "x64" - artifact_paths: - - "bun-windows-x64.zip" - - "bun-windows-x64-profile.zip" - - "features.json" - env: - CCACHE_DISABLE: "1" - command: - - ".\\scripts\\buildkite-link-bun.ps1" - - - key: "windows-x64-test-bun" - label: ":windows: x64 - test-bun" - if: "build.branch != 'main'" - parallelism: 10 - soft_fail: - - exit_status: 1 - retry: - automatic: - - exit_status: -1 - limit: 3 - - exit_status: 255 - limit: 3 - - signal_reason: agent_stop - limit: 3 - - signal: SIGTERM - limit: 3 - depends_on: - - "windows-x64-build-bun" - agents: - robobun: "true" - os: "windows" - arch: "x64" - command: - - "node .\\scripts\\runner.node.mjs --step windows-x64-build-bun" - - # Windows x64-baseline - - key: "windows-x64-baseline" - group: ":windows: x64-baseline" - steps: - - key: "windows-x64-baseline-build-deps" - label: ":windows: x64-baseline - build-deps" - agents: - queue: "build-windows" - os: "windows" - arch: "x64" - artifact_paths: - - "build\\bun-deps\\*.lib" 
- env: - CCACHE_DISABLE: "1" - USE_BASELINE_BUILD: "1" - command: - - ".\\scripts\\all-dependencies.ps1" - - - key: "windows-x64-baseline-build-zig" - label: ":windows: x64-baseline - build-zig" - agents: - queue: "build-darwin" - os: "darwin" # cross-compile on Linux or Darwin - arch: "aarch64" - command: - - "./.buildkite/scripts/build-zig.sh windows x64" - - - key: "windows-x64-baseline-build-cpp" - label: ":windows: x64-baseline - build-cpp" - agents: - queue: "build-windows" - os: "windows" - arch: "x64" - artifact_paths: - # HACK: See scripts/build-bun-cpp.ps1 - # - "build\\bun-cpp-objects.a" - - "build\\bun-cpp-objects.a.*" - env: - CCACHE_DISABLE: "1" - USE_BASELINE_BUILD: "1" - command: - - ".\\scripts\\build-bun-cpp.ps1" - - - key: "windows-x64-baseline-build-bun" - label: ":windows: x64-baseline - build-bun" - depends_on: - - "windows-x64-baseline-build-deps" - - "windows-x64-baseline-build-zig" - - "windows-x64-baseline-build-cpp" - agents: - queue: "build-windows" - os: "windows" - arch: "x64" - artifact_paths: - - "bun-windows-x64-baseline.zip" - - "bun-windows-x64-baseline-profile.zip" - - "features.json" - env: - CCACHE_DISABLE: "1" - USE_BASELINE_BUILD: "1" - command: - - ".\\scripts\\buildkite-link-bun.ps1 -Baseline $$True" - - - key: "windows-x64-baseline-test-bun" - label: ":windows: x64-baseline - test-bun" - if: "build.branch != 'main'" - parallelism: 10 - soft_fail: - - exit_status: 1 - retry: - automatic: - - exit_status: -1 - limit: 3 - - exit_status: 255 - limit: 3 - - signal_reason: agent_stop - limit: 3 - - signal: SIGTERM - limit: 3 - depends_on: - - "windows-x64-baseline-build-bun" - agents: - robobun: "true" - os: "windows" - arch: "x64" - command: - - "node .\\scripts\\runner.node.mjs --step windows-x64-baseline-build-bun" diff --git a/.buildkite/scripts/build-bun.sh b/.buildkite/scripts/build-bun.sh deleted file mode 100755 index 59363a39fd..0000000000 --- a/.buildkite/scripts/build-bun.sh +++ /dev/null @@ -1,55 +0,0 @@ -#!/bin/bash - -set -eo pipefail -source "$(dirname "$0")/env.sh" - -function run_command() { - set -x - "$@" - { set +x; } 2>/dev/null -} - -cwd="$(pwd)" - -mkdir -p build -source "$(dirname "$0")/download-artifact.sh" "build/bun-deps/**" --step "$BUILDKITE_GROUP_KEY-build-deps" -source "$(dirname "$0")/download-artifact.sh" "build/bun-zig.o" --step "$BUILDKITE_GROUP_KEY-build-zig" -source "$(dirname "$0")/download-artifact.sh" "build/bun-cpp-objects.a" --step "$BUILDKITE_GROUP_KEY-build-cpp" --split -cd build - -run_command cmake .. 
"${CMAKE_FLAGS[@]}" \ - -GNinja \ - -DBUN_LINK_ONLY="1" \ - -DNO_CONFIGURE_DEPENDS="1" \ - -DBUN_ZIG_OBJ_DIR="$cwd/build" \ - -DBUN_CPP_ARCHIVE="$cwd/build/bun-cpp-objects.a" \ - -DBUN_DEPS_OUT_DIR="$cwd/build/bun-deps" \ - -DCMAKE_BUILD_TYPE="$CMAKE_BUILD_TYPE" \ - -DCPU_TARGET="$CPU_TARGET" \ - -DUSE_LTO="$USE_LTO" \ - -DUSE_DEBUG_JSC="$USE_DEBUG_JSC" \ - -DCANARY="$CANARY" \ - -DGIT_SHA="$GIT_SHA" -run_command ninja -v -j "$CPUS" -run_command ls - -tag="bun-$BUILDKITE_GROUP_KEY" -if [ "$USE_LTO" == "OFF" ]; then - # Remove OS check when LTO is enabled on macOS again - if [[ "$tag" == *"darwin"* ]]; then - tag="$tag-nolto" - fi -fi - -for name in bun bun-profile; do - dir="$tag" - if [ "$name" == "bun-profile" ]; then - dir="$tag-profile" - fi - run_command chmod +x "$name" - run_command "./$name" --revision - run_command mkdir -p "$dir" - run_command mv "$name" "$dir/$name" - run_command zip -r "$dir.zip" "$dir" - source "$cwd/.buildkite/scripts/upload-artifact.sh" "$dir.zip" -done diff --git a/.buildkite/scripts/build-cpp.sh b/.buildkite/scripts/build-cpp.sh deleted file mode 100755 index 4112227fe5..0000000000 --- a/.buildkite/scripts/build-cpp.sh +++ /dev/null @@ -1,35 +0,0 @@ -#!/bin/bash - -set -eo pipefail -source "$(dirname "$0")/env.sh" -export FORCE_UPDATE_SUBMODULES=1 -source "$(realpath $(dirname "$0")/../../scripts/update-submodules.sh)" -{ set +x; } 2>/dev/null - -function run_command() { - set -x - "$@" - { set +x; } 2>/dev/null -} - -mkdir -p build -cd build -mkdir -p tmp_modules tmp_functions js codegen - -run_command cmake .. "${CMAKE_FLAGS[@]}" \ - -GNinja \ - -DBUN_CPP_ONLY="1" \ - -DNO_CONFIGURE_DEPENDS="1" \ - -DCMAKE_BUILD_TYPE="$CMAKE_BUILD_TYPE" \ - -DCPU_TARGET="$CPU_TARGET" \ - -DUSE_LTO="$USE_LTO" \ - -DUSE_DEBUG_JSC="$USE_DEBUG_JSC" \ - -DCANARY="$CANARY" \ - -DGIT_SHA="$GIT_SHA" - -chmod +x compile-cpp-only.sh -source compile-cpp-only.sh -v -j "$CPUS" -{ set +x; } 2>/dev/null - -cd .. -source "$(dirname "$0")/upload-artifact.sh" "build/bun-cpp-objects.a" --split diff --git a/.buildkite/scripts/build-deps.sh b/.buildkite/scripts/build-deps.sh deleted file mode 100755 index e736fb43ff..0000000000 --- a/.buildkite/scripts/build-deps.sh +++ /dev/null @@ -1,22 +0,0 @@ -#!/bin/bash - -set -eo pipefail -source "$(dirname "$0")/env.sh" -source "$(realpath $(dirname "$0")/../../scripts/all-dependencies.sh)" - -artifacts=( - libcrypto.a libssl.a libdecrepit.a - libcares.a - libarchive.a - liblolhtml.a - libmimalloc.a libmimalloc.o - libtcc.a - libz.a - libzstd.a - libdeflate.a - liblshpack.a -) - -for artifact in "${artifacts[@]}"; do - source "$(dirname "$0")/upload-artifact.sh" "build/bun-deps/$artifact" -done diff --git a/.buildkite/scripts/build-old-js.sh b/.buildkite/scripts/build-old-js.sh deleted file mode 100755 index 92484aebe1..0000000000 --- a/.buildkite/scripts/build-old-js.sh +++ /dev/null @@ -1,40 +0,0 @@ -#!/bin/bash - -set -eo pipefail -source "$(dirname "$0")/env.sh" - -function assert_bun() { - if ! command -v bun &>/dev/null; then - echo "error: bun is not installed" 1>&2 - exit 1 - fi -} - -function assert_make() { - if ! 
command -v make &>/dev/null; then - echo "error: make is not installed" 1>&2 - exit 1 - fi -} - -function run_command() { - set -x - "$@" - { set +x; } 2>/dev/null -} - -function build_node_fallbacks() { - local cwd="src/node-fallbacks" - run_command bun install --cwd "$cwd" --frozen-lockfile - run_command bun run --cwd "$cwd" build -} - -function build_old_js() { - run_command bun install --frozen-lockfile - run_command make runtime_js fallback_decoder bun_error -} - -assert_bun -assert_make -build_node_fallbacks -build_old_js diff --git a/.buildkite/scripts/build-zig.sh b/.buildkite/scripts/build-zig.sh deleted file mode 100755 index e7a2614556..0000000000 --- a/.buildkite/scripts/build-zig.sh +++ /dev/null @@ -1,80 +0,0 @@ -#!/bin/bash - -set -eo pipefail -source "$(dirname "$0")/env.sh" - -function assert_target() { - local arch="${2-$(uname -m)}" - case "$(echo "$arch" | tr '[:upper:]' '[:lower:]')" in - x64 | x86_64 | amd64) - export ZIG_ARCH="x86_64" - if [[ "$BUILDKITE_STEP_KEY" == *"baseline"* ]]; then - export ZIG_CPU_TARGET="nehalem" - else - export ZIG_CPU_TARGET="haswell" - fi - ;; - aarch64 | arm64) - export ZIG_ARCH="aarch64" - export ZIG_CPU_TARGET="native" - ;; - *) - echo "error: Unsupported architecture: $arch" 1>&2 - exit 1 - ;; - esac - local os="${1-$(uname -s)}" - case "$(echo "$os" | tr '[:upper:]' '[:lower:]')" in - linux) - export ZIG_TARGET="$ZIG_ARCH-linux-gnu" ;; - darwin) - export ZIG_TARGET="$ZIG_ARCH-macos-none" ;; - windows) - export ZIG_TARGET="$ZIG_ARCH-windows-msvc" ;; - *) - echo "error: Unsupported operating system: $os" 1>&2 - exit 1 - ;; - esac -} - -function run_command() { - set -x - "$@" - { set +x; } 2>/dev/null -} - -assert_target "$@" - -# Since the zig build depends on files from the zig submodule, -# make sure to update the submodule before building. -run_command git submodule update --init --recursive --progress --depth=1 --checkout src/deps/zig - -# TODO: Move these to be part of the CMake build -source "$(dirname "$0")/build-old-js.sh" - -cwd="$(pwd)" -mkdir -p build -cd build - -run_command cmake .. "${CMAKE_FLAGS[@]}" \ - -GNinja \ - -DNO_CONFIGURE_DEPENDS="1" \ - -DNO_CODEGEN="0" \ - -DWEBKIT_DIR="omit" \ - -DBUN_ZIG_OBJ_DIR="$cwd/build" \ - -DZIG_LIB_DIR="$cwd/src/deps/zig/lib" \ - -DCMAKE_BUILD_TYPE="$CMAKE_BUILD_TYPE" \ - -DARCH="$ZIG_ARCH" \ - -DCPU_TARGET="$ZIG_CPU_TARGET" \ - -DZIG_TARGET="$ZIG_TARGET" \ - -DUSE_LTO="$USE_LTO" \ - -DUSE_DEBUG_JSC="$USE_DEBUG_JSC" \ - -DCANARY="$CANARY" \ - -DGIT_SHA="$GIT_SHA" - -export ONLY_ZIG="1" -run_command ninja "$cwd/build/bun-zig.o" -v -j "$CPUS" - -cd .. 
-source "$(dirname "$0")/upload-artifact.sh" "build/bun-zig.o" diff --git a/.buildkite/scripts/download-artifact.ps1 b/.buildkite/scripts/download-artifact.ps1 deleted file mode 100755 index 0504474077..0000000000 --- a/.buildkite/scripts/download-artifact.ps1 +++ /dev/null @@ -1,47 +0,0 @@ -param ( - [Parameter(Mandatory=$true)] - [string[]] $Paths, - [switch] $Split -) - -$ErrorActionPreference = "Stop" - -function Assert-Buildkite-Agent() { - if (-not (Get-Command "buildkite-agent" -ErrorAction SilentlyContinue)) { - Write-Error "Cannot find buildkite-agent, please install it: https://buildkite.com/docs/agent/v3/install" - exit 1 - } -} - -function Assert-Join-File() { - if (-not (Get-Command "Join-File" -ErrorAction SilentlyContinue)) { - Write-Error "Cannot find Join-File, please install it: https://www.powershellgallery.com/packages/FileSplitter/1.3" - exit 1 - } -} - -function Download-Buildkite-Artifact() { - param ( - [Parameter(Mandatory=$true)] - [string] $Path, - ) - if ($Split) { - & buildkite-agent artifact download "$Path.*" --debug --debug-http - Join-File -Path "$(Resolve-Path .)\$Path" -Verbose -DeletePartFiles - } else { - & buildkite-agent artifact download "$Path" --debug --debug-http - } - if (-not (Test-Path $Path)) { - Write-Error "Could not find artifact: $Path" - exit 1 - } -} - -Assert-Buildkite-Agent -if ($Split) { - Assert-Join-File -} - -foreach ($Path in $Paths) { - Download-Buildkite-Artifact $Path -} diff --git a/.buildkite/scripts/download-artifact.sh b/.buildkite/scripts/download-artifact.sh deleted file mode 100755 index 5907561853..0000000000 --- a/.buildkite/scripts/download-artifact.sh +++ /dev/null @@ -1,46 +0,0 @@ -#!/bin/bash - -set -eo pipefail - -function assert_buildkite_agent() { - if ! command -v buildkite-agent &> /dev/null; then - echo "error: Cannot find buildkite-agent, please install it:" - echo "https://buildkite.com/docs/agent/v3/install" - exit 1 - fi -} - -function download_buildkite_artifact() { - local path="$1"; shift - local split="0" - local args=() - while true; do - if [ -z "$1" ]; then - break - fi - case "$1" in - --split) split="1"; shift ;; - *) args+=("$1"); shift ;; - esac - done - if [ "$split" == "1" ]; then - run_command buildkite-agent artifact download "$path.*" . "${args[@]}" - run_command cat $path.?? > "$path" - run_command rm -f $path.?? - else - run_command buildkite-agent artifact download "$path" . "${args[@]}" - fi - if [[ "$path" != *"*"* ]] && [ ! 
-f "$path" ]; then - echo "error: Could not find artifact: $path" - exit 1 - fi -} - -function run_command() { - set -x - "$@" - { set +x; } 2>/dev/null -} - -assert_buildkite_agent -download_buildkite_artifact "$@" diff --git a/.buildkite/scripts/env.sh b/.buildkite/scripts/env.sh deleted file mode 100755 index fcea58e949..0000000000 --- a/.buildkite/scripts/env.sh +++ /dev/null @@ -1,120 +0,0 @@ -#!/bin/bash - -set -eo pipefail - -function assert_os() { - local os="$(uname -s)" - case "$os" in - Linux) - echo "linux" ;; - Darwin) - echo "darwin" ;; - *) - echo "error: Unsupported operating system: $os" 1>&2 - exit 1 - ;; - esac -} - -function assert_arch() { - local arch="$(uname -m)" - case "$arch" in - aarch64 | arm64) - echo "aarch64" ;; - x86_64 | amd64) - echo "x64" ;; - *) - echo "error: Unknown architecture: $arch" 1>&2 - exit 1 - ;; - esac -} - -function assert_build() { - if [ -z "$BUILDKITE_REPO" ]; then - echo "error: Cannot find repository for this build" - exit 1 - fi - if [ -z "$BUILDKITE_COMMIT" ]; then - echo "error: Cannot find commit for this build" - exit 1 - fi - if [ -z "$BUILDKITE_STEP_KEY" ]; then - echo "error: Cannot find step key for this build" - exit 1 - fi - if [ -n "$BUILDKITE_GROUP_KEY" ] && [[ "$BUILDKITE_STEP_KEY" != "$BUILDKITE_GROUP_KEY"* ]]; then - echo "error: Build step '$BUILDKITE_STEP_KEY' does not start with group key '$BUILDKITE_GROUP_KEY'" - exit 1 - fi - # Skip os and arch checks for Zig, since it's cross-compiled on macOS - if [[ "$BUILDKITE_STEP_KEY" != *"zig"* ]]; then - local os="$(assert_os)" - if [[ "$BUILDKITE_STEP_KEY" != *"$os"* ]]; then - echo "error: Build step '$BUILDKITE_STEP_KEY' does not match operating system '$os'" - exit 1 - fi - local arch="$(assert_arch)" - if [[ "$BUILDKITE_STEP_KEY" != *"$arch"* ]]; then - echo "error: Build step '$BUILDKITE_STEP_KEY' does not match architecture '$arch'" - exit 1 - fi - fi -} - -function assert_buildkite_agent() { - if ! 
command -v buildkite-agent &> /dev/null; then - echo "error: Cannot find buildkite-agent, please install it:" - echo "https://buildkite.com/docs/agent/v3/install" - exit 1 - fi -} - -function export_environment() { - source "$(realpath $(dirname "$0")/../../scripts/env.sh)" - source "$(realpath $(dirname "$0")/../../scripts/update-submodules.sh)" - { set +x; } 2>/dev/null - export GIT_SHA="$BUILDKITE_COMMIT" - export CCACHE_DIR="$HOME/.cache/ccache/$BUILDKITE_STEP_KEY" - export SCCACHE_DIR="$HOME/.cache/sccache/$BUILDKITE_STEP_KEY" - export ZIG_LOCAL_CACHE_DIR="$HOME/.cache/zig-cache/$BUILDKITE_STEP_KEY" - export ZIG_GLOBAL_CACHE_DIR="$HOME/.cache/zig-cache/$BUILDKITE_STEP_KEY" - export BUN_DEPS_CACHE_DIR="$HOME/.cache/bun-deps/$BUILDKITE_STEP_KEY" - if [ "$(assert_os)" == "linux" ]; then - export USE_LTO="ON" - fi - if [ "$(assert_arch)" == "aarch64" ]; then - export CPU_TARGET="native" - elif [[ "$BUILDKITE_STEP_KEY" == *"baseline"* ]]; then - export CPU_TARGET="nehalem" - else - export CPU_TARGET="haswell" - fi - if $(buildkite-agent meta-data exists release &> /dev/null); then - export CMAKE_BUILD_TYPE="$(buildkite-agent meta-data get release)" - else - export CMAKE_BUILD_TYPE="Release" - fi - if $(buildkite-agent meta-data exists canary &> /dev/null); then - export CANARY="$(buildkite-agent meta-data get canary)" - else - export CANARY="1" - fi - if $(buildkite-agent meta-data exists assertions &> /dev/null); then - export USE_DEBUG_JSC="$(buildkite-agent meta-data get assertions)" - else - export USE_DEBUG_JSC="OFF" - fi - if [ "$BUILDKITE_CLEAN_CHECKOUT" == "true" || "$BUILDKITE_BRANCH" == "main" ]; then - rm -rf "$CCACHE_DIR" - rm -rf "$SCCACHE_DIR" - rm -rf "$ZIG_LOCAL_CACHE_DIR" - rm -rf "$ZIG_GLOBAL_CACHE_DIR" - rm -rf "$BUN_DEPS_CACHE_DIR" - export CCACHE_RECACHE="1" - fi -} - -assert_build -assert_buildkite_agent -export_environment diff --git a/.buildkite/scripts/prepare-build.sh b/.buildkite/scripts/prepare-build.sh index 1c245d9618..a76370fd7c 100755 --- a/.buildkite/scripts/prepare-build.sh +++ b/.buildkite/scripts/prepare-build.sh @@ -2,96 +2,10 @@ set -eo pipefail -function assert_build() { - if [ -z "$BUILDKITE_REPO" ]; then - echo "error: Cannot find repository for this build" - exit 1 - fi - if [ -z "$BUILDKITE_COMMIT" ]; then - echo "error: Cannot find commit for this build" - exit 1 - fi -} - -function assert_buildkite_agent() { - if ! command -v buildkite-agent &> /dev/null; then - echo "error: Cannot find buildkite-agent, please install it:" - echo "https://buildkite.com/docs/agent/v3/install" - exit 1 - fi -} - -function assert_jq() { - assert_command "jq" "jq" "https://stedolan.github.io/jq/" -} - -function assert_curl() { - assert_command "curl" "curl" "https://curl.se/download.html" -} - -function assert_command() { - local command="$1" - local package="$2" - local help_url="$3" - if ! command -v "$command" &> /dev/null; then - echo "warning: $command is not installed, installing..." 
- if command -v brew &> /dev/null; then - HOMEBREW_NO_AUTO_UPDATE=1 brew install "$package" - else - echo "error: Cannot install $command, please install it" - if [ -n "$help_url" ]; then - echo "" - echo "hint: See $help_url for help" - fi - exit 1 - fi - fi -} - -function assert_release() { - if [ "$RELEASE" == "1" ]; then - run_command buildkite-agent meta-data set canary "0" - fi -} - -function assert_canary() { - local canary="$(buildkite-agent meta-data get canary 2>/dev/null)" - if [ -z "$canary" ]; then - local repo=$(echo "$BUILDKITE_REPO" | sed -E 's#https://github.com/([^/]+)/([^/]+).git#\1/\2#g') - local tag="$(curl -sL "https://api.github.com/repos/$repo/releases/latest" | jq -r ".tag_name")" - if [ "$tag" == "null" ]; then - canary="1" - else - local revision=$(curl -sL "https://api.github.com/repos/$repo/compare/$tag...$BUILDKITE_COMMIT" | jq -r ".ahead_by") - if [ "$revision" == "null" ]; then - canary="1" - else - canary="$revision" - fi - fi - run_command buildkite-agent meta-data set canary "$canary" - fi -} - -function upload_buildkite_pipeline() { - local path="$1" - if [ ! -f "$path" ]; then - echo "error: Cannot find pipeline: $path" - exit 1 - fi - run_command buildkite-agent pipeline upload "$path" -} - function run_command() { set -x "$@" { set +x; } 2>/dev/null } -assert_build -assert_buildkite_agent -assert_jq -assert_curl -assert_release -assert_canary -upload_buildkite_pipeline ".buildkite/ci.yml" +run_command node ".buildkite/ci.mjs" diff --git a/.buildkite/scripts/upload-artifact.ps1 b/.buildkite/scripts/upload-artifact.ps1 deleted file mode 100755 index b7d79a410b..0000000000 --- a/.buildkite/scripts/upload-artifact.ps1 +++ /dev/null @@ -1,47 +0,0 @@ -param ( - [Parameter(Mandatory=$true)] - [string[]] $Paths, - [switch] $Split -) - -$ErrorActionPreference = "Stop" - -function Assert-Buildkite-Agent() { - if (-not (Get-Command "buildkite-agent" -ErrorAction SilentlyContinue)) { - Write-Error "Cannot find buildkite-agent, please install it: https://buildkite.com/docs/agent/v3/install" - exit 1 - } -} - -function Assert-Split-File() { - if (-not (Get-Command "Split-File" -ErrorAction SilentlyContinue)) { - Write-Error "Cannot find Split-File, please install it: https://www.powershellgallery.com/packages/FileSplitter/1.3" - exit 1 - } -} - -function Upload-Buildkite-Artifact() { - param ( - [Parameter(Mandatory=$true)] - [string] $Path, - ) - if (-not (Test-Path $Path)) { - Write-Error "Could not find artifact: $Path" - exit 1 - } - if ($Split) { - Remove-Item -Path "$Path.*" -Force - Split-File -Path (Resolve-Path $Path) -PartSizeBytes "50MB" -Verbose - $Path = "$Path.*" - } - & buildkite-agent artifact upload "$Path" --debug --debug-http -} - -Assert-Buildkite-Agent -if ($Split) { - Assert-Split-File -} - -foreach ($Path in $Paths) { - Upload-Buildkite-Artifact $Path -} diff --git a/.buildkite/scripts/upload-artifact.sh b/.buildkite/scripts/upload-artifact.sh deleted file mode 100755 index 0284a93c79..0000000000 --- a/.buildkite/scripts/upload-artifact.sh +++ /dev/null @@ -1,54 +0,0 @@ -#!/bin/bash - -set -eo pipefail - -function assert_buildkite_agent() { - if ! command -v buildkite-agent &> /dev/null; then - echo "error: Cannot find buildkite-agent, please install it:" - echo "https://buildkite.com/docs/agent/v3/install" - exit 1 - fi -} - -function assert_split() { - if ! 
command -v split &> /dev/null; then - echo "error: Cannot find split, please install it:" - echo "https://www.gnu.org/software/coreutils/split" - exit 1 - fi -} - -function upload_buildkite_artifact() { - local path="$1"; shift - local split="0" - local args=() - while true; do - if [ -z "$1" ]; then - break - fi - case "$1" in - --split) split="1"; shift ;; - *) args+=("$1"); shift ;; - esac - done - if [ ! -f "$path" ]; then - echo "error: Could not find artifact: $path" - exit 1 - fi - if [ "$split" == "1" ]; then - run_command rm -f "$path."* - run_command split -b 50MB -d "$path" "$path." - run_command buildkite-agent artifact upload "$path.*" "${args[@]}" - else - run_command buildkite-agent artifact upload "$path" "${args[@]}" - fi -} - -function run_command() { - set -x - "$@" - { set +x; } 2>/dev/null -} - -assert_buildkite_agent -upload_buildkite_artifact "$@" diff --git a/.buildkite/scripts/upload-release.sh b/.buildkite/scripts/upload-release.sh index c3328cb9c2..5a69f89861 100755 --- a/.buildkite/scripts/upload-release.sh +++ b/.buildkite/scripts/upload-release.sh @@ -3,6 +3,10 @@ set -eo pipefail function assert_main() { + if [ "$RELEASE" == "1" ]; then + echo "info: Skipping canary release because this is a release build" + exit 0 + fi if [ -z "$BUILDKITE_REPO" ]; then echo "error: Cannot find repository for this build" exit 1 @@ -158,6 +162,27 @@ function upload_s3_file() { run_command aws --endpoint-url="$AWS_ENDPOINT" s3 cp "$file" "s3://$AWS_BUCKET/$folder/$file" } +function send_bench_webhook() { + if [ -z "$BENCHMARK_URL" ]; then + echo "error: \$BENCHMARK_URL is not set" + # exit 1 # TODO: this isn't live yet + return + fi + + local tag="$1" + local commit="$BUILDKITE_COMMIT" + local artifact_path="${commit}" + + if [ "$tag" == "canary" ]; then + artifact_path="${commit}-canary" + fi + + local artifact_url="https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/$artifact_path/bun-linux-x64.zip" + local webhook_url="$BENCHMARK_URL?tag=$tag&commit=$commit&artifact_url=$artifact_url" + + curl -X POST "$webhook_url" +} + function create_release() { assert_main assert_buildkite_agent @@ -186,7 +211,11 @@ function create_release() { function upload_artifact() { local artifact="$1" download_buildkite_artifact "$artifact" - upload_s3_file "releases/$BUILDKITE_COMMIT" "$artifact" & + if [ "$tag" == "canary" ]; then + upload_s3_file "releases/$BUILDKITE_COMMIT-canary" "$artifact" & + else + upload_s3_file "releases/$BUILDKITE_COMMIT" "$artifact" & + fi upload_s3_file "releases/$tag" "$artifact" & upload_github_asset "$tag" "$artifact" & wait @@ -198,6 +227,7 @@ function create_release() { update_github_release "$tag" create_sentry_release "$tag" + send_bench_webhook "$tag" } function assert_canary() { diff --git a/.clang-tidy b/.clang-tidy new file mode 100644 index 0000000000..56bea1f588 --- /dev/null +++ b/.clang-tidy @@ -0,0 +1,9 @@ +WarningsAsErrors: "*" +FormatStyle: webkit +Checks: > + -*, + clang-analyzer-*, + -clang-analyzer-optin.core.EnumCastOutOfRange + -clang-analyzer-webkit.UncountedLambdaCapturesChecker + -clang-analyzer-optin.core.EnumCastOutOfRange + -clang-analyzer-webkit.RefCntblBaseVirtualDtor diff --git a/.clangd b/.clangd index 35856fb414..f736d521d0 100644 --- a/.clangd +++ b/.clangd @@ -1,3 +1,5 @@ Index: Background: Skip # Disable slow background indexing of these files. 
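+# CompilationDatabase points clangd at the compile_commands.json expected under build/debug.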
+CompileFlags: + CompilationDatabase: build/debug diff --git a/.docker/chrome.json b/.docker/chrome.json deleted file mode 100644 index 6bd45b6e04..0000000000 --- a/.docker/chrome.json +++ /dev/null @@ -1,1539 +0,0 @@ -{ - "defaultAction": "SCMP_ACT_ERRNO", - "syscalls": [ - { - "name": "accept", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "accept4", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "access", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "alarm", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "arch_prctl", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "bind", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "brk", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "capget", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "capset", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "chdir", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "chmod", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "chown", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "chown32", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "chroot", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "clock_getres", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "clock_gettime", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "clock_nanosleep", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "clone", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "close", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "connect", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "creat", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "dup", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "dup2", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "dup3", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "epoll_create", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "epoll_create1", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "epoll_ctl", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "epoll_ctl_old", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "epoll_pwait", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "epoll_wait", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "epoll_wait_old", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "eventfd", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "eventfd2", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "execve", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "execveat", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "exit", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "exit_group", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "faccessat", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "fadvise64", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "fadvise64_64", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "fallocate", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "fanotify_init", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "fanotify_mark", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - 
"name": "fchdir", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "fchmod", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "fchmodat", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "fchown", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "fchown32", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "fchownat", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "fcntl", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "fcntl64", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "fdatasync", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "fgetxattr", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "flistxattr", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "flock", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "fork", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "fremovexattr", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "fsetxattr", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "fstat", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "fstat64", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "fstatat64", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "fstatfs", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "fstatfs64", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "fsync", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "ftruncate", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "ftruncate64", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "futex", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "futimesat", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "getcpu", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "getcwd", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "getdents", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "getdents64", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "getegid", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "getegid32", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "geteuid", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "geteuid32", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "getgid", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "getgid32", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "getgroups", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "getgroups32", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "getitimer", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "getpeername", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "getpgid", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "getpgrp", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "getpid", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "getppid", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "getpriority", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "getrandom", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "getresgid", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "getresgid32", - "action": "SCMP_ACT_ALLOW", - "args": 
null - }, - { - "name": "getresuid", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "getresuid32", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "getrlimit", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "get_robust_list", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "getrusage", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "getsid", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "getsockname", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "getsockopt", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "get_thread_area", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "gettid", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "gettimeofday", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "getuid", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "getuid32", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "getxattr", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "inotify_add_watch", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "inotify_init", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "inotify_init1", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "inotify_rm_watch", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "io_cancel", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "ioctl", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "io_destroy", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "io_getevents", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "ioprio_get", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "ioprio_set", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "io_setup", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "io_submit", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "kill", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "lchown", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "lchown32", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "lgetxattr", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "link", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "linkat", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "listen", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "listxattr", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "llistxattr", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "_llseek", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "lremovexattr", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "lseek", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "lsetxattr", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "lstat", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "lstat64", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "madvise", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "memfd_create", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "mincore", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "mkdir", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "mkdirat", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - 
"name": "mknod", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "mknodat", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "mlock", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "mlockall", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "mmap", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "mmap2", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "mprotect", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "mq_getsetattr", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "mq_notify", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "mq_open", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "mq_timedreceive", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "mq_timedsend", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "mq_unlink", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "mremap", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "msgctl", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "msgget", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "msgrcv", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "msgsnd", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "msync", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "munlock", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "munlockall", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "munmap", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "name_to_handle_at", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "nanosleep", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "newfstatat", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "_newselect", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "open", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "open_by_handle_at", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "openat", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "pause", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "pipe", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "pipe2", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "poll", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "ppoll", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "prctl", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "pread64", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "preadv", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "prlimit64", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "pselect6", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "pwrite64", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "pwritev", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "read", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "readahead", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "readlink", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "readlinkat", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "readv", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "recvfrom", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - 
"name": "recvmmsg", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "recvmsg", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "remap_file_pages", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "removexattr", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "rename", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "renameat", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "renameat2", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "rmdir", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "rt_sigaction", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "rt_sigpending", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "rt_sigprocmask", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "rt_sigqueueinfo", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "rt_sigreturn", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "rt_sigsuspend", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "rt_sigtimedwait", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "rt_tgsigqueueinfo", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "sched_getaffinity", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "sched_getattr", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "sched_getparam", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "sched_get_priority_max", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "sched_get_priority_min", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "sched_getscheduler", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "sched_rr_get_interval", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "sched_setaffinity", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "sched_setattr", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "sched_setparam", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "sched_setscheduler", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "sched_yield", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "seccomp", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "select", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "semctl", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "semget", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "semop", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "semtimedop", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "sendfile", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "sendfile64", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "sendmmsg", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "sendmsg", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "sendto", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "setdomainname", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "setfsgid", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "setfsgid32", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "setfsuid", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "setfsuid32", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "setgid", - "action": "SCMP_ACT_ALLOW", - 
"args": null - }, - { - "name": "setgid32", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "setgroups", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "setgroups32", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "sethostname", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "setitimer", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "setns", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "setpgid", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "setpriority", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "setregid", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "setregid32", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "setresgid", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "setresgid32", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "setresuid", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "setresuid32", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "setreuid", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "setreuid32", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "setrlimit", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "set_robust_list", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "setsid", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "setsockopt", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "set_thread_area", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "set_tid_address", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "setuid", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "setuid32", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "setxattr", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "shmat", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "shmctl", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "shmdt", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "shmget", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "shutdown", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "sigaltstack", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "signalfd", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "signalfd4", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "socket", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "socketpair", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "splice", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "stat", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "stat64", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "statfs", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "statfs64", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "symlink", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "symlinkat", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "sync", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "sync_file_range", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "syncfs", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "sysinfo", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": 
"syslog", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "tee", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "tgkill", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "time", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "timer_create", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "timer_delete", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "timerfd_create", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "timerfd_gettime", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "timerfd_settime", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "timer_getoverrun", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "timer_gettime", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "timer_settime", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "times", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "tkill", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "truncate", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "truncate64", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "ugetrlimit", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "umask", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "uname", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "unlink", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "unlinkat", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "unshare", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "utime", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "utimensat", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "utimes", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "vfork", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "vhangup", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "vmsplice", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "wait4", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "waitid", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "write", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { - "name": "writev", - "action": "SCMP_ACT_ALLOW", - "args": null - }, - { "name": "io_uring_setup", "action": "SCMP_ACT_ALLOW", "args": null }, - { "name": "io_uring_enter", "action": "SCMP_ACT_ALLOW", "args": null }, - { "name": "io_uring_register", "action": "SCMP_ACT_ALLOW", "args": null }, - { "name": "copy_file_range", "action": "SCMP_ACT_ALLOW", "args": null } - ] -} diff --git a/.docker/chromium.pref b/.docker/chromium.pref deleted file mode 100644 index fc8e464bd1..0000000000 --- a/.docker/chromium.pref +++ /dev/null @@ -1,14 +0,0 @@ -# Note: 2 blank lines are required between entries - Package: * - Pin: release a=eoan - Pin-Priority: 500 - - Package: * - Pin: origin "ftp.debian.org" - Pin-Priority: 300 - - # Pattern includes 'chromium', 'chromium-browser' and similarly - # named dependencies: - Package: chromium* - Pin: origin "ftp.debian.org" - Pin-Priority: 700 \ No newline at end of file diff --git a/.docker/copy-bun-binary.sh b/.docker/copy-bun-binary.sh deleted file mode 100644 index 5fce2ac5b8..0000000000 --- a/.docker/copy-bun-binary.sh +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/bash - -set -euxo pipefail - -name=$(openssl rand -hex 12) -id=$(docker create 
--name=bun-binary-$name $CONTAINER_TAG) -docker container cp bun-binary-$name:$BUN_RELEASE_DIR bun-binary -echo -e "bun-binary-$name" diff --git a/.docker/debian.list b/.docker/debian.list deleted file mode 100644 index 4954b36f30..0000000000 --- a/.docker/debian.list +++ /dev/null @@ -1,3 +0,0 @@ - deb http://deb.debian.org/debian buster main - deb http://deb.debian.org/debian buster-updates main - deb http://deb.debian.org/debian-security buster/updates main \ No newline at end of file diff --git a/.docker/dockerfile-common.sh b/.docker/dockerfile-common.sh deleted file mode 100644 index c9c1a4efa3..0000000000 --- a/.docker/dockerfile-common.sh +++ /dev/null @@ -1,34 +0,0 @@ -export DOCKER_BUILDKIT=1 - -export BUILDKIT_ARCH=$(uname -m) -export ARCH=${BUILDKIT_ARCH} - -if [ "$BUILDKIT_ARCH" == "amd64" ]; then - export BUILDKIT_ARCH="amd64" - export ARCH=x64 -fi - -if [ "$BUILDKIT_ARCH" == "x86_64" ]; then - export BUILDKIT_ARCH="amd64" - export ARCH=x64 -fi - -if [ "$BUILDKIT_ARCH" == "arm64" ]; then - export BUILDKIT_ARCH="arm64" - export ARCH=aarch64 -fi - -if [ "$BUILDKIT_ARCH" == "aarch64" ]; then - export BUILDKIT_ARCH="arm64" - export ARCH=aarch64 -fi - -if [ "$BUILDKIT_ARCH" == "armv7l" ]; then - echo "Unsupported platform: $BUILDKIT_ARCH" - exit 1 -fi - -export BUILD_ID=$(cat build-id) -export CONTAINER_NAME=bun-linux-$ARCH -export DEBUG_CONTAINER_NAME=debug-bun-linux-$ARCH -export TEMP=/tmp/bun-0.0.$BUILD_ID diff --git a/.docker/pull.sh b/.docker/pull.sh deleted file mode 100644 index 96c6922514..0000000000 --- a/.docker/pull.sh +++ /dev/null @@ -1,11 +0,0 @@ -#!/bin/bash - -set -euxo pipefail - -docker pull bunbunbunbun/bun-test-base:latest --platform=linux/amd64 -docker pull bunbunbunbun/bun-base:latest --platform=linux/amd64 -docker pull bunbunbunbun/bun-base-with-zig-and-webkit:latest --platform=linux/amd64 - -docker tag bunbunbunbun/bun-test-base:latest bun-base:latest -docker tag bunbunbunbun/bun-base:latest bun-base:latest -docker tag bunbunbunbun/bun-base-with-zig-and-webkit:latest bun-base-with-zig-and-webkit:latest diff --git a/.docker/run-dockerfile.sh b/.docker/run-dockerfile.sh deleted file mode 100644 index df22cd2b61..0000000000 --- a/.docker/run-dockerfile.sh +++ /dev/null @@ -1,47 +0,0 @@ -#!/bin/bash - -source "dockerfile-common.sh" - -export $CONTAINER_NAME=$CONTAINER_NAME-local - -rm -rf $TEMP -mkdir -p $TEMP - -docker build . 
--target release --progress=plain -t $CONTAINER_NAME:latest --build-arg BUILDKIT_INLINE_CACHE=1 --platform=linux/$BUILDKIT_ARCH --cache-from $CONTAINER_NAME:latest - -if (($?)); then - echo "Failed to build container" - exit 1 -fi - -id=$(docker create $CONTAINER_NAME:latest) -docker cp $id:/home/ubuntu/bun-release $TEMP/$CONTAINER_NAME -if (($?)); then - echo "Failed to cp container" - exit 1 -fi - -cd $TEMP -mkdir -p $TEMP/$CONTAINER_NAME $TEMP/$DEBUG_CONTAINER_NAME -mv $CONTAINER_NAME/bun-profile $DEBUG_CONTAINER_NAME/bun -zip -r $CONTAINER_NAME.zip $CONTAINER_NAME -zip -r $DEBUG_CONTAINER_NAME.zip $DEBUG_CONTAINER_NAME -docker rm -v $id -abs=$(realpath $TEMP/$CONTAINER_NAME.zip) -debug_abs=$(realpath $TEMP/$DEBUG_CONTAINER_NAME.zip) - -case $(uname -s) in -"Linux") target="linux" ;; -*) target="other" ;; -esac - -if [ "$target" = "linux" ]; then - if command -v bun --version >/dev/null; then - cp $TEMP/$CONTAINER_NAME/bun $(which bun) - cp $TEMP/$DEBUG_CONTAINER_NAME/bun $(which bun-profile) - fi -fi - -echo "Saved to:" -echo $debug_abs -echo $abs diff --git a/.docker/run-test.sh b/.docker/run-test.sh deleted file mode 100755 index c088adf5ae..0000000000 --- a/.docker/run-test.sh +++ /dev/null @@ -1,9 +0,0 @@ -#!/bin/bash - -set -euxo pipefail - -bun install -bun install --cwd ./test/snippets -bun install --cwd ./test/scripts - -make $BUN_TEST_NAME diff --git a/.docker/runner.sh b/.docker/runner.sh deleted file mode 100644 index 837ff85663..0000000000 --- a/.docker/runner.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/bash - -set -euxo pipefail - -docker container run --security-opt seccomp=.docker/chrome.json --env GITHUB_WORKSPACE=$GITHUB_WORKSPACE --env BUN_TEST_NAME=$BUN_TEST_NAME --ulimit memlock=-1:-1 --init --rm bun-test:latest diff --git a/.docker/unit-tests.sh b/.docker/unit-tests.sh deleted file mode 100644 index 2917a5ef0d..0000000000 --- a/.docker/unit-tests.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/bash - -set -euxo pipefail - -docker container run --security-opt seccomp=.docker/chrome.json --env GITHUB_WORKSPACE=$GITHUB_WORKSPACE --ulimit memlock=-1:-1 --init --rm bun-unit-tests:latest diff --git a/.dockerignore b/.dockerignore index 239d9da881..6a0ae98134 100644 --- a/.dockerignore +++ b/.dockerignore @@ -11,5 +11,8 @@ packages/**/bun-profile src/bun.js/WebKit src/bun.js/WebKit/LayoutTests zig-build -zig-cache -zig-out \ No newline at end of file +.zig-cache +zig-out +build +vendor +node_modules diff --git a/.gitattributes b/.gitattributes index 6c3caa3fe5..1b3908f258 100644 --- a/.gitattributes +++ b/.gitattributes @@ -7,6 +7,7 @@ *.cpp text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2 *.cc text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2 *.yml text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2 +*.toml text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2 *.zig text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2 *.rs text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2 *.h text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2 @@ -43,5 +44,10 @@ test/**/* linguist-documentation bench/**/* linguist-documentation examples/**/* linguist-documentation -src/deps/*.c linguist-vendored -src/deps/brotli/** linguist-vendored +vendor/*.c linguist-vendored +vendor/brotli/** linguist-vendored + 
+test/js/node/test/fixtures linguist-vendored +test/js/node/test/common linguist-vendored + +test/js/bun/css/files linguist-vendored diff --git a/.github/ISSUE_TEMPLATE/3-typescript-bug-report.yml b/.github/ISSUE_TEMPLATE/3-typescript-bug-report.yml index 7b745a4aef..3913e25272 100644 --- a/.github/ISSUE_TEMPLATE/3-typescript-bug-report.yml +++ b/.github/ISSUE_TEMPLATE/3-typescript-bug-report.yml @@ -1,6 +1,6 @@ name: 🇹 TypeScript Type Bug Report description: Report an issue with TypeScript types -labels: [bug, typescript] +labels: [bug, types] body: - type: markdown attributes: diff --git a/.github/ISSUE_TEMPLATE/7-install-crash-report.yml b/.github/ISSUE_TEMPLATE/7-install-crash-report.yml index 9239188ca0..e88397b393 100644 --- a/.github/ISSUE_TEMPLATE/7-install-crash-report.yml +++ b/.github/ISSUE_TEMPLATE/7-install-crash-report.yml @@ -2,11 +2,18 @@ name: bun install crash report description: Report a crash in bun install labels: - npm + - crash body: - type: markdown attributes: value: | **Thank you so much** for submitting a crash report. You're helping us make Bun more reliable for everyone! + - type: textarea + id: package_json + attributes: + label: "`package.json` file" + description: "Can you upload your `package.json` file? This helps us reproduce the crash." + render: json - type: textarea id: repro attributes: diff --git a/.github/actions/setup-bun/action.yml b/.github/actions/setup-bun/action.yml index 9d04e28cc4..0e6e6103c8 100644 --- a/.github/actions/setup-bun/action.yml +++ b/.github/actions/setup-bun/action.yml @@ -42,9 +42,10 @@ runs: canary) release="canary";; *) release="bun-v${{ inputs.bun-version }}";; esac - curl -LO "${{ inputs.download-url }}/${release}/${target}.zip" + curl -LO "${{ inputs.download-url }}/${release}/${target}.zip" --retry 5 unzip ${target}.zip mkdir -p ${{ runner.temp }}/.bun/bin mv ${target}/bun* ${{ runner.temp }}/.bun/bin/ chmod +x ${{ runner.temp }}/.bun/bin/* + ln -fs ${{ runner.temp }}/.bun/bin/bun ${{ runner.temp }}/.bun/bin/bunx echo "${{ runner.temp }}/.bun/bin" >> ${GITHUB_PATH} diff --git a/.github/workflows/clang-format.yml b/.github/workflows/clang-format.yml new file mode 100644 index 0000000000..bb2cca1880 --- /dev/null +++ b/.github/workflows/clang-format.yml @@ -0,0 +1,41 @@ +name: clang-format + +permissions: + contents: write + +on: + workflow_call: + workflow_dispatch: + pull_request: + merge_group: + +env: + BUN_VERSION: "1.1.27" + LLVM_VERSION: "18.1.8" + LLVM_VERSION_MAJOR: "18" + +jobs: + clang-format: + name: clang-format + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + - name: Setup Bun + uses: ./.github/actions/setup-bun + with: + bun-version: ${{ env.BUN_VERSION }} + - name: Install LLVM + run: | + curl -fsSL https://apt.llvm.org/llvm.sh | sudo bash -s -- ${{ env.LLVM_VERSION_MAJOR }} all + - name: Clang Format + env: + LLVM_VERSION: ${{ env.LLVM_VERSION }} + run: | + bun run clang-format + - name: Commit + uses: stefanzweifel/git-auto-commit-action@v5 + with: + commit_message: "`bun run clang-format`" diff --git a/.github/workflows/clang-tidy.yml b/.github/workflows/clang-tidy.yml new file mode 100644 index 0000000000..a6f06ad620 --- /dev/null +++ b/.github/workflows/clang-tidy.yml @@ -0,0 +1,41 @@ +name: clang-tidy + +permissions: + contents: write + +on: + workflow_call: + workflow_dispatch: + pull_request: + merge_group: + +env: + BUN_VERSION: "1.1.27" + LLVM_VERSION: "18.1.8" + LLVM_VERSION_MAJOR: "18" + +jobs: + clang-tidy: + name: clang-tidy + 
runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + - name: Setup Bun + uses: ./.github/actions/setup-bun + with: + bun-version: ${{ env.BUN_VERSION }} + - name: Install LLVM + run: | + curl -fsSL https://apt.llvm.org/llvm.sh | sudo bash -s -- ${{ env.LLVM_VERSION_MAJOR }} all + - name: Clang Tidy + env: + LLVM_VERSION: ${{ env.LLVM_VERSION }} + run: | + bun run clang-tidy:diff + - name: Commit + uses: stefanzweifel/git-auto-commit-action@v5 + with: + commit_message: "`bun run clang-tidy`" diff --git a/.github/workflows/labeled.yml b/.github/workflows/labeled.yml index 961ae7926f..3529f724b4 100644 --- a/.github/workflows/labeled.yml +++ b/.github/workflows/labeled.yml @@ -7,6 +7,42 @@ on: types: [labeled] jobs: + # on-bug: + # runs-on: ubuntu-latest + # if: github.event.label.name == 'bug' || github.event.label.name == 'crash' + # permissions: + # issues: write + # steps: + # - name: Checkout + # uses: actions/checkout@v4 + # with: + # sparse-checkout: | + # scripts + # .github + # CMakeLists.txt + # - name: Setup Bun + # uses: ./.github/actions/setup-bun + # with: + # bun-version: "1.1.24" + # - name: "categorize bug" + # id: add-labels + # env: + # GITHUB_ISSUE_BODY: ${{ github.event.issue.body }} + # GITHUB_ISSUE_TITLE: ${{ github.event.issue.title }} + # ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} + # shell: bash + # run: | + # echo '{"dependencies": { "@anthropic-ai/sdk": "latest" }}' > scripts/package.json && bun install --cwd=./scripts + # LABELS=$(bun scripts/label-issue.ts) + # echo "labels=$LABELS" >> $GITHUB_OUTPUT + # - name: Add labels + # uses: actions-cool/issues-helper@v3 + # if: steps.add-labels.outputs.labels != '' + # with: + # actions: "add-labels" + # token: ${{ secrets.GITHUB_TOKEN }} + # issue-number: ${{ github.event.issue.number }} + # labels: ${{ steps.add-labels.outputs.labels }} on-labeled: runs-on: ubuntu-latest if: github.event.label.name == 'crash' || github.event.label.name == 'needs repro' @@ -47,6 +83,26 @@ jobs: echo "latest=$(cat LATEST)" >> $GITHUB_OUTPUT rm -rf is-outdated.txt outdated.txt latest.txt + - name: Generate comment text with Sentry Link + if: github.event.label.name == 'crash' + # ignore if fail + continue-on-error: true + id: generate-comment-text + env: + GITHUB_ISSUE_BODY: ${{ github.event.issue.body }} + SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_EVENTS_SECRET }} + shell: bash + run: | + bun scripts/associate-issue-with-sentry.ts + + if [[ -f "sentry-link.txt" ]]; then + echo "sentry-link=$(cat sentry-link.txt)" >> $GITHUB_OUTPUT + fi + + if [[ -f "sentry-id.txt" ]]; then + echo "sentry-id=$(cat sentry-id.txt)" >> $GITHUB_OUTPUT + fi + - name: Add labels uses: actions-cool/issues-helper@v3 if: github.event.label.name == 'crash' @@ -56,7 +112,7 @@ jobs: issue-number: ${{ github.event.issue.number }} labels: ${{ steps.add-labels.outputs.labels }} - name: Comment outdated - if: steps.add-labels.outputs.is-outdated == 'true' && github.event.label.name == 'crash' + if: steps.add-labels.outputs.is-outdated == 'true' && github.event.label.name == 'crash' && steps.generate-comment-text.outputs.sentry-link == '' uses: actions-cool/issues-helper@v3 with: actions: "create-comment" @@ -70,6 +126,40 @@ jobs: ```sh bun upgrade ``` + - name: Comment with Sentry Link and outdated version + if: steps.generate-comment-text.outputs.sentry-link != '' && github.event.label.name == 'crash' && steps.add-labels.outputs.is-outdated == 'true' + uses: actions-cool/issues-helper@v3 + with: + actions: 
"create-comment" + token: ${{ secrets.GITHUB_TOKEN }} + issue-number: ${{ github.event.issue.number }} + body: | + @${{ github.event.issue.user.login }}, thank you for reporting this crash. The latest version of Bun is v${{ steps.add-labels.outputs.latest }}, but this crash was reported on Bun v${{ steps.add-labels.outputs.oudated }}. + + Are you able to reproduce this crash on the latest version of Bun? + + ```sh + bun upgrade + ``` + + For Bun's internal tracking, this issue is [${{ steps.generate-comment-text.outputs.sentry-id }}](${{ steps.generate-comment-text.outputs.sentry-link }}). + + + + - name: Comment with Sentry Link + if: steps.generate-comment-text.outputs.sentry-link != '' && github.event.label.name == 'crash' && steps.add-labels.outputs.is-outdated != 'true' + uses: actions-cool/issues-helper@v3 + with: + actions: "create-comment" + token: ${{ secrets.GITHUB_TOKEN }} + issue-number: ${{ github.event.issue.number }} + body: | + Thank you for reporting this crash. + + For Bun's internal tracking, this issue is [${{ steps.generate-comment-text.outputs.sentry-id }}](${{ steps.generate-comment-text.outputs.sentry-link }}). + + + - name: Comment needs repro if: github.event.label.name == 'needs repro' uses: actions-cool/issues-helper@v3 @@ -78,4 +168,4 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} issue-number: ${{ github.event.issue.number }} body: | - Hello @${{ github.event.issue.user.login }}. Please provide a [minimal reproduction](https://stackoverflow.com/help/minimal-reproducible-example) using a GitHub repository, [Replit](https://replit.com/@replit/Bun), or [CodeSandbox](https://codesandbox.io/templates/bun). Issues marked with `needs repro` will be closed if they have no activity within 3 days. + Hello @${{ github.event.issue.user.login }}. Please provide a [minimal reproduction](https://stackoverflow.com/help/minimal-reproducible-example) using a GitHub repository, [Replit](https://replit.com/@replit/Bun), [CodeSandbox](https://codesandbox.io/templates/bun), or provide a bulleted list of commands to run that reproduce this issue. Issues marked with `needs repro` will be closed if they have no activity within 3 days. 
diff --git a/.github/workflows/lint-cpp.yml b/.github/workflows/lint-cpp.yml deleted file mode 100644 index 6dffbf0599..0000000000 --- a/.github/workflows/lint-cpp.yml +++ /dev/null @@ -1,30 +0,0 @@ -name: lint-cpp - -permissions: - contents: read - -concurrency: - group: ${{ github.workflow }}-${{ github.event_name == 'workflow_dispatch' && inputs.run-id || github.ref }} - cancel-in-progress: true - -on: - workflow_dispatch: - inputs: - run-id: - type: string - description: The workflow ID to download artifacts (skips the build step) - pull_request: - paths: - - ".github/workflows/lint-cpp.yml" - - "**/*.cpp" - - "src/deps/**/*" - - "CMakeLists.txt" - -jobs: - lint-cpp: - if: ${{ !inputs.run-id }} - name: Lint C++ - uses: ./.github/workflows/run-lint-cpp.yml - secrets: inherit - with: - pr-number: ${{ github.event.number }} diff --git a/.github/workflows/prettier-format.yml b/.github/workflows/prettier-format.yml new file mode 100644 index 0000000000..43a407443e --- /dev/null +++ b/.github/workflows/prettier-format.yml @@ -0,0 +1,37 @@ +name: prettier-format + +permissions: + contents: write + +on: + workflow_call: + workflow_dispatch: + pull_request: + merge_group: + +env: + BUN_VERSION: "1.1.27" + +jobs: + prettier-format: + name: prettier-format + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + - name: Setup Bun + uses: ./.github/actions/setup-bun + with: + bun-version: ${{ env.BUN_VERSION }} + - name: Setup Dependencies + run: | + bun install + - name: Prettier Format + run: | + bun run prettier:diff + - name: Commit + uses: stefanzweifel/git-auto-commit-action@v5 + with: + commit_message: "`bun run prettier:extra`" diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index cbe6b3e93a..ab0bf70103 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -88,6 +88,9 @@ jobs: steps: - name: Checkout uses: actions/checkout@v4 + with: + # To workaround issue + ref: main - name: Setup Bun uses: ./.github/actions/setup-bun with: diff --git a/.github/workflows/run-format.yml b/.github/workflows/run-format.yml deleted file mode 100644 index 4d03ce22e8..0000000000 --- a/.github/workflows/run-format.yml +++ /dev/null @@ -1,52 +0,0 @@ -name: Format - -permissions: - contents: write - -on: - workflow_call: - inputs: - zig-version: - type: string - required: true - -jobs: - format: - name: Format - runs-on: ubuntu-latest - if: ${{ github.ref != 'refs/heads/main' }} - steps: - - name: Checkout - uses: actions/checkout@v4 - with: - sparse-checkout: | - .github - src - scripts - packages - test - bench - - name: Setup Bun - uses: ./.github/actions/setup-bun - with: - bun-version: "1.1.20" - - name: Setup Zig - uses: mlugg/setup-zig@v1 - with: - version: ${{ inputs.zig-version }} - - name: Install Dependencies - run: | - bun install - - name: Format - run: | - bun fmt - - name: Format Zig - run: | - bun fmt:zig - - name: Generate submodule versions - run: | - bash ./scripts/write-versions.sh - - name: Commit - uses: stefanzweifel/git-auto-commit-action@v5 - with: - commit_message: Apply formatting changes diff --git a/.github/workflows/run-lint-cpp.yml b/.github/workflows/run-lint-cpp.yml deleted file mode 100644 index 3c394a46a3..0000000000 --- a/.github/workflows/run-lint-cpp.yml +++ /dev/null @@ -1,84 +0,0 @@ -name: lint-cpp - -permissions: - contents: read -env: - LLVM_VERSION: 18 - LC_CTYPE: "en_US.UTF-8" - LC_ALL: "en_US.UTF-8" - -on: - workflow_call: - inputs: - pr-number: - required: true - 
type: number - -jobs: - lint-cpp: - name: Lint C++ - runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-13-xlarge' || 'macos-13' }} - steps: - - name: Checkout - uses: actions/checkout@v4 - with: - submodules: recursive - - name: Setup Bun - uses: ./.github/actions/setup-bun - with: - bun-version: latest - - name: Install Dependencies - env: - HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1 - HOMEBREW_NO_AUTO_UPDATE: 1 - HOMEBREW_NO_INSTALL_CLEANUP: 1 - run: | - brew install \ - llvm@${{ env.LLVM_VERSION }} \ - ninja \ - coreutils \ - openssl@1.1 \ - libiconv \ - gnu-sed --force --overwrite - echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH - echo "$(brew --prefix llvm@$LLVM_VERSION)/bin" >> $GITHUB_PATH - brew link --overwrite llvm@$LLVM_VERSION - - name: Bun install - run: | - bun install - - name: clang-tidy - id: format - env: - CPU_TARGET: native - BUN_SILENT: 1 - run: | - rm -f did_fail format.log - echo "${{ inputs.pr-number }}" > pr-number.txt - echo "pr_number=$(cat pr-number.txt)" >> $GITHUB_OUTPUT - bun run --silent build:tidy &> >(tee -p format.log) && echo 0 > did_succeed.txt - # Upload format.log as github artifact for the workflow - if [ -f did_succeed.txt ]; then - echo "0" > did_fail.txt - else - echo "1" > did_fail.txt - fi - echo "did_fail=$(cat did_fail.txt)" >> $GITHUB_OUTPUT - - - name: Upload format.log - uses: actions/upload-artifact@v2 - with: - name: format.log - path: format.log - - name: Upload PR - uses: actions/upload-artifact@v2 - with: - name: pr-number.txt - path: pr-number.txt - - name: Upload PR - uses: actions/upload-artifact@v2 - with: - name: did_fail.txt - path: did_fail.txt - - name: Fail if formatting failed - if: ${{ steps.format.outputs.did_fail == '1' }} - run: exit 1 diff --git a/.github/workflows/stale.yaml b/.github/workflows/stale.yaml new file mode 100644 index 0000000000..a24bb2cd6b --- /dev/null +++ b/.github/workflows/stale.yaml @@ -0,0 +1,30 @@ +name: Close inactive issues +on: + # schedule: + # - cron: "15 * * * *" + workflow_dispatch: + +jobs: + close-issues: + runs-on: ubuntu-latest + permissions: + issues: write + pull-requests: write + steps: + - uses: actions/stale@v5 + with: + days-before-issue-close: 5 + any-of-issue-labels: "needs repro,waiting-for-author" + exempt-issue-labels: "neverstale" + exempt-pr-labels: "neverstale" + remove-stale-when-updated: true + stale-issue-label: "stale" + stale-pr-label: "stale" + stale-issue-message: "This issue is stale and may be closed due to inactivity. If you're still running into this, please leave a comment." + close-issue-message: "This issue was closed because it has been inactive for 5 days since being marked as stale." + days-before-pr-stale: 30 + days-before-pr-close: 14 + stale-pr-message: "This pull request is stale and may be closed due to inactivity." + close-pr-message: "This pull request has been closed due to inactivity." 
+ repo-token: ${{ github.token }} + operations-per-run: 1000 diff --git a/.github/workflows/zig-format.yml b/.github/workflows/zig-format.yml new file mode 100644 index 0000000000..24d5577ad7 --- /dev/null +++ b/.github/workflows/zig-format.yml @@ -0,0 +1,34 @@ +name: zig-format + +permissions: + contents: write + +on: + workflow_call: + workflow_dispatch: + pull_request: + merge_group: + +env: + BUN_VERSION: "1.1.27" + +jobs: + zig-format: + name: zig-format + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + - name: Setup Bun + uses: ./.github/actions/setup-bun + with: + bun-version: ${{ env.BUN_VERSION }} + - name: Zig Format + run: | + bun run zig-format:diff + - name: Commit + uses: stefanzweifel/git-auto-commit-action@v5 + with: + commit_message: "`bun run zig-format`" diff --git a/.gitignore b/.gitignore index a0c4419668..28ddadeb87 100644 --- a/.gitignore +++ b/.gitignore @@ -49,7 +49,6 @@ /build-*/ /bun-webkit /kcov-out -/src/deps/libuv /test-report.json /test-report.md /test.js @@ -117,11 +116,6 @@ sign.json src/bun.js/bindings-obj src/bun.js/bindings/GeneratedJS2Native.zig src/bun.js/debug-bindings-obj -src/deps/c-ares/build -src/deps/libiconv -src/deps/openssl -src/deps/PLCrashReporter/ -src/deps/s2n-tls src/deps/zig-clap/.gitattributes src/deps/zig-clap/.github src/deps/zig-clap/example @@ -146,3 +140,31 @@ zig-out test/node.js/upstream .zig-cache scripts/env.local +*.generated.ts +src/bake/generated.ts + +# Dependencies +/vendor + +# Dependencies (before CMake) +# These can be removed in the far future +/src/bun.js/WebKit +/src/deps/WebKit +/src/deps/boringssl +/src/deps/brotli +/src/deps/c*ares +/src/deps/lol*html +/src/deps/libarchive +/src/deps/libdeflate +/src/deps/libuv +/src/deps/ls*hpack +/src/deps/mimalloc +/src/deps/picohttpparser +/src/deps/tinycc +/src/deps/zstd +/src/deps/zlib +/src/deps/zig + +# Generated files + +.buildkite/ci.yml diff --git a/.gitmodules b/.gitmodules deleted file mode 100644 index c5069240a4..0000000000 --- a/.gitmodules +++ /dev/null @@ -1,88 +0,0 @@ -[submodule "src/javascript/jsc/WebKit"] -path = src/bun.js/WebKit -url = https://github.com/oven-sh/WebKit.git -ignore = dirty -depth = 1 -update = none -shallow = true -fetchRecurseSubmodules = false -[submodule "src/deps/picohttpparser"] -path = src/deps/picohttpparser -url = https://github.com/h2o/picohttpparser.git -ignore = dirty -depth = 1 -shallow = true -fetchRecurseSubmodules = false -[submodule "src/deps/mimalloc"] -path = src/deps/mimalloc -url = https://github.com/Jarred-Sumner/mimalloc.git -ignore = dirty -depth = 1 -shallow = true -fetchRecurseSubmodules = false -[submodule "src/deps/zlib"] -path = src/deps/zlib -url = https://github.com/cloudflare/zlib.git -ignore = dirty -depth = 1 -shallow = true -fetchRecurseSubmodules = false -[submodule "src/deps/libarchive"] -path = src/deps/libarchive -url = https://github.com/libarchive/libarchive.git -ignore = dirty -depth = 1 -shallow = true -fetchRecurseSubmodules = false -[submodule "src/deps/boringssl"] -path = src/deps/boringssl -url = https://github.com/oven-sh/boringssl.git -ignore = dirty -depth = 1 -shallow = true -fetchRecurseSubmodules = false -[submodule "src/deps/lol-html"] -path = src/deps/lol-html -url = https://github.com/cloudflare/lol-html -ignore = dirty -depth = 1 -shallow = true -fetchRecurseSubmodules = false -[submodule "src/deps/tinycc"] -path = src/deps/tinycc -url = https://github.com/Jarred-Sumner/tinycc.git -ignore = dirty -depth = 1 -shallow = true 
-fetchRecurseSubmodules = false -[submodule "src/deps/c-ares"] -path = src/deps/c-ares -url = https://github.com/c-ares/c-ares.git -ignore = dirty -depth = 1 -shallow = true -fetchRecurseSubmodules = false -[submodule "src/deps/zstd"] -path = src/deps/zstd -url = https://github.com/facebook/zstd.git -ignore = dirty -depth = 1 -shallow = true -fetchRecurseSubmodules = false -[submodule "src/deps/ls-hpack"] -path = src/deps/ls-hpack -url = https://github.com/litespeedtech/ls-hpack.git -ignore = dirty -depth = 1 -shallow = true -fetchRecurseSubmodules = false -[submodule "zig"] -path = src/deps/zig -url = https://github.com/oven-sh/zig -depth = 1 -shallow = true -fetchRecurseSubmodules = false -[submodule "src/deps/libdeflate"] -path = src/deps/libdeflate -url = https://github.com/ebiggers/libdeflate -ignore = "dirty" diff --git a/.lldbinit b/.lldbinit index 5a59503d5d..b54a4195c3 100644 --- a/.lldbinit +++ b/.lldbinit @@ -1,4 +1,4 @@ -command script import src/deps/zig/tools/lldb_pretty_printers.py -command script import src/bun.js/WebKit/Tools/lldb/lldb_webkit.py +command script import vendor/zig/tools/lldb_pretty_printers.py +command script import vendor/WebKit/Tools/lldb/lldb_webkit.py # type summary add --summary-string "${var} | inner=${var[0-30]}, source=${var[33-64]}, tag=${var[31-32]}" "unsigned long" diff --git a/.prettierignore b/.prettierignore index d7360d9d2f..da765c9c28 100644 --- a/.prettierignore +++ b/.prettierignore @@ -1,7 +1,10 @@ src/bun.js/WebKit -src/deps +vendor test/snapshots test/js/deno test/node.js src/react-refresh.js *.min.js +test/js/node/test/fixtures +test/js/node/test/common +test/snippets diff --git a/.vscode/c_cpp_properties.json b/.vscode/c_cpp_properties.json index 3bd648a1ff..8e3517a2e3 100644 --- a/.vscode/c_cpp_properties.json +++ b/.vscode/c_cpp_properties.json @@ -3,18 +3,18 @@ { "name": "Debug", "forcedInclude": ["${workspaceFolder}/src/bun.js/bindings/root.h"], - "compileCommands": "${workspaceFolder}/build/compile_commands.json", + "compileCommands": "${workspaceFolder}/build/debug/compile_commands.json", "includePath": [ "${workspaceFolder}/build/bun-webkit/include", - "${workspaceFolder}/build/codegen", + "${workspaceFolder}/build/debug/codegen", "${workspaceFolder}/src/bun.js/bindings/", "${workspaceFolder}/src/bun.js/bindings/webcore/", "${workspaceFolder}/src/bun.js/bindings/sqlite/", "${workspaceFolder}/src/bun.js/bindings/webcrypto/", "${workspaceFolder}/src/bun.js/modules/", "${workspaceFolder}/src/js/builtins/", - "${workspaceFolder}/src/deps/boringssl/include/", - "${workspaceFolder}/src/deps", + "${workspaceFolder}/vendor/boringssl/include/", + "${workspaceFolder}/vendor", "${workspaceFolder}/src/napi/*", "${workspaceFolder}/packages/bun-usockets/src", "${workspaceFolder}/packages/", @@ -26,8 +26,8 @@ "${workspaceFolder}/src/napi/*", "${workspaceFolder}/src/js/builtins/*", "${workspaceFolder}/src/bun.js/modules/*", - "${workspaceFolder}/src/deps/*", - "${workspaceFolder}/src/deps/boringssl/include/*", + "${workspaceFolder}/vendor/*", + "${workspaceFolder}/vendor/boringssl/include/*", "${workspaceFolder}/packages/bun-usockets/*", "${workspaceFolder}/packages/bun-uws/*", "${workspaceFolder}/src/napi/*", @@ -55,12 +55,12 @@ "name": "BunWithJSCDebug", "forcedInclude": ["${workspaceFolder}/src/bun.js/bindings/root.h"], "includePath": [ - "${workspaceFolder}/build/codegen", - "${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/", - "${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/ICU/Headers/", - 
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/JavaScriptCore/PrivateHeaders/", - "${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/WTF/Headers", - "${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/bmalloc/Headers/", + "${workspaceFolder}/build/debug/codegen", + "${workspaceFolder}/vendor/WebKit/WebKitBuild/Debug/", + "${workspaceFolder}/vendor/WebKit/WebKitBuild/Debug/ICU/Headers/", + "${workspaceFolder}/vendor/WebKit/WebKitBuild/Debug/JavaScriptCore/PrivateHeaders/", + "${workspaceFolder}/vendor/WebKit/WebKitBuild/Debug/WTF/Headers", + "${workspaceFolder}/vendor/WebKit/WebKitBuild/Debug/bmalloc/Headers/", "${workspaceFolder}/src/bun.js/bindings/", "${workspaceFolder}/src/bun.js/bindings/webcore/", "${workspaceFolder}/src/bun.js/bindings/sqlite/", @@ -68,19 +68,19 @@ "${workspaceFolder}/src/bun.js/modules/", "${workspaceFolder}/src/js/builtins/", "${workspaceFolder}/src/js/out", - "${workspaceFolder}/src/deps/boringssl/include/", - "${workspaceFolder}/src/deps", + "${workspaceFolder}/vendor/boringssl/include/", + "${workspaceFolder}/vendor", "${workspaceFolder}/src/napi/*", "${workspaceFolder}/packages/bun-usockets/src", "${workspaceFolder}/packages/", ], "browse": { "path": [ - "${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/", - "${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/ICU/Headers/", - "${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/JavaScriptCore/PrivateHeaders/**", - "${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/WTF/Headers/**", - "${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/bmalloc/Headers/**", + "${workspaceFolder}/vendor/WebKit/WebKitBuild/Debug/", + "${workspaceFolder}/vendor/WebKit/WebKitBuild/Debug/ICU/Headers/", + "${workspaceFolder}/vendor/WebKit/WebKitBuild/Debug/JavaScriptCore/PrivateHeaders/**", + "${workspaceFolder}/vendor/WebKit/WebKitBuild/Debug/WTF/Headers/**", + "${workspaceFolder}/vendor/WebKit/WebKitBuild/Debug/bmalloc/Headers/**", "${workspaceFolder}/src/bun.js/bindings/*", "${workspaceFolder}/src/bun.js/bindings/*", "${workspaceFolder}/src/napi/*", @@ -90,8 +90,8 @@ "${workspaceFolder}/src/js/builtins/*", "${workspaceFolder}/src/js/out/*", "${workspaceFolder}/src/bun.js/modules/*", - "${workspaceFolder}/src/deps", - "${workspaceFolder}/src/deps/boringssl/include/", + "${workspaceFolder}/vendor", + "${workspaceFolder}/vendor/boringssl/include/", "${workspaceFolder}/packages/bun-usockets/", "${workspaceFolder}/packages/bun-uws/", "${workspaceFolder}/src/napi", diff --git a/.vscode/launch.json b/.vscode/launch.json index 5326a6f15f..191c0a815e 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -12,31 +12,35 @@ "type": "lldb", "request": "launch", "name": "bun test [file]", - "program": "${workspaceFolder}/build/bun-debug", + "program": "${workspaceFolder}/build/debug/bun-debug", "args": ["test", "${file}"], - "cwd": "${workspaceFolder}/test", + "cwd": "${workspaceFolder}", "env": { "FORCE_COLOR": "1", "BUN_DEBUG_QUIET_LOGS": "1", + "BUN_DEBUG_jest": "1", "BUN_GARBAGE_COLLECTOR_LEVEL": "1", }, "console": "internalConsole", + // Don't pause when the GC runs while the debugger is open. 
+ "postRunCommands": ["process handle -p true -s false -n false SIGUSR1"], }, { "type": "lldb", "request": "launch", "name": "bun test [file] --only", - "program": "${workspaceFolder}/build/bun-debug", + "program": "${workspaceFolder}/build/debug/bun-debug", "args": ["test", "--only", "${file}"], - "cwd": "${workspaceFolder}/test", + "cwd": "${workspaceFolder}", "env": { "FORCE_COLOR": "1", "BUN_DEBUG_QUIET_LOGS": "1", "BUN_GARBAGE_COLLECTOR_LEVEL": "1", - "BUN_DEBUG_FileReader": "1", "BUN_DEBUG_jest": "1", }, "console": "internalConsole", + // Don't pause when the GC runs while the debugger is open. + "postRunCommands": ["process handle -p true -s false -n false SIGUSR1"], }, { "type": "lldb", @@ -48,72 +52,87 @@ "type": "lldb", "request": "launch", "name": "bun test [file] (fast)", - "program": "${workspaceFolder}/build/bun-debug", + "program": "${workspaceFolder}/build/debug/bun-debug", "args": ["test", "${file}"], - "cwd": "${workspaceFolder}/test", + "cwd": "${workspaceFolder}", "env": { "FORCE_COLOR": "1", "BUN_DEBUG_QUIET_LOGS": "1", + "BUN_DEBUG_jest": "1", "BUN_GARBAGE_COLLECTOR_LEVEL": "0", }, "console": "internalConsole", + // Don't pause when the GC runs while the debugger is open. + "postRunCommands": ["process handle -p true -s false -n false SIGUSR1"], }, { "type": "lldb", "request": "launch", "name": "bun test [file] (verbose)", - "program": "${workspaceFolder}/build/bun-debug", + "program": "${workspaceFolder}/build/debug/bun-debug", "args": ["test", "${file}"], - "cwd": "${workspaceFolder}/test", + "cwd": "${workspaceFolder}", "env": { "FORCE_COLOR": "1", "BUN_DEBUG_QUIET_LOGS": "0", + "BUN_DEBUG_jest": "1", "BUN_GARBAGE_COLLECTOR_LEVEL": "2", }, "console": "internalConsole", + // Don't pause when the GC runs while the debugger is open. + "postRunCommands": ["process handle -p true -s false -n false SIGUSR1"], }, { "type": "lldb", "request": "launch", "name": "bun test [file] --watch", - "program": "${workspaceFolder}/build/bun-debug", + "program": "${workspaceFolder}/build/debug/bun-debug", "args": ["test", "--watch", "${file}"], - "cwd": "${workspaceFolder}/test", + "cwd": "${workspaceFolder}", "env": { "FORCE_COLOR": "1", "BUN_DEBUG_QUIET_LOGS": "1", + "BUN_DEBUG_jest": "1", "BUN_GARBAGE_COLLECTOR_LEVEL": "2", }, "console": "internalConsole", + // Don't pause when the GC runs while the debugger is open. + "postRunCommands": ["process handle -p true -s false -n false SIGUSR1"], }, { "type": "lldb", "request": "launch", "name": "bun test [file] --hot", - "program": "${workspaceFolder}/build/bun-debug", + "program": "${workspaceFolder}/build/debug/bun-debug", "args": ["test", "--hot", "${file}"], - "cwd": "${workspaceFolder}/test", + "cwd": "${workspaceFolder}", "env": { "FORCE_COLOR": "1", "BUN_DEBUG_QUIET_LOGS": "1", + "BUN_DEBUG_jest": "1", "BUN_GARBAGE_COLLECTOR_LEVEL": "2", }, "console": "internalConsole", + // Don't pause when the GC runs while the debugger is open. 
+ "postRunCommands": ["process handle -p true -s false -n false SIGUSR1"], }, { "type": "lldb", "request": "launch", "name": "bun test [file] --inspect", - "program": "${workspaceFolder}/build/bun-debug", + "program": "${workspaceFolder}/build/debug/bun-debug", "args": ["test", "${file}"], - "cwd": "${workspaceFolder}/test", + "cwd": "${workspaceFolder}", "env": { "FORCE_COLOR": "1", "BUN_DEBUG_QUIET_LOGS": "1", + "BUN_DEBUG_jest": "1", "BUN_GARBAGE_COLLECTOR_LEVEL": "2", "BUN_INSPECT": "ws://localhost:0/?wait=1", }, "console": "internalConsole", + // Don't pause when the GC runs while the debugger is open. + "postRunCommands": ["process handle -p true -s false -n false SIGUSR1"], "serverReadyAction": { "pattern": "https://debug.bun.sh/#localhost:([0-9]+)/", "uriFormat": "https://debug.bun.sh/#ws://localhost:%s/", @@ -124,16 +143,19 @@ "type": "lldb", "request": "launch", "name": "bun test [file] --inspect-brk", - "program": "${workspaceFolder}/build/bun-debug", + "program": "${workspaceFolder}/build/debug/bun-debug", "args": ["test", "${file}"], - "cwd": "${workspaceFolder}/test", + "cwd": "${workspaceFolder}", "env": { "FORCE_COLOR": "1", "BUN_DEBUG_QUIET_LOGS": "1", + "BUN_DEBUG_jest": "1", "BUN_GARBAGE_COLLECTOR_LEVEL": "2", "BUN_INSPECT": "ws://localhost:0/?break=1", }, "console": "internalConsole", + // Don't pause when the GC runs while the debugger is open. + "postRunCommands": ["process handle -p true -s false -n false SIGUSR1"], "serverReadyAction": { "pattern": "https://debug.bun.sh/#localhost:([0-9]+)/", "uriFormat": "https://debug.bun.sh/#ws://localhost:%s/", @@ -145,36 +167,43 @@ "type": "lldb", "request": "launch", "name": "bun run [file]", - "program": "${workspaceFolder}/build/bun-debug", + "program": "${workspaceFolder}/build/debug/bun-debug", "args": ["run", "${fileBasename}"], "cwd": "${fileDirname}", "env": { "FORCE_COLOR": "0", "BUN_DEBUG_QUIET_LOGS": "1", - "BUN_DEBUG_EventLoop": "1", "BUN_GARBAGE_COLLECTOR_LEVEL": "2", }, "console": "internalConsole", + // Don't pause when the GC runs while the debugger is open. + "postRunCommands": ["process handle -p true -s false -n false SIGUSR1"], }, { "type": "lldb", "request": "launch", "name": "bun run [file] (fast)", - "program": "${workspaceFolder}/build/bun-debug", + "program": "${workspaceFolder}/build/debug/bun-debug", "args": ["run", "${fileBasename}"], "cwd": "${fileDirname}", "env": { "FORCE_COLOR": "1", "BUN_DEBUG_QUIET_LOGS": "1", "BUN_GARBAGE_COLLECTOR_LEVEL": "0", + "BUN_DEBUG_IncrementalGraph": "1", + "BUN_DEBUG_Bake": "1", + "BUN_DEBUG_reload_file_list": "1", + "GOMAXPROCS": "1", }, "console": "internalConsole", + // Don't pause when the GC runs while the debugger is open. + "postRunCommands": ["process handle -p true -s false -n false SIGUSR1"], }, { "type": "lldb", "request": "launch", "name": "bun run [file] (verbose)", - "program": "${workspaceFolder}/build/bun-debug", + "program": "${workspaceFolder}/build/debug/bun-debug", "args": ["run", "${fileBasename}"], "cwd": "${fileDirname}", "env": { @@ -183,12 +212,14 @@ "BUN_GARBAGE_COLLECTOR_LEVEL": "2", }, "console": "internalConsole", + // Don't pause when the GC runs while the debugger is open. 
+ "postRunCommands": ["process handle -p true -s false -n false SIGUSR1"], }, { "type": "lldb", "request": "launch", "name": "bun run [file] --watch", - "program": "${workspaceFolder}/build/bun-debug", + "program": "${workspaceFolder}/build/debug/bun-debug", "args": ["run", "--watch", "${fileBasename}"], "cwd": "${fileDirname}", "env": { @@ -197,12 +228,14 @@ "BUN_GARBAGE_COLLECTOR_LEVEL": "2", }, "console": "internalConsole", + // Don't pause when the GC runs while the debugger is open. + "postRunCommands": ["process handle -p true -s false -n false SIGUSR1"], }, { "type": "lldb", "request": "launch", "name": "bun run [file] --hot", - "program": "${workspaceFolder}/build/bun-debug", + "program": "${workspaceFolder}/build/debug/bun-debug", "args": ["run", "--hot", "${fileBasename}"], "cwd": "${fileDirname}", "env": { @@ -211,12 +244,14 @@ "BUN_GARBAGE_COLLECTOR_LEVEL": "2", }, "console": "internalConsole", + // Don't pause when the GC runs while the debugger is open. + "postRunCommands": ["process handle -p true -s false -n false SIGUSR1"], }, { "type": "lldb", "request": "launch", "name": "bun run [file] --inspect", - "program": "${workspaceFolder}/build/bun-debug", + "program": "${workspaceFolder}/build/debug/bun-debug", "args": ["run", "${fileBasename}"], "cwd": "${fileDirname}", "env": { @@ -226,6 +261,8 @@ "BUN_INSPECT": "ws://localhost:0/?wait=1", }, "console": "internalConsole", + // Don't pause when the GC runs while the debugger is open. + "postRunCommands": ["process handle -p true -s false -n false SIGUSR1"], "serverReadyAction": { "pattern": "https://debug.bun.sh/#localhost:([0-9]+)/", "uriFormat": "https://debug.bun.sh/#ws://localhost:%s/", @@ -236,7 +273,7 @@ "type": "lldb", "request": "launch", "name": "bun run [file] --inspect-brk", - "program": "${workspaceFolder}/build/bun-debug", + "program": "${workspaceFolder}/build/debug/bun-debug", "args": ["run", "${fileBasename}"], "cwd": "${fileDirname}", "env": { @@ -246,6 +283,8 @@ "BUN_INSPECT": "ws://localhost:0/?break=1", }, "console": "internalConsole", + // Don't pause when the GC runs while the debugger is open. + "postRunCommands": ["process handle -p true -s false -n false SIGUSR1"], "serverReadyAction": { "pattern": "https://debug.bun.sh/#localhost:([0-9]+)/", "uriFormat": "https://debug.bun.sh/#ws://localhost:%s/", @@ -257,86 +296,104 @@ "type": "lldb", "request": "launch", "name": "bun test [...]", - "program": "${workspaceFolder}/build/bun-debug", + "program": "${workspaceFolder}/build/debug/bun-debug", "args": ["test", "${input:testName}"], - "cwd": "${workspaceFolder}/test", + "cwd": "${workspaceFolder}", "env": { "FORCE_COLOR": "1", "BUN_DEBUG_QUIET_LOGS": "1", + "BUN_DEBUG_jest": "1", "BUN_GARBAGE_COLLECTOR_LEVEL": "2", }, "console": "internalConsole", + // Don't pause when the GC runs while the debugger is open. + "postRunCommands": ["process handle -p true -s false -n false SIGUSR1"], }, { "type": "lldb", "request": "launch", "name": "bun test [...] (fast)", - "program": "${workspaceFolder}/build/bun-debug", + "program": "${workspaceFolder}/build/debug/bun-debug", "args": ["test", "${input:testName}"], - "cwd": "${workspaceFolder}/test", + "cwd": "${workspaceFolder}", "env": { "FORCE_COLOR": "1", "BUN_DEBUG_QUIET_LOGS": "1", + "BUN_DEBUG_jest": "1", "BUN_GARBAGE_COLLECTOR_LEVEL": "0", }, "console": "internalConsole", + // Don't pause when the GC runs while the debugger is open. 
+ "postRunCommands": ["process handle -p true -s false -n false SIGUSR1"], }, { "type": "lldb", "request": "launch", "name": "bun test [...] (verbose)", - "program": "${workspaceFolder}/build/bun-debug", + "program": "${workspaceFolder}/build/debug/bun-debug", "args": ["test", "${input:testName}"], - "cwd": "${workspaceFolder}/test", + "cwd": "${workspaceFolder}", "env": { "FORCE_COLOR": "1", - "BUN_DEBUG_QUIET_LOGS": "0", + "BUN_DEBUG_QUIET_LOGS": "1", + "BUN_DEBUG_jest": "1", "BUN_GARBAGE_COLLECTOR_LEVEL": "2", }, "console": "internalConsole", + // Don't pause when the GC runs while the debugger is open. + "postRunCommands": ["process handle -p true -s false -n false SIGUSR1"], }, { "type": "lldb", "request": "launch", "name": "bun test [...] --watch", - "program": "${workspaceFolder}/build/bun-debug", + "program": "${workspaceFolder}/build/debug/bun-debug", "args": ["test", "--watch", "${input:testName}"], - "cwd": "${workspaceFolder}/test", + "cwd": "${workspaceFolder}", "env": { "FORCE_COLOR": "1", "BUN_DEBUG_QUIET_LOGS": "1", + "BUN_DEBUG_jest": "1", "BUN_GARBAGE_COLLECTOR_LEVEL": "2", }, "console": "internalConsole", + // Don't pause when the GC runs while the debugger is open. + "postRunCommands": ["process handle -p true -s false -n false SIGUSR1"], }, { "type": "lldb", "request": "launch", "name": "bun test [...] --hot", - "program": "${workspaceFolder}/build/bun-debug", + "program": "${workspaceFolder}/build/debug/bun-debug", "args": ["test", "--hot", "${input:testName}"], - "cwd": "${workspaceFolder}/test", + "cwd": "${workspaceFolder}", "env": { "FORCE_COLOR": "1", "BUN_DEBUG_QUIET_LOGS": "1", + "BUN_DEBUG_jest": "1", "BUN_GARBAGE_COLLECTOR_LEVEL": "2", }, "console": "internalConsole", + // Don't pause when the GC runs while the debugger is open. + "postRunCommands": ["process handle -p true -s false -n false SIGUSR1"], }, { "type": "lldb", "request": "launch", "name": "bun test [...] --inspect", - "program": "${workspaceFolder}/build/bun-debug", + "program": "${workspaceFolder}/build/debug/bun-debug", "args": ["test", "${input:testName}"], - "cwd": "${workspaceFolder}/test", + "cwd": "${workspaceFolder}", "env": { "FORCE_COLOR": "1", "BUN_DEBUG_QUIET_LOGS": "1", + "BUN_DEBUG_jest": "1", "BUN_GARBAGE_COLLECTOR_LEVEL": "2", "BUN_INSPECT": "ws://localhost:0/?wait=1", }, "console": "internalConsole", + // Don't pause when the GC runs while the debugger is open. + "postRunCommands": ["process handle -p true -s false -n false SIGUSR1"], "serverReadyAction": { "pattern": "https://debug.bun.sh/#localhost:([0-9]+)/", "uriFormat": "https://debug.bun.sh/#ws://localhost:%s/", @@ -347,16 +404,19 @@ "type": "lldb", "request": "launch", "name": "bun test [...] --inspect-brk", - "program": "${workspaceFolder}/build/bun-debug", + "program": "${workspaceFolder}/build/debug/bun-debug", "args": ["test", "${input:testName}"], - "cwd": "${workspaceFolder}/test", + "cwd": "${workspaceFolder}", "env": { "FORCE_COLOR": "1", "BUN_DEBUG_QUIET_LOGS": "1", + "BUN_DEBUG_jest": "1", "BUN_GARBAGE_COLLECTOR_LEVEL": "2", "BUN_INSPECT": "ws://localhost:0/?break=1", }, "console": "internalConsole", + // Don't pause when the GC runs while the debugger is open. 
+ "postRunCommands": ["process handle -p true -s false -n false SIGUSR1"], "serverReadyAction": { "pattern": "https://debug.bun.sh/#localhost:([0-9]+)/", "uriFormat": "https://debug.bun.sh/#ws://localhost:%s/", @@ -368,7 +428,7 @@ "type": "lldb", "request": "launch", "name": "bun exec [...]", - "program": "${workspaceFolder}/build/bun-debug", + "program": "${workspaceFolder}/build/debug/bun-debug", "args": ["exec", "${input:testName}"], "cwd": "${workspaceFolder}", "env": { @@ -377,43 +437,49 @@ "BUN_GARBAGE_COLLECTOR_LEVEL": "2", }, "console": "internalConsole", + // Don't pause when the GC runs while the debugger is open. + "postRunCommands": ["process handle -p true -s false -n false SIGUSR1"], }, // bun test [*] { "type": "lldb", "request": "launch", "name": "bun test [*]", - "program": "${workspaceFolder}/build/bun-debug", + "program": "${workspaceFolder}/build/debug/bun-debug", "args": ["test"], - "cwd": "${workspaceFolder}/test", + "cwd": "${workspaceFolder}", "env": { "FORCE_COLOR": "1", "BUN_DEBUG_QUIET_LOGS": "1", "BUN_GARBAGE_COLLECTOR_LEVEL": "2", }, "console": "internalConsole", + // Don't pause when the GC runs while the debugger is open. + "postRunCommands": ["process handle -p true -s false -n false SIGUSR1"], }, { "type": "lldb", "request": "launch", "name": "bun test [*] (fast)", - "program": "${workspaceFolder}/build/bun-debug", + "program": "${workspaceFolder}/build/debug/bun-debug", "args": ["test"], - "cwd": "${workspaceFolder}/test", + "cwd": "${workspaceFolder}", "env": { "FORCE_COLOR": "1", "BUN_DEBUG_QUIET_LOGS": "1", "BUN_GARBAGE_COLLECTOR_LEVEL": "0", }, "console": "internalConsole", + // Don't pause when the GC runs while the debugger is open. + "postRunCommands": ["process handle -p true -s false -n false SIGUSR1"], }, { "type": "lldb", "request": "launch", "name": "bun test [*] --inspect", - "program": "${workspaceFolder}/build/bun-debug", + "program": "${workspaceFolder}/build/debug/bun-debug", "args": ["test"], - "cwd": "${workspaceFolder}/test", + "cwd": "${workspaceFolder}", "env": { "FORCE_COLOR": "1", "BUN_DEBUG_QUIET_LOGS": "1", @@ -421,6 +487,8 @@ "BUN_INSPECT": "ws://localhost:0/", }, "console": "internalConsole", + // Don't pause when the GC runs while the debugger is open. + "postRunCommands": ["process handle -p true -s false -n false SIGUSR1"], "serverReadyAction": { "pattern": "https://debug.bun.sh/#localhost:([0-9]+)/", "uriFormat": "https://debug.bun.sh/#ws://localhost:%s/", @@ -431,7 +499,7 @@ "type": "lldb", "request": "launch", "name": "bun install [folder]", - "program": "${workspaceFolder}/build/bun-debug", + "program": "${workspaceFolder}/build/debug/bun-debug", "args": ["install"], "cwd": "${fileDirname}", "env": { @@ -440,6 +508,8 @@ "BUN_GARBAGE_COLLECTOR_LEVEL": "2", }, "console": "internalConsole", + // Don't pause when the GC runs while the debugger is open. + "postRunCommands": ["process handle -p true -s false -n false SIGUSR1"], }, { "type": "lldb", @@ -448,7 +518,14 @@ "program": "node", "args": ["test/runner.node.mjs"], "cwd": "${workspaceFolder}", + "env": { + "FORCE_COLOR": "1", + "BUN_DEBUG_QUIET_LOGS": "1", + "BUN_GARBAGE_COLLECTOR_LEVEL": "2", + }, "console": "internalConsole", + // Don't pause when the GC runs while the debugger is open. 
+ "postRunCommands": ["process handle -p true -s false -n false SIGUSR1"], }, // Windows: bun test [file] { @@ -458,9 +535,9 @@ }, "request": "launch", "name": "Windows: bun test [file]", - "program": "${workspaceFolder}/build/bun-debug.exe", + "program": "${workspaceFolder}/build/debug/bun-debug.exe", "args": ["test", "${file}"], - "cwd": "${workspaceFolder}/test", + "cwd": "${workspaceFolder}", "environment": [ { "name": "FORCE_COLOR", @@ -474,7 +551,6 @@ "name": "BUN_DEBUG_jest", "value": "1", }, - { "name": "BUN_GARBAGE_COLLECTOR_LEVEL", "value": "1", @@ -488,9 +564,9 @@ }, "request": "launch", "name": "Windows: bun test --only [file]", - "program": "${workspaceFolder}/build/bun-debug.exe", + "program": "${workspaceFolder}/build/debug/bun-debug.exe", "args": ["test", "--only", "${file}"], - "cwd": "${workspaceFolder}/test", + "cwd": "${workspaceFolder}", "environment": [ { "name": "FORCE_COLOR", @@ -501,19 +577,7 @@ "value": "1", }, { - "name": "BUN_DEBUG_EventLoop", - "value": "1", - }, - { - "name": "BUN_DEBUG_uv", - "value": "1", - }, - { - "name": "BUN_DEBUG_SYS", - "value": "1", - }, - { - "name": "BUN_DEBUG_PipeWriter", + "name": "BUN_DEBUG_jest", "value": "1", }, { @@ -529,9 +593,9 @@ }, "request": "launch", "name": "Windows: bun test [file] (fast)", - "program": "${workspaceFolder}/build/bun-debug.exe", + "program": "${workspaceFolder}/build/debug/bun-debug.exe", "args": ["test", "${file}"], - "cwd": "${workspaceFolder}/test", + "cwd": "${workspaceFolder}", "environment": [ { "name": "FORCE_COLOR", @@ -541,6 +605,10 @@ "name": "BUN_DEBUG_QUIET_LOGS", "value": "1", }, + { + "name": "BUN_DEBUG_jest", + "value": "1", + }, { "name": "BUN_GARBAGE_COLLECTOR_LEVEL", "value": "0", @@ -554,9 +622,9 @@ }, "request": "launch", "name": "Windows: bun test [file] (verbose)", - "program": "${workspaceFolder}/build/bun-debug.exe", + "program": "${workspaceFolder}/build/debug/bun-debug.exe", "args": ["test", "${file}"], - "cwd": "${workspaceFolder}/test", + "cwd": "${workspaceFolder}", "environment": [ { "name": "FORCE_COLOR", @@ -566,6 +634,10 @@ "name": "BUN_DEBUG_QUIET_LOGS", "value": "0", }, + { + "name": "BUN_DEBUG_jest", + "value": "1", + }, { "name": "BUN_GARBAGE_COLLECTOR_LEVEL", "value": "2", @@ -579,9 +651,9 @@ }, "request": "launch", "name": "Windows: bun test [file] --inspect", - "program": "${workspaceFolder}/build/bun-debug.exe", + "program": "${workspaceFolder}/build/debug/bun-debug.exe", "args": ["test", "${file}"], - "cwd": "${workspaceFolder}/test", + "cwd": "${workspaceFolder}", "environment": [ { "name": "FORCE_COLOR", @@ -591,6 +663,10 @@ "name": "BUN_DEBUG_QUIET_LOGS", "value": "1", }, + { + "name": "BUN_DEBUG_jest", + "value": "1", + }, { "name": "BUN_GARBAGE_COLLECTOR_LEVEL", "value": "2", @@ -613,9 +689,9 @@ }, "request": "launch", "name": "Windows: bun test [file] --inspect-brk", - "program": "${workspaceFolder}/build/bun-debug.exe", + "program": "${workspaceFolder}/build/debug/bun-debug.exe", "args": ["test", "${file}"], - "cwd": "${workspaceFolder}/test", + "cwd": "${workspaceFolder}", "environment": [ { "name": "FORCE_COLOR", @@ -625,6 +701,10 @@ "name": "BUN_DEBUG_QUIET_LOGS", "value": "1", }, + { + "name": "BUN_DEBUG_jest", + "value": "1", + }, { "name": "BUN_GARBAGE_COLLECTOR_LEVEL", "value": "2", @@ -648,7 +728,7 @@ }, "request": "launch", "name": "Windows: bun run [file]", - "program": "${workspaceFolder}/build/bun-debug.exe", + "program": "${workspaceFolder}/build/debug/bun-debug.exe", "args": ["run", "${fileBasename}"], "cwd": "${fileDirname}", "environment": 
[ @@ -660,6 +740,10 @@ "name": "BUN_DEBUG_QUIET_LOGS", "value": "1", }, + { + "name": "BUN_DEBUG_jest", + "value": "1", + }, { "name": "BUN_GARBAGE_COLLECTOR_LEVEL", "value": "2", @@ -673,7 +757,7 @@ }, "request": "launch", "name": "Windows: bun install", - "program": "${workspaceFolder}/build/bun-debug.exe", + "program": "${workspaceFolder}/build/debug/bun-debug.exe", "args": ["install"], "cwd": "${fileDirname}", "environment": [ @@ -681,7 +765,10 @@ "name": "FORCE_COLOR", "value": "1", }, - + { + "name": "BUN_DEBUG_QUIET_LOGS", + "value": "1", + }, { "name": "BUN_GARBAGE_COLLECTOR_LEVEL", "value": "0", @@ -695,7 +782,7 @@ }, "request": "launch", "name": "Windows: bun run [file] (verbose)", - "program": "${workspaceFolder}/build/bun-debug.exe", + "program": "${workspaceFolder}/build/debug/bun-debug.exe", "args": ["run", "${fileBasename}"], "cwd": "${fileDirname}", "environment": [ @@ -705,7 +792,7 @@ }, { "name": "BUN_DEBUG_QUIET_LOGS", - "value": "0", + "value": "1", }, { "name": "BUN_GARBAGE_COLLECTOR_LEVEL", @@ -720,7 +807,7 @@ }, "request": "launch", "name": "Windows: bun run [file] --inspect", - "program": "${workspaceFolder}/build/bun-debug.exe", + "program": "${workspaceFolder}/build/debug/bun-debug.exe", "args": ["run", "${fileBasename}"], "cwd": "${fileDirname}", "environment": [ @@ -754,7 +841,7 @@ }, "request": "launch", "name": "Windows: bun run [file] --inspect-brk", - "program": "${workspaceFolder}/build/bun-debug.exe", + "program": "${workspaceFolder}/build/debug/bun-debug.exe", "args": ["run", "${fileBasename}"], "cwd": "${fileDirname}", "environment": [ @@ -789,9 +876,9 @@ }, "request": "launch", "name": "Windows: bun test [...]", - "program": "${workspaceFolder}/build/bun-debug.exe", + "program": "${workspaceFolder}/build/debug/bun-debug.exe", "args": ["test", "${input:testName}"], - "cwd": "${workspaceFolder}/test", + "cwd": "${workspaceFolder}", "environment": [ { "name": "FORCE_COLOR", @@ -801,6 +888,10 @@ "name": "BUN_DEBUG_QUIET_LOGS", "value": "1", }, + { + "name": "BUN_DEBUG_jest", + "value": "1", + }, { "name": "BUN_GARBAGE_COLLECTOR_LEVEL", "value": "2", @@ -814,9 +905,9 @@ }, "request": "launch", "name": "Windows: bun test [...] (fast)", - "program": "${workspaceFolder}/build/bun-debug.exe", + "program": "${workspaceFolder}/build/debug/bun-debug.exe", "args": ["test", "${input:testName}"], - "cwd": "${workspaceFolder}/test", + "cwd": "${workspaceFolder}", "environment": [ { "name": "FORCE_COLOR", @@ -826,6 +917,10 @@ "name": "BUN_DEBUG_QUIET_LOGS", "value": "1", }, + { + "name": "BUN_DEBUG_jest", + "value": "1", + }, { "name": "BUN_GARBAGE_COLLECTOR_LEVEL", "value": "0", @@ -839,9 +934,9 @@ }, "request": "launch", "name": "Windows: bun test [...] (verbose)", - "program": "${workspaceFolder}/build/bun-debug.exe", + "program": "${workspaceFolder}/build/debug/bun-debug.exe", "args": ["test", "${input:testName}"], - "cwd": "${workspaceFolder}/test", + "cwd": "${workspaceFolder}", "environment": [ { "name": "FORCE_COLOR", @@ -851,6 +946,10 @@ "name": "BUN_DEBUG_QUIET_LOGS", "value": "0", }, + { + "name": "BUN_DEBUG_jest", + "value": "1", + }, { "name": "BUN_GARBAGE_COLLECTOR_LEVEL", "value": "2", @@ -864,9 +963,9 @@ }, "request": "launch", "name": "Windows: bun test [...] 
--watch", - "program": "${workspaceFolder}/build/bun-debug.exe", + "program": "${workspaceFolder}/build/debug/bun-debug.exe", "args": ["test", "--watch", "${input:testName}"], - "cwd": "${workspaceFolder}/test", + "cwd": "${workspaceFolder}", "environment": [ { "name": "FORCE_COLOR", @@ -876,6 +975,10 @@ "name": "BUN_DEBUG_QUIET_LOGS", "value": "1", }, + { + "name": "BUN_DEBUG_jest", + "value": "1", + }, { "name": "BUN_GARBAGE_COLLECTOR_LEVEL", "value": "2", @@ -889,9 +992,9 @@ }, "request": "launch", "name": "Windows: bun test [...] --hot", - "program": "${workspaceFolder}/build/bun-debug.exe", + "program": "${workspaceFolder}/build/debug/bun-debug.exe", "args": ["test", "--hot", "${input:testName}"], - "cwd": "${workspaceFolder}/test", + "cwd": "${workspaceFolder}", "environment": [ { "name": "FORCE_COLOR", @@ -901,6 +1004,10 @@ "name": "BUN_DEBUG_QUIET_LOGS", "value": "1", }, + { + "name": "BUN_DEBUG_jest", + "value": "1", + }, { "name": "BUN_GARBAGE_COLLECTOR_LEVEL", "value": "2", @@ -914,9 +1021,9 @@ }, "request": "launch", "name": "Windows: bun test [...] --inspect", - "program": "${workspaceFolder}/build/bun-debug.exe", + "program": "${workspaceFolder}/build/debug/bun-debug.exe", "args": ["test", "${input:testName}"], - "cwd": "${workspaceFolder}/test", + "cwd": "${workspaceFolder}", "environment": [ { "name": "FORCE_COLOR", @@ -926,6 +1033,10 @@ "name": "BUN_DEBUG_QUIET_LOGS", "value": "1", }, + { + "name": "BUN_DEBUG_jest", + "value": "1", + }, { "name": "BUN_GARBAGE_COLLECTOR_LEVEL", "value": "2", @@ -948,9 +1059,9 @@ }, "request": "launch", "name": "Windows: bun test [...] --inspect-brk", - "program": "${workspaceFolder}/build/bun-debug.exe", + "program": "${workspaceFolder}/build/debug/bun-debug.exe", "args": ["test", "${input:testName}"], - "cwd": "${workspaceFolder}/test", + "cwd": "${workspaceFolder}", "environment": [ { "name": "FORCE_COLOR", @@ -960,6 +1071,10 @@ "name": "BUN_DEBUG_QUIET_LOGS", "value": "1", }, + { + "name": "BUN_DEBUG_jest", + "value": "1", + }, { "name": "BUN_GARBAGE_COLLECTOR_LEVEL", "value": "2", @@ -983,7 +1098,7 @@ }, "request": "launch", "name": "Windows: bun exec [...]", - "program": "${workspaceFolder}/build/bun-debug.exe", + "program": "${workspaceFolder}/build/debug/bun-debug.exe", "args": ["exec", "${input:testName}"], "cwd": "${workspaceFolder}", "environment": [ @@ -1009,9 +1124,9 @@ }, "request": "launch", "name": "Windows: bun test [*]", - "program": "${workspaceFolder}/build/bun-debug.exe", + "program": "${workspaceFolder}/build/debug/bun-debug.exe", "args": ["test"], - "cwd": "${workspaceFolder}/test", + "cwd": "${workspaceFolder}", "environment": [ { "name": "FORCE_COLOR", @@ -1034,9 +1149,9 @@ }, "request": "launch", "name": "Windows: bun test [*] (fast)", - "program": "${workspaceFolder}/build/bun-debug.exe", + "program": "${workspaceFolder}/build/debug/bun-debug.exe", "args": ["test"], - "cwd": "${workspaceFolder}/test", + "cwd": "${workspaceFolder}", "environment": [ { "name": "FORCE_COLOR", @@ -1046,6 +1161,10 @@ "name": "BUN_DEBUG_QUIET_LOGS", "value": "1", }, + { + "name": "BUN_DEBUG_jest", + "value": "1", + }, { "name": "BUN_GARBAGE_COLLECTOR_LEVEL", "value": "0", @@ -1059,9 +1178,9 @@ }, "request": "launch", "name": "Windows: bun test [*] --inspect", - "program": "${workspaceFolder}/build/bun-debug.exe", + "program": "${workspaceFolder}/build/debug/bun-debug.exe", "args": ["test"], - "cwd": "${workspaceFolder}/test", + "cwd": "${workspaceFolder}", "environment": [ { "name": "FORCE_COLOR", @@ -1069,7 +1188,11 @@ }, { "name": 
"BUN_DEBUG_QUIET_LOGS", - "value": "0", + "value": "1", + }, + { + "name": "BUN_DEBUG_jest", + "value": "1", }, { "name": "BUN_GARBAGE_COLLECTOR_LEVEL", @@ -1096,7 +1219,27 @@ "program": "node", "args": ["test/runner.node.mjs"], "cwd": "${workspaceFolder}", + "environment": [ + { + "name": "FORCE_COLOR", + "value": "1", + }, + { + "name": "BUN_DEBUG_QUIET_LOGS", + "value": "1", + }, + { + "name": "BUN_DEBUG_jest", + "value": "1", + }, + { + "name": "BUN_GARBAGE_COLLECTOR_LEVEL", + "value": "2", + }, + ], "console": "internalConsole", + // Don't pause when the GC runs while the debugger is open. + "postRunCommands": ["process handle -p true -s false -n false SIGUSR1"], }, ], "inputs": [ diff --git a/.vscode/settings.json b/.vscode/settings.json index 1701cb55df..0fd8800e63 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -12,9 +12,11 @@ "search.exclude": { "node_modules": true, ".git": true, - "src/bun.js/WebKit": true, - "src/deps/*/**": true, + "vendor/*/**": true, "test/node.js/upstream": true, + // This will fill up your whole search history. + "test/js/node/test/fixtures": true, + "test/js/node/test/common": true, }, "search.followSymlinks": false, "search.useIgnoreFiles": true, @@ -27,13 +29,13 @@ // Zig "zig.initialSetupDone": true, "zig.buildOption": "build", - "zig.zls.zigLibPath": "${workspaceFolder}/src/deps/zig/lib", - "zig.buildArgs": ["-Dgenerated-code=./build/codegen"], + "zig.zls.zigLibPath": "${workspaceFolder}/vendor/zig/lib", + "zig.buildArgs": ["-Dgenerated-code=./build/debug/codegen"], "zig.zls.buildOnSaveStep": "check", // "zig.zls.enableBuildOnSave": true, // "zig.buildOnSave": true, "zig.buildFilePath": "${workspaceFolder}/build.zig", - "zig.path": "${workspaceFolder}/.cache/zig/zig.exe", + "zig.path": "${workspaceFolder}/vendor/zig/zig.exe", "zig.formattingProvider": "zls", "zig.zls.enableInlayHints": false, "[zig]": { @@ -50,14 +52,18 @@ "cmake.configureOnOpen": false, "C_Cpp.errorSquiggles": "enabled", "[cpp]": { + "editor.tabSize": 4, "editor.defaultFormatter": "xaver.clang-format", }, "[c]": { + "editor.tabSize": 4, "editor.defaultFormatter": "xaver.clang-format", }, "[h]": { + "editor.tabSize": 4, "editor.defaultFormatter": "xaver.clang-format", }, + "clangd.arguments": ["-header-insertion=never"], // JavaScript "prettier.enable": true, @@ -135,6 +141,7 @@ }, "files.associations": { "*.idl": "cpp", + "array": "cpp", }, "C_Cpp.files.exclude": { "**/.vscode": true, diff --git a/CMakeLists.txt b/CMakeLists.txt index 1d1248557a..b9249d8993 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1,1636 +1,51 @@ -cmake_minimum_required(VERSION 3.22) -cmake_policy(SET CMP0091 NEW) -cmake_policy(SET CMP0067 NEW) - -set(CMAKE_POLICY_DEFAULT_CMP0069 NEW) -set(Bun_VERSION "1.1.22") -set(WEBKIT_TAG a060f087c2232fb20d82c321d21e074e735d3261) - -set(BUN_WORKDIR "${CMAKE_CURRENT_BINARY_DIR}") -message(STATUS "Configuring Bun ${Bun_VERSION} in ${BUN_WORKDIR}") - -set(CMAKE_COLOR_DIAGNOSTICS ON) -set(CMAKE_CXX_STANDARD 20) -set(CMAKE_C_STANDARD 17) -set(CMAKE_CXX_STANDARD_REQUIRED ON) -set(CMAKE_C_STANDARD_REQUIRED ON) - -option(ZIG_CACHE_DIR "Path to the Zig cache directory" "") - -if(NOT ZIG_CACHE_DIR) - SET(ZIG_CACHE_DIR "${BUN_WORKDIR}") - cmake_path(APPEND ZIG_CACHE_DIR "zig-cache") -endif() - -set(LOCAL_ZIG_CACHE_DIR "${ZIG_CACHE_DIR}") -set(GLOBAL_ZIG_CACHE_DIR "${ZIG_CACHE_DIR}") - -cmake_path(APPEND LOCAL_ZIG_CACHE_DIR "local") -cmake_path(APPEND GLOBAL_ZIG_CACHE_DIR "global") - -# Used in process.version, process.versions.node, napi, and elsewhere 
-set(REPORTED_NODEJS_VERSION "22.3.0") - -# WebKit uses -std=gnu++20 on non-macOS non-Windows -# If we do not set this, it will crash at startup on the first memory allocation. -if(NOT WIN32 AND NOT APPLE) - set(CMAKE_CXX_EXTENSIONS ON) - set(CMAKE_POSITION_INDEPENDENT_CODE FALSE) -endif() - -# --- Build Type --- -if(NOT CMAKE_BUILD_TYPE) - message(WARNING "No CMAKE_BUILD_TYPE value specified, defaulting to Debug.\nSet a build type with -DCMAKE_BUILD_TYPE=") - set(CMAKE_BUILD_TYPE "Debug" CACHE STRING "Choose the type of build (Debug, Release)" FORCE) -else() - if(NOT CMAKE_BUILD_TYPE MATCHES "^(Debug|Release)$") - message(FATAL_ERROR - "Invalid CMAKE_BUILD_TYPE value specified: ${CMAKE_BUILD_TYPE}\n" - "CMAKE_BUILD_TYPE must be Debug or Release.") - endif() - - message(STATUS "The CMake build type is: ${CMAKE_BUILD_TYPE}") -endif() - -if(WIN32 AND NOT CMAKE_CL_SHOWINCLUDES_PREFIX) - # workaround until cmake fix is shipped https://github.com/ninja-build/ninja/issues/2280 - # './build/.ninja_deps' may need to be deleted, the bug is "Note: including file: ..." is saved - # as part of some file paths - set(CMAKE_CL_SHOWINCLUDES_PREFIX "Note: including file:") -endif() - -if(CMAKE_BUILD_TYPE STREQUAL "Debug") - set(DEBUG ON) - set(DEFAULT_ZIG_OPTIMIZE "Debug") - set(bun "bun-debug") - - # COMPILE_COMMANDS - set(CMAKE_EXPORT_COMPILE_COMMANDS ON) -elseif(CMAKE_BUILD_TYPE STREQUAL "Release") - set(DEBUG OFF) - set(DEFAULT_ZIG_OPTIMIZE "ReleaseFast") - - if(WIN32) - # Debug symbols are in a separate file: bun.pdb - set(bun "bun") - set(DEFAULT_ZIG_OPTIMIZE "ReleaseSafe") - else() - if(ZIG_OPTIMIZE STREQUAL "Debug") - set(bun "bun-debug") - else() - set(bun "bun-profile") - endif() - endif() -endif() - -# --- MacOS SDK --- -if(APPLE AND DEFINED ENV{CI}) - set(CMAKE_OSX_DEPLOYMENT_TARGET "13.0") -endif() - -if(APPLE AND NOT CMAKE_OSX_DEPLOYMENT_TARGET) - execute_process(COMMAND xcrun --show-sdk-path OUTPUT_VARIABLE SDKROOT) - string(STRIP ${SDKROOT} SDKROOT) - message(STATUS "MacOS SDK path: ${SDKROOT}") - SET(CMAKE_OSX_SYSROOT ${SDKROOT}) - - execute_process(COMMAND xcrun --sdk macosx --show-sdk-version OUTPUT_VARIABLE MACOSX_DEPLOYMENT_TARGET) - string(STRIP ${MACOSX_DEPLOYMENT_TARGET} MACOSX_DEPLOYMENT_TARGET) - set(CMAKE_OSX_DEPLOYMENT_TARGET ${MACOSX_DEPLOYMENT_TARGET}) - - # Check if current version of macOS is less than the deployment target and if so, raise an error - execute_process(COMMAND sw_vers -productVersion OUTPUT_VARIABLE MACOS_VERSION) - string(STRIP ${MACOS_VERSION} MACOS_VERSION) - - if(MACOS_VERSION VERSION_LESS ${MACOSX_DEPLOYMENT_TARGET}) - message(WARNING - "The current version of macOS (${MACOS_VERSION}) is less than the deployment target (${MACOSX_DEPLOYMENT_TARGET}).\n" - "The build will be incompatible with your current device due to mismatches in `icucore` versions.\n" - "To fix this, please either:\n" - " - Upgrade to at least macOS ${MACOSX_DEPLOYMENT_TARGET}\n" - " - Use `xcode-select` to switch to an SDK version <= ${MACOS_VERSION}\n" - " - Set CMAKE_OSX_DEPLOYMENT_TARGET=${MACOS_VERSION} (make sure to build all dependencies with this variable set too)" - ) - endif() -endif() - -if(APPLE) - message(STATUS "Building for macOS v${CMAKE_OSX_DEPLOYMENT_TARGET}") -endif() - -# --- LLVM --- -# This detection is a little overkill, but it ensures that the set LLVM_VERSION matches under -# any case possible. Sorry for the complexity... 
-# -# Bun and WebKit must be compiled with the same compiler, so we do as much as we can to ensure that -# the compiler used for the prebuilt WebKit is the same as we install as a dependency. -# -# It has to be done before project() is called, so that CMake doesnt pick a compiler for us, but even then -# we do some extra work afterwards to double-check, and we will rerun BUN_FIND_LLVM if the compiler did not match. -# -# If the user passes -DLLVM_PREFIX, most of this logic is skipped, but we still warn if invalid. -if(WIN32 OR APPLE) - set(LLVM_VERSION 18) -else() - set(LLVM_VERSION 16) -endif() - -macro(BUN_FIND_LLVM) - find_program( - _LLVM_CXX_PATH - NAMES clang++-${LLVM_VERSION} clang++ - PATHS ENV PATH ${PLATFORM_LLVM_SEARCH_PATHS} - DOC "Path to LLVM ${LLVM_VERSION}'s clang++ binary. Please pass -DLLVM_PREFIX with the path to LLVM" - ) - - if(NOT _LLVM_CXX_PATH) - message(FATAL_ERROR "Could not find LLVM ${LLVM_VERSION}, search paths: ${PLATFORM_LLVM_SEARCH_PATHS}") - endif() - - set(CMAKE_CXX_COMPILER "${_LLVM_CXX_PATH}") - find_program( - _LLVM_C_PATH - NAMES clang-${LLVM_VERSION} clang - PATHS ENV PATH ${PLATFORM_LLVM_SEARCH_PATHS} - DOC "Path to LLVM ${LLVM_VERSION}'s clang binary. Please pass -DLLVM_PREFIX with the path to LLVM" - ) - - if(NOT _LLVM_C_PATH) - message(FATAL_ERROR "Could not find LLVM ${LLVM_VERSION}, search paths: ${PLATFORM_LLVM_SEARCH_PATHS}") - endif() - - set(CMAKE_C_COMPILER "${_LLVM_C_PATH}") - - find_program( - STRIP - NAMES llvm-strip - PATHS ENV PATH ${PLATFORM_LLVM_SEARCH_PATHS} - DOC "Path to LLVM ${LLVM_VERSION}'s llvm-strip binary" - ) - - find_program( - STRIP - NAMES strip - PATHS ENV PATH ${PLATFORM_LLVM_SEARCH_PATHS} - DOC "Path to strip binary" - ) - find_program( - DSYMUTIL - NAMES dsymutil - PATHS ENV PATH ${PLATFORM_LLVM_SEARCH_PATHS} - DOC "Path to LLVM ${LLVM_VERSION}'s dsymutil binary" - ) - find_program( - AR - NAMES llvm-ar - PATHS ENV PATH ${PLATFORM_LLVM_SEARCH_PATHS} - DOC "Path to LLVM ${LLVM_VERSION}'s llvm-ar binary" - ) - find_program( - AR - NAMES ar - PATHS ENV PATH ${PLATFORM_LLVM_SEARCH_PATHS} - DOC "Path to LLVM ${LLVM_VERSION}'s llvm-ar binary" - ) - find_program( - RANLIB - NAMES llvm-ranlib - PATHS ENV PATH ${PLATFORM_LLVM_SEARCH_PATHS} - DOC "Path to LLVM ${LLVM_VERSION}'s llvm-ar binary" - ) - - execute_process(COMMAND ${CMAKE_CXX_COMPILER} --version OUTPUT_VARIABLE _tmp) - string(REGEX MATCH "version ([0-9]+)\\.([0-9]+)\\.([0-9]+)" CMAKE_CXX_COMPILER_VERSION "${_tmp}") - set(CMAKE_CXX_COMPILER_VERSION "${CMAKE_MATCH_1}.${CMAKE_MATCH_2}.${CMAKE_MATCH_3}") -endmacro() - -if(UNIX) - if(LLVM_PREFIX) - set(PLATFORM_LLVM_SEARCH_PATHS ${LLVM_PREFIX}/bin) - else() - set(PLATFORM_LLVM_SEARCH_PATHS /usr/lib/llvm-${LLVM_VERSION}/bin /usr/lib/llvm${LLVM_VERSION}/bin /usr/bin /usr/local/bin) - - if(APPLE) - set(PLATFORM_LLVM_SEARCH_PATHS /opt/homebrew/opt/llvm@${LLVM_VERSION}/bin /opt/homebrew/bin ${PLATFORM_LLVM_SEARCH_PATHS}) - endif() - endif() - - if(CMAKE_CXX_COMPILER) - set(_LLVM_CXX_PATH "${CMAKE_CXX_COMPILER}") - endif() - - if(CMAKE_C_COMPILER) - set(_LLVM_C_PATH "${CMAKE_C_COMPILER}") - endif() - - BUN_FIND_LLVM() -else() - # Windows uses Clang-CL - # TODO: good configuration in this regard. -G Ninja will pick clang-cl if possible, which should be fine for most users. 
- if(NOT CMAKE_C_COMPILER) - set(CMAKE_C_COMPILER "clang-cl") - endif() - - if(NOT CMAKE_CXX_COMPILER) - set(CMAKE_CXX_COMPILER "clang-cl") - endif() - - find_program( - STRIP - NAMES llvm-strip - PATHS ENV PATH ${PLATFORM_LLVM_SEARCH_PATHS} - DOC "Path to LLVM ${LLVM_VERSION}'s llvm-strip binary" - ) - find_program( - AR - NAMES llvm-ar - PATHS ENV PATH ${PLATFORM_LLVM_SEARCH_PATHS} - DOC "Path to LLVM ${LLVM_VERSION}'s llvm-ar binary" - ) -endif() - -project(Bun VERSION "${Bun_VERSION}") - -# if(MSVC) -# message(FATAL_ERROR "Bun does not support building with MSVC. Please use `cmake -G Ninja` with LLVM ${LLVM_VERSION} and Ninja.") -# endif() - -# More effort to prevent using the wrong C++ compiler -if(UNIX) - if((NOT CMAKE_CXX_COMPILER_ID STREQUAL "Clang") OR(NOT CMAKE_CXX_COMPILER_VERSION MATCHES "^${LLVM_VERSION}\.")) - # Attempt to auto-correct the compiler - message(STATUS "Compiler mismatch, attempting to auto-correct") - unset(_LLVM_CXX_PATH) - BUN_FIND_LLVM() - - if((NOT CMAKE_CXX_COMPILER_ID STREQUAL "Clang") OR(NOT CMAKE_CXX_COMPILER_VERSION MATCHES "^${LLVM_VERSION}\.")) - message(WARNING "Expected LLVM ${LLVM_VERSION} as the C++ compiler, build may fail or break at runtime.") - endif() - endif() -endif() - -message(STATUS "C++ Compiler: ${CMAKE_CXX_COMPILER_ID} ${CMAKE_CXX_COMPILER_VERSION} at ${CMAKE_CXX_COMPILER}") - -# --- End LLVM --- -if(NOT WIN32) - set(SHELL "bash") - set(SCRIPT_EXTENSION "sh") -else() - set(SCRIPT_EXTENSION "ps1") - - # pwsh is the new powershell, powershell is the old one. - find_program(SHELL NAMES pwsh powershell) -endif() - -set(DEFAULT_ON_UNLESS_APPLE ON) - -if(APPLE) - set(DEFAULT_ON_UNLESS_APPLE OFF) -endif() - -set(CI OFF) - -if(DEFINED ENV{CI} OR DEFINED ENV{GITHUB_ACTIONS}) - set(CI ON) -endif() - -set(DEFAULT_USE_STATIC_LIBATOMIC ON) -set(DEFAULT_USE_DEBUG_JSC, OFF) - -if(CMAKE_BUILD_TYPE STREQUAL "Debug") - set(DEFAULT_USE_DEBUG_JSC ON) - set(DEFAULT_LTO OFF) -elseif(CMAKE_BUILD_TYPE STREQUAL "Release") - if(CI) - set(DEFAULT_LTO ON) - else() - set(DEFAULT_LTO OFF) - endif() -endif() - -if(WIN32) - set(DEFAULT_USE_DEBUG_JSC OFF) -endif() - -if(UNIX AND NOT APPLE) - execute_process(COMMAND grep -w "NAME" /etc/os-release OUTPUT_VARIABLE LINUX_DISTRO) - - if(${LINUX_DISTRO} MATCHES "NAME=\"(Arch|Manjaro|Artix) Linux\"|NAME=\"openSUSE Tumbleweed\"\n") - set(DEFAULT_USE_STATIC_LIBATOMIC OFF) - endif() -endif() - -# -- Build Flags -- -option(USE_STATIC_SQLITE "Statically link SQLite?" ${DEFAULT_ON_UNLESS_APPLE}) -option(USE_CUSTOM_ZLIB "Use Bun's recommended version of zlib" ON) -option(USE_CUSTOM_LIBDEFLATE "Use Bun's recommended version of libdeflate" ON) -option(USE_CUSTOM_BORINGSSL "Use Bun's recommended version of BoringSSL" ON) -option(USE_CUSTOM_LIBARCHIVE "Use Bun's recommended version of libarchive" ON) -option(USE_CUSTOM_MIMALLOC "Use Bun's recommended version of Mimalloc" ON) -option(USE_CUSTOM_ZSTD "Use Bun's recommended version of zstd" ON) -option(USE_CUSTOM_CARES "Use Bun's recommended version of c-ares" ON) -option(USE_CUSTOM_LOLHTML "Use Bun's recommended version of lolhtml" ON) -option(USE_CUSTOM_TINYCC "Use Bun's recommended version of tinycc" ON) -option(USE_CUSTOM_LIBUV "Use Bun's recommended version of libuv (Windows only)" ON) -option(USE_CUSTOM_LSHPACK "Use Bun's recommended version of ls-hpack" ON) -option(USE_BASELINE_BUILD "Build Bun for baseline (older) CPUs" OFF) -option(USE_SYSTEM_ICU "Use the system-provided libicu. May fix startup crashes when building WebKit yourself." 
OFF) - -option(USE_VALGRIND "Build Bun with Valgrind support (Linux only)" OFF) - -option(ZIG_OPTIMIZE "Optimization level for Zig" ${DEFAULT_ZIG_OPTIMIZE}) -option(USE_DEBUG_JSC "Enable assertions and use a debug build of JavaScriptCore" ${DEFAULT_USE_DEBUG_JSC}) -option(USE_UNIFIED_SOURCES "Use unified sources to speed up the build" OFF) -option(USE_STATIC_LIBATOMIC "Statically link libatomic, requires the presence of libatomic.a" ${DEFAULT_USE_STATIC_LIBATOMIC}) - -option(USE_LTO "Enable Link-Time Optimization" ${DEFAULT_LTO}) - -if(APPLE AND USE_LTO) - set(USE_LTO OFF) - message(FATAL_ERROR "Link-Time Optimization is not supported on macOS because it requires -fuse-ld=lld and lld causes many segfaults on macOS (likely related to stack size)") -endif() - -if(WIN32 AND USE_LTO) - set(CMAKE_LINKER_TYPE LLD) - set(CMAKE_INTERPROCEDURAL_OPTIMIZATION OFF) -endif() - -option(BUN_TIDY_ONLY "Only run clang-tidy" OFF) -option(BUN_TIDY_ONLY_EXTRA " Only run clang-tidy, with extra checks for local development" OFF) - -if(NOT ZIG_LIB_DIR) - cmake_path(SET ZIG_LIB_DIR NORMALIZE "${CMAKE_CURRENT_SOURCE_DIR}/src/deps/zig/lib") -endif() - -if(USE_VALGRIND) - # Disable SIMD - set(USE_BASELINE_BUILD ON) - - if(ARCH STREQUAL "x86_64") - # This is for picohttpparser - # Valgrind cannot handle SSE4.2 instructions - add_compile_definitions("__SSE4_2__=0") - endif() -endif() - -if(NOT CANARY) - set(CANARY 0) -endif() - -if(NOT ENABLE_LOGS) - set(ENABLE_LOGS false) -endif() - -if(NOT ZIG_OPTIMIZE) - set(ZIG_OPTIMIZE ${DEFAULT_ZIG_OPTIMIZE}) -endif() - -set(ERROR_LIMIT 100 CACHE STRING "Maximum number of errors to show when compiling C++ code") - -set(ARCH x86_64) -set(HOMEBREW_PREFIX "/usr/local") - -if(CMAKE_SYSTEM_PROCESSOR MATCHES "aarch64|arm64|arm") - set(ARCH aarch64) - set(HOMEBREW_PREFIX "/opt/homebrew") -endif() - -if(NOT CPU_TARGET) - if(DEFINED ENV{CPU_TARGET}) - set(CPU_TARGET $ENV{CPU_TARGET}) - else() - set(CPU_TARGET "native" CACHE STRING "CPU target for the compiler" FORCE) - - if(ARCH STREQUAL "x86_64") - if(USE_BASELINE_BUILD) - set(CPU_TARGET "nehalem") - else() - set(CPU_TARGET "haswell") - endif() - endif() - endif() -endif() - -message(STATUS "Building for CPU Target: ${CPU_TARGET}") - -if(NOT ZIG_TARGET) - set(ZIG_TARGET "native") - - if(WIN32) - set(ZIG_TARGET "${ARCH}-windows-msvc") - endif() -endif() - -set(CONFIGURE_DEPENDS "CONFIGURE_DEPENDS") - -if(NO_CONFIGURE_DEPENDS) - set(CONFIGURE_DEPENDS "") -endif() - -# --- CLI Paths --- -set(REQUIRED_IF_NOT_ONLY_CPP_OR_LINK "") - -if(NOT BUN_CPP_ONLY AND NOT BUN_LINK_ONLY) - set(REQUIRED_IF_NOT_ONLY_CPP_OR_LINK "REQUIRED") -endif() - -# Zig Compiler -function(validate_zig validator_result_var item) - set(${validator_result_var} FALSE PARENT_SCOPE) - - # We will allow any valid zig compiler, as long as it contains some text from `zig zen` - # Ideally we would do a version or feature check, but that would be quite slow - execute_process(COMMAND ${item} zen OUTPUT_VARIABLE ZIG_ZEN_OUTPUT) - - if(ZIG_ZEN_OUTPUT MATCHES "Together we serve the users") - set(${validator_result_var} TRUE PARENT_SCOPE) - else() - set(${validator_result_var} FALSE PARENT_SCOPE) - endif() -endfunction() - -if(ZIG_COMPILER) - if(ZIG_COMPILER STREQUAL "system") - message(STATUS "Using system Zig compiler") - unset(ZIG_COMPILER_) - endif() - - find_program(ZIG_COMPILER_ zig ${REQUIRED_IF_NOT_ONLY_CPP_OR_LINK} DOC "Path to the Zig compiler" VALIDATOR validate_zig) - set(ZIG_COMPILER "${ZIG_COMPILER_}") - message(STATUS "Found Zig Compiler: ${ZIG_COMPILER}") -elseif(NOT 
BUN_CPP_ONLY AND NOT BUN_LINK_ONLY AND NOT BUN_TIDY_ONLY AND NOT BUN_TIDY_ONLY_EXTRA) - execute_process( - COMMAND "${SHELL}" - "${CMAKE_CURRENT_SOURCE_DIR}/scripts/download-zig.${SCRIPT_EXTENSION}" - ) - set(ZIG_COMPILER "${CMAKE_CURRENT_SOURCE_DIR}/.cache/zig/zig") - - if(WIN32) - set(ZIG_COMPILER "${ZIG_COMPILER}.exe") - endif() - - if(NOT EXISTS "${ZIG_COMPILER}") - unset(ZIG_COMPILER) - message(FATAL_ERROR "Auto-installation of Zig failed. Please pass -DZIG_COMPILER=system or a path to the Zig") - endif() - - message(STATUS "Installed Zig Compiler: ${ZIG_COMPILER}") - set_property(DIRECTORY APPEND PROPERTY CMAKE_CONFIGURE_DEPENDS "build.zig") - - message(STATUS "Using zig cache directory: ${ZIG_CACHE_DIR}") -endif() - -# Bun -find_program(BUN_EXECUTABLE bun ${REQUIRED_IF_NOT_ONLY_CPP_OR_LINK} DOC "Path to an already built release of Bun") -message(STATUS "Found Bun: ${BUN_EXECUTABLE}") - -if(WIN32 AND NO_CODEGEN) - # TODO(@paperdave): remove this, see bun-windows.yml's comment. - set(BUN_EXECUTABLE "echo") -endif() - -# Prettier -find_program(PRETTIER prettier DOC "Path to prettier" PATHS ./node_modules/.bin ENV PATH) - -# Esbuild (TODO: switch these to "bun build") -find_program(ESBUILD esbuild DOC "Path to esbuild" PATHS ./node_modules/.bin ENV PATH) - -# Ruby (only needed for unified sources) -if(USE_UNIFIED_SOURCES) - # ruby 'WebKit/Source/WTF/Scripts/generate-unified-source-bundles.rb' source_list.txt --source-tree-path . --derived-sources-path build/unified-sources - find_program(RUBY ruby DOC "Path to ruby") -endif() - -# CCache -# find_program(CCACHE_PROGRAM sccache) -find_program(CCACHE_PROGRAM ccache) - -if(CCACHE_PROGRAM) - set(CMAKE_CXX_COMPILER_LAUNCHER "${CCACHE_PROGRAM}") - set(CMAKE_C_COMPILER_LAUNCHER "${CCACHE_PROGRAM}") - message(STATUS "Using ccache: ${CCACHE_PROGRAM}") -endif() - -# --- WebKit --- -# WebKit is either prebuilt and distributed via NPM, or you can pass WEBKIT_DIR to use a local build. 
-# We cannot include their CMake build files (TODO: explain why, for now ask @paperdave why) -# -# On Unix, this will pull from NPM the single package that is needed and use that -if(WIN32) - set(STATIC_LIB_EXT "lib") - set(libJavaScriptCore "JavaScriptCore") - set(libWTF "WTF") -else() - set(STATIC_LIB_EXT "a") - set(libJavaScriptCore "libJavaScriptCore") - set(libWTF "libWTF") -endif() - -if(NOT WEBKIT_DIR) - set(BUN_WEBKIT_PACKAGE_NAME_SUFFIX "") - set(ASSERT_ENABLED "0") - - if(USE_DEBUG_JSC) - add_compile_definitions("BUN_DEBUG=1") - set(BUN_WEBKIT_PACKAGE_NAME_SUFFIX "-debug") - set(ASSERT_ENABLED "1") - elseif(NOT DEBUG AND NOT WIN32) - # Avoid waiting for LTO in local release builds outside of CI - if(USE_LTO) - set(BUN_WEBKIT_PACKAGE_NAME_SUFFIX "-lto") - else() - set(BUN_WEBKIT_PACKAGE_NAME_SUFFIX "") - endif() - - set(ASSERT_ENABLED "0") - endif() - - if(WIN32) - set(BUN_WEBKIT_PACKAGE_PLATFORM "windows") - elseif(APPLE) - set(BUN_WEBKIT_PACKAGE_PLATFORM "macos") - else() - set(BUN_WEBKIT_PACKAGE_PLATFORM "linux") - endif() - - if(ARCH STREQUAL "x86_64") - set(BUN_WEBKIT_PACKAGE_ARCH "amd64") - elseif(ARCH MATCHES "aarch64|arm64|arm") - set(BUN_WEBKIT_PACKAGE_ARCH "arm64") - endif() - - set(BUN_WEBKIT_PACKAGE_NAME "bun-webkit-${BUN_WEBKIT_PACKAGE_PLATFORM}-${BUN_WEBKIT_PACKAGE_ARCH}${BUN_WEBKIT_PACKAGE_NAME_SUFFIX}") - - message(STATUS "Using Pre-built WebKit: ${BUN_WEBKIT_PACKAGE_NAME}") - execute_process( - COMMAND "${SHELL}" - "${CMAKE_CURRENT_SOURCE_DIR}/scripts/download-webkit.${SCRIPT_EXTENSION}" - "${BUN_WORKDIR}/bun-webkit" - "${WEBKIT_TAG}" - "${BUN_WEBKIT_PACKAGE_NAME}" - WORKING_DIRECTORY ${BUN_WORKDIR} - ) - - if(NOT EXISTS "${BUN_WORKDIR}/bun-webkit") - message(FATAL_ERROR "Prebuilt WebKit package ${BUN_WEBKIT_PACKAGE_NAME} failed to install") - endif() - - set(WEBKIT_INCLUDE_DIR "${BUN_WORKDIR}/bun-webkit/include") - - if(APPLE) - set(ICU_INCLUDE_DIR "") - else() - set(ICU_INCLUDE_DIR "${BUN_WORKDIR}/bun-webkit/include/wtf/unicode") - endif() - - set(WEBKIT_LIB_DIR "${BUN_WORKDIR}/bun-webkit/lib") -elseif(WEBKIT_DIR STREQUAL "omit") - message(STATUS "Not using WebKit. This is only valid if you are only trying to build Zig code") -else() - # Expected to be WebKit/WebKitBuild/${CMAKE_BUILD_TYPE} - if(EXISTS "${WEBKIT_DIR}/cmakeconfig.h") - # You may need to run: - # make jsc-compile-debug jsc-copy-headers - include_directories( - "${WEBKIT_DIR}/" - "${WEBKIT_DIR}/JavaScriptCore/Headers/JavaScriptCore" - "${WEBKIT_DIR}/JavaScriptCore/PrivateHeaders" - "${WEBKIT_DIR}/bmalloc/Headers" - "${WEBKIT_DIR}/WTF/Headers" - ) - set(WEBKIT_LIB_DIR "${WEBKIT_DIR}/lib") - - if(USE_DEBUG_JSC) - add_compile_definitions("BUN_DEBUG=1") - set(ASSERT_ENABLED "1") - endif() - - message(STATUS "Using WebKit from ${WEBKIT_DIR}") - else() - if(NOT EXISTS "${WEBKIT_DIR}/lib/${libWTF}.${STATIC_LIB_EXT}" OR NOT EXISTS "${WEBKIT_DIR}/lib/${libJavaScriptCore}.${STATIC_LIB_EXT}") - if(WEBKIT_DIR MATCHES "src/bun.js/WebKit$") - message(FATAL_ERROR "WebKit directory ${WEBKIT_DIR} does not contain all the required files for Bun. Did you forget to init submodules?") - endif() - - message(FATAL_ERROR "WebKit directory ${WEBKIT_DIR} does not contain all the required files for Bun. 
Expected a path to the oven-sh/WebKit repository, or a path to a folder containing `include` and `lib`.") - endif() - - set(WEBKIT_INCLUDE_DIR "${WEBKIT_DIR}/include") - set(WEBKIT_LIB_DIR "${WEBKIT_DIR}/lib") - - message(STATUS "Using specified WebKit directory: ${WEBKIT_DIR}") - - set(ASSERT_ENABLED "0") - message(STATUS "WebKit assertions: OFF") - endif() -endif() - -# --- CMake Macros --- - -# Append the given dependencies to the source file -macro(WEBKIT_ADD_SOURCE_DEPENDENCIES _source _deps) - set(_tmp) - get_source_file_property(_tmp ${_source} OBJECT_DEPENDS) - - if(NOT _tmp) - set(_tmp "") - endif() - - foreach(f ${_deps}) - list(APPEND _tmp "${f}") - endforeach() - - set_source_files_properties(${_source} PROPERTIES OBJECT_DEPENDS "${_tmp}") - unset(_tmp) -endmacro() - -# --- BUILD --- -set(BUN_SRC "${CMAKE_CURRENT_SOURCE_DIR}/src") -set(BUN_DEPS_DIR "${BUN_SRC}/deps") -set(BUN_CODEGEN_SRC "${BUN_SRC}/codegen") - -if(NOT BUN_DEPS_OUT_DIR) - set(BUN_DEPS_OUT_DIR "${CMAKE_CURRENT_SOURCE_DIR}/build/bun-deps") -endif() - -set(BUN_RAW_SOURCES, "") - -file(GLOB BUN_CPP ${CONFIGURE_DEPENDS} - "${BUN_SRC}/deps/*.cpp" - "${BUN_SRC}/io/*.cpp" - "${BUN_SRC}/bun.js/modules/*.cpp" - "${BUN_SRC}/bun.js/bindings/*.cpp" - "${BUN_SRC}/bun.js/bindings/webcore/*.cpp" - "${BUN_SRC}/bun.js/bindings/sqlite/*.cpp" - "${BUN_SRC}/bun.js/bindings/webcrypto/*.cpp" - "${BUN_SRC}/bun.js/bindings/webcrypto/*/*.cpp" - "${BUN_SRC}/deps/picohttpparser/picohttpparser.c" -) -list(APPEND BUN_RAW_SOURCES ${BUN_CPP}) - -# -- uSockets -- -set(USOCKETS_SRC "${CMAKE_CURRENT_SOURCE_DIR}/packages/bun-usockets/src") -file(GLOB USOCKETS_FILES ${CONFIGURE_DEPENDS} - "${USOCKETS_SRC}/*.c" - "${USOCKETS_SRC}/eventing/*.c" - "${USOCKETS_SRC}/internal/*.c" - "${USOCKETS_SRC}/crypto/*.c" - "${USOCKETS_SRC}/crypto/*.cpp" -) -list(APPEND BUN_RAW_SOURCES ${USOCKETS_FILES}) - -# --- Classes Generator --- -file(GLOB BUN_CLASSES_TS ${CONFIGURE_DEPENDS} - "${BUN_SRC}/bun.js/*.classes.ts" - "${BUN_SRC}/bun.js/api/*.classes.ts" - "${BUN_SRC}/bun.js/test/*.classes.ts" - "${BUN_SRC}/bun.js/webcore/*.classes.ts" - "${BUN_SRC}/bun.js/node/*.classes.ts" -) -add_custom_command( - OUTPUT "${BUN_WORKDIR}/codegen/ZigGeneratedClasses.h" - "${BUN_WORKDIR}/codegen/ZigGeneratedClasses.cpp" - "${BUN_WORKDIR}/codegen/ZigGeneratedClasses+lazyStructureHeader.h" - "${BUN_WORKDIR}/codegen/ZigGeneratedClasses+DOMClientIsoSubspaces.h" - "${BUN_WORKDIR}/codegen/ZigGeneratedClasses+DOMIsoSubspaces.h" - "${BUN_WORKDIR}/codegen/ZigGeneratedClasses+lazyStructureImpl.h" - "${BUN_WORKDIR}/codegen/ZigGeneratedClasses.zig" - COMMAND ${BUN_EXECUTABLE} run "${BUN_CODEGEN_SRC}/generate-classes.ts" ${BUN_CLASSES_TS} "${BUN_WORKDIR}/codegen" - WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} - MAIN_DEPENDENCY "${BUN_CODEGEN_SRC}/generate-classes.ts" - DEPENDS ${BUN_CLASSES_TS} - VERBATIM - COMMENT "Generating *.classes.ts bindings" -) -list(APPEND BUN_RAW_SOURCES "${BUN_WORKDIR}/codegen/ZigGeneratedClasses.cpp") - -if(NOT NO_CODEGEN) - # --- ErrorCode Generator --- - file(GLOB NODE_ERRORS_TS ${CONFIGURE_DEPENDS} - "${BUN_SRC}/bun.js/bindings/ErrorCode.ts" - ) - add_custom_command( - OUTPUT "${BUN_WORKDIR}/codegen/ErrorCode+List.h" "${BUN_WORKDIR}/codegen/ErrorCode+Data.h" "${BUN_WORKDIR}/codegen/ErrorCode.zig" - COMMAND ${BUN_EXECUTABLE} run "${BUN_CODEGEN_SRC}/generate-node-errors.ts" "${BUN_WORKDIR}/codegen" - WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} - MAIN_DEPENDENCY "${BUN_CODEGEN_SRC}/generate-node-errors.ts" - DEPENDS ${NODE_ERRORS_TS} - VERBATIM - COMMENT 
"Generating ErrorCode.zig" - ) - - # This needs something to force it to be regenerated - WEBKIT_ADD_SOURCE_DEPENDENCIES( - "${BUN_SRC}/bun.js/bindings/ErrorCode.cpp" - "${BUN_WORKDIR}/codegen/ErrorCode+List.h" - ) - WEBKIT_ADD_SOURCE_DEPENDENCIES( - "${BUN_SRC}/bun.js/bindings/ErrorCode.h" - "${BUN_WORKDIR}/codegen/ErrorCode+Data.h" - ) -endif() - -# --- JSSink Generator --- -add_custom_command( - OUTPUT "${BUN_WORKDIR}/codegen/JSSink.cpp" - "${BUN_WORKDIR}/codegen/JSSink.h" - COMMAND ${BUN_EXECUTABLE} run "src/codegen/generate-jssink.ts" "${BUN_WORKDIR}/codegen" - VERBATIM - MAIN_DEPENDENCY "src/codegen/generate-jssink.ts" - WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} - COMMENT "Generating JSSink" - USES_TERMINAL -) -list(APPEND BUN_RAW_SOURCES "${BUN_WORKDIR}/codegen/JSSink.cpp") - -# --- .lut.h Generator --- -set(BUN_OBJECT_LUT_SOURCES - bun.js/bindings/BunObject.cpp - bun.js/bindings/ZigGlobalObject.lut.txt - bun.js/bindings/JSBuffer.cpp - bun.js/bindings/BunProcess.cpp - bun.js/bindings/ProcessBindingConstants.cpp - bun.js/bindings/ProcessBindingNatives.cpp -) -set(BUN_OBJECT_LUT_OUTPUTS "") -set(BUN_HASH_LUT_GENERATOR "${BUN_CODEGEN_SRC}/create-hash-table.ts") - -if(NOT BUN_LINK_ONLY) - macro(GENERATE_HASH_LUT _input _output _display_name) - if(NOT NO_CODEGEN) - add_custom_command( - OUTPUT ${_output} - MAIN_DEPENDENCY ${BUN_HASH_LUT_GENERATOR} - DEPENDS ${_input} - COMMAND ${BUN_EXECUTABLE} run ${BUN_HASH_LUT_GENERATOR} ${_input} ${_output} - VERBATIM - COMMENT "Generating ${_display_name}" - ) - endif() - - list(APPEND BUN_OBJECT_LUT_OUTPUTS "${_output}") - - # list(APPEND Bun_HEADERS ${_output}) - WEBKIT_ADD_SOURCE_DEPENDENCIES(${_input} ${_output}) - endmacro() - - foreach(_file ${BUN_OBJECT_LUT_SOURCES}) - if(NOT EXISTS "${BUN_SRC}/${_file}") - message(FATAL_ERROR "Could not find ${_file} needed for LUT generation") - endif() - - get_filename_component(_name ${_file} NAME_WE) - - # workaround for ZigGlobalObject - if(_name MATCHES "ZigGlobalObject") - set(_name "ZigGlobalObject") - endif() - - GENERATE_HASH_LUT(${BUN_SRC}/${_file} ${BUN_WORKDIR}/codegen/${_name}.lut.h ${_name}.lut.h) - endforeach() - - WEBKIT_ADD_SOURCE_DEPENDENCIES(${BUN_SRC}/bun.js/bindings/ZigGlobalObject.cpp ${BUN_WORKDIR}/codegen/ZigGlobalObject.lut.h) -endif() - -# --- Identifier Cache --- -if(NOT NO_CODEGEN) - set(BUN_IDENTIFIER_CACHE_OUT - "${BUN_SRC}/js_lexer/id_continue_bitset.blob" - "${BUN_SRC}/js_lexer/id_continue_bitset.meta.blob" - "${BUN_SRC}/js_lexer/id_start_bitset.blob" - "${BUN_SRC}/js_lexer/id_start_bitset.meta.blob") - add_custom_command( - OUTPUT ${BUN_IDENTIFIER_CACHE_OUT} - MAIN_DEPENDENCY "${BUN_SRC}/js_lexer/identifier_data.zig" - DEPENDS "${BUN_SRC}/js_lexer/identifier_cache.zig" - COMMAND ${ZIG_COMPILER} run "--zig-lib-dir" "${ZIG_LIB_DIR}" "--cache-dir" "${LOCAL_ZIG_CACHE_DIR}" "--global-cache-dir" "${GLOBAL_ZIG_CACHE_DIR}" "${BUN_SRC}/js_lexer/identifier_data.zig" - VERBATIM - COMMENT "Building Identifier Cache" - ) -endif() - -# --- Bundled TS/JS --- -# Note: It's not worth doing this in parallel at the CMake/Ninja level, because this bundling -# requires all the JS files to be known, but also Bun will use all cores during bundling anyways. 
-if(NOT NO_CODEGEN) - file(GLOB BUN_TS_MODULES ${CONFIGURE_DEPENDS} - "${BUN_SRC}/js/node/*.ts" - "${BUN_SRC}/js/node/*.js" - "${BUN_SRC}/js/bun/*.ts" - "${BUN_SRC}/js/bun/*.js" - "${BUN_SRC}/js/builtins/*.ts" - "${BUN_SRC}/js/builtins/*.js" - "${BUN_SRC}/js/thirdparty/*.js" - "${BUN_SRC}/js/thirdparty/*.ts" - "${BUN_SRC}/js/internal/*.js" - "${BUN_SRC}/js/internal/*.ts" - "${BUN_SRC}/js/internal/util/*.js" - "${BUN_SRC}/js/internal/fs/*.ts" - "${BUN_SRC}/js/node/*.js" - "${BUN_SRC}/js/node/*.ts" - "${BUN_SRC}/js/thirdparty/*.js" - "${BUN_SRC}/js/thirdparty/*.ts" - "${BUN_SRC}/js/internal-for-testing.ts" - ) - - file(GLOB CODEGEN_FILES ${CONFIGURE_DEPENDS} "${BUN_CODEGEN_SRC}/*.ts") - - add_custom_command( - OUTPUT - "${BUN_WORKDIR}/codegen/WebCoreJSBuiltins.cpp" - "${BUN_WORKDIR}/codegen/WebCoreJSBuiltins.h" - "${BUN_WORKDIR}/codegen/InternalModuleRegistryConstants.h" - "${BUN_WORKDIR}/codegen/InternalModuleRegistry+createInternalModuleById.h" - "${BUN_WORKDIR}/codegen/InternalModuleRegistry+enum.h" - "${BUN_WORKDIR}/codegen/InternalModuleRegistry+numberOfModules.h" - "${BUN_WORKDIR}/codegen/NativeModuleImpl.h" - "${BUN_WORKDIR}/codegen/ResolvedSourceTag.zig" - "${BUN_WORKDIR}/codegen/SyntheticModuleType.h" - "${BUN_WORKDIR}/codegen/GeneratedJS2Native.h" - "${BUN_SRC}/bun.js/bindings/GeneratedJS2Native.zig" - COMMAND ${BUN_EXECUTABLE} run "${BUN_SRC}/codegen/bundle-modules.ts" "--debug=${DEBUG}" "${BUN_WORKDIR}" - DEPENDS ${BUN_TS_MODULES} ${CODEGEN_FILES} - WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} - COMMENT "Bundling JS" - ) -endif() - -WEBKIT_ADD_SOURCE_DEPENDENCIES( - "${BUN_SRC}/bun.js/bindings/InternalModuleRegistry.cpp" - "${BUN_WORKDIR}/codegen/InternalModuleRegistryConstants.h" -) -list(APPEND BUN_RAW_SOURCES "${BUN_WORKDIR}/codegen/WebCoreJSBuiltins.cpp") - -# --- Peechy API --- -# if(NOT NO_CODEGEN) -# add_custom_command( -# OUTPUT "${BUN_SRC}/api/schema.js" -# "${BUN_SRC}/api/schema.d.ts" -# "${BUN_SRC}/api/schema.zig" -# WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}" -# COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/node_modules/.bin/peechy" -# "--schema" "${BUN_SRC}/api/schema.peechy" -# "--esm" "${BUN_SRC}/api/schema.js" -# "--ts" "${BUN_SRC}/api/schema.d.ts" -# "--zig" "${BUN_SRC}/api/schema.zig" -# COMMAND "${ZIG_COMPILER}" "fmt" "src/api/schema.zig" -# COMMAND "${PRETTIER}" "--config=.prettierrc.cjs" "--write" "src/api/schema.js" "src/api/schema.d.ts" -# DEPENDS "${BUN_SRC}/api/schema.peechy" -# COMMENT "Building schema" -# ) -# add_custom_command( -# OUTPUT "${BUN_SRC}/analytics/analytics_schema.zig" -# WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}" -# COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/node_modules/.bin/peechy" -# "--schema" "${BUN_SRC}/analytics/schema.peechy" -# "--zig" "${BUN_SRC}/analytics/analytics_schema.zig" -# COMMAND "${ZIG_COMPILER}" "fmt" "${BUN_SRC}/analytics/analytics_schema.zig" -# DEPENDS "${BUN_SRC}/api/schema.peechy" -# COMMENT "Building analytics_schema.zig" -# ) -# endif() - -# --- Runtime.js --- -if(NOT NO_CODEGEN) - add_custom_command( - OUTPUT "src/fallback.out.js" - WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}" - COMMAND "${ESBUILD}" "--target=esnext" "--bundle" "src/fallback.ts" "--format=iife" "--platform=browser" "--minify" "--outfile=src/fallback.out.js" - DEPENDS "src/fallback.ts" - ) -endif() - -# --- Zig Object --- -file(GLOB ZIG_FILES - "${BUN_SRC}/*.zig" - "${BUN_SRC}/*/*.zig" - "${BUN_SRC}/*/*/*.zig" - "${BUN_SRC}/*/*/*/*.zig" - "${BUN_SRC}/*/*/*/*/*.zig" +cmake_minimum_required(VERSION 3.24) +message(STATUS "Configuring Bun") + 
+list(APPEND CMAKE_MODULE_PATH + ${CMAKE_SOURCE_DIR}/cmake + ${CMAKE_SOURCE_DIR}/cmake/targets + ${CMAKE_SOURCE_DIR}/cmake/tools + ${CMAKE_SOURCE_DIR}/cmake/analysis + ${CMAKE_SOURCE_DIR}/cmake/scripts ) -if(NOT BUN_ZIG_OBJ_FORMAT) - # To use LLVM bitcode from Zig, more work needs to be done. Currently, an install of - # LLVM 18.1.7 does not compatible with what bitcode Zig 0.13 outputs (has LLVM 18.1.7) - # Change to "bc" to experiment, "Invalid record" means it is not valid output. - set(BUN_ZIG_OBJ_FORMAT "obj") +include(Policies) +include(Globals) + +# --- Compilers --- + +if(CMAKE_HOST_APPLE) + include(SetupMacSDK) endif() +include(SetupLLVM) +include(SetupCcache) -if(NOT BUN_ZIG_OBJ_DIR) - set(BUN_ZIG_OBJ_DIR "${BUN_WORKDIR}/CMakeFiles") -endif() - -get_filename_component(BUN_ZIG_OBJ_DIR "${BUN_ZIG_OBJ_DIR}" REALPATH BASE_DIR "${CMAKE_BINARY_DIR}") - -if(WIN32) - set(BUN_ZIG_OBJ "${BUN_ZIG_OBJ_DIR}/bun-zig.o") -else() - set(BUN_ZIG_OBJ "${BUN_ZIG_OBJ_DIR}/bun-zig.o") -endif() - -set(USES_TERMINAL_NOT_IN_CI "") - -if(NOT CI) - set(USES_TERMINAL_NOT_IN_CI "USES_TERMINAL") -endif() - -if(NOT BUN_LINK_ONLY AND NOT BUN_CPP_ONLY) - add_custom_command( - OUTPUT "${BUN_ZIG_OBJ}" - COMMAND - "${ZIG_COMPILER}" "build" "obj" - "--zig-lib-dir" "${ZIG_LIB_DIR}" - "--prefix" "${BUN_ZIG_OBJ_DIR}" - "--verbose" - "-Dgenerated-code=${BUN_WORKDIR}/codegen" - "-freference-trace=10" - "-Dversion=${Bun_VERSION}" - "-Dcanary=${CANARY}" - "-Doptimize=${ZIG_OPTIMIZE}" - "-Dcpu=${CPU_TARGET}" - "-Dtarget=${ZIG_TARGET}" - "-Denable_logs=${ENABLE_LOGS}" - "-Dreported_nodejs_version=${REPORTED_NODEJS_VERSION}" - "-Dobj_format=${BUN_ZIG_OBJ_FORMAT}" - "--cache-dir" "${LOCAL_ZIG_CACHE_DIR}" - "--global-cache-dir" "${GLOBAL_ZIG_CACHE_DIR}" - DEPENDS - "${CMAKE_CURRENT_SOURCE_DIR}/build.zig" - "${ZIG_FILES}" - "${BUN_WORKDIR}/codegen/ZigGeneratedClasses.zig" - "${BUN_WORKDIR}/codegen/ErrorCode.zig" - "${BUN_WORKDIR}/codegen/ResolvedSourceTag.zig" - "${BUN_IDENTIFIER_CACHE_OUT}" - "${BUN_SRC}/api/schema.zig" - "${BUN_SRC}/bun.js/bindings/GeneratedJS2Native.zig" - WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} - COMMENT "Building zig code" - VERBATIM - - # This is here to show Zig's progress indicator - ${USES_TERMINAL_NOT_IN_CI} - ) -endif() - -if(WIN32) - list(APPEND BUN_RAW_SOURCES "${BUN_SRC}/bun.js/bindings/windows/musl-memmem.c") - include_directories("${BUN_SRC}/bun.js/bindings/windows") -endif() - -if(NOT BUN_CPP_ARCHIVE) - # TODO: unified sources - set(BUN_SOURCES ${BUN_RAW_SOURCES}) -else() - # used by ci - set(BUN_SOURCES "") - add_link_options("${BUN_CPP_ARCHIVE}") -endif() - -# -- Windows resources (app icon) -- -if(CANARY GREATER 0) - set(Bun_VERSION_WITH_TAG "${Bun_VERSION}-canary.${CANARY}") -else() - set(Bun_VERSION_WITH_TAG "${Bun_VERSION}") -endif() - -if(WIN32) - set(BUN_ICO_PATH "${BUN_SRC}/bun.ico") - configure_file("${BUN_SRC}/windows-app-info.rc" "${BUN_WORKDIR}/CMakeFiles/windows-app-info.rc") - list(APPEND BUN_SOURCES "${BUN_WORKDIR}/CMakeFiles/windows-app-info.rc") -endif() - -# -- The Buntime™️ --- -if(BUN_TIDY_ONLY OR BUN_TIDY_ONLY_EXTRA) - add_library(${bun} OBJECT "${BUN_SOURCES}") -elseif(NOT BUN_CPP_ONLY) - add_executable(${bun} "${BUN_SOURCES}" "${BUN_ZIG_OBJ}") -else() - add_executable(${bun} "${BUN_SOURCES}") -endif() - -set_target_properties(${bun} PROPERTIES - CXX_STANDARD 20 - CXX_STANDARD_REQUIRED YES - CXX_EXTENSIONS YES - CXX_VISIBILITY_PRESET hidden - C_STANDARD 17 - C_STANDARD_REQUIRED YES - VISIBILITY_INLINES_HIDDEN YES -) - -if(APPLE) - 
add_compile_definitions("__DARWIN_NON_CANCELABLE=1") -endif() - -add_compile_definitions( - - # TODO: are all of these variables strictly necessary? - "_HAS_EXCEPTIONS=0" - "LIBUS_USE_OPENSSL=1" - "LIBUS_USE_BORINGSSL=1" - "WITH_BORINGSSL=1" - "STATICALLY_LINKED_WITH_JavaScriptCore=1" - "STATICALLY_LINKED_WITH_WTF=1" - "STATICALLY_LINKED_WITH_BMALLOC=1" - "BUILDING_WITH_CMAKE=1" - "JSC_OBJC_API_ENABLED=0" - "BUN_SINGLE_THREADED_PER_VM_ENTRY_SCOPE=1" - "NAPI_EXPERIMENTAL=ON" - "NOMINMAX" - "IS_BUILD" - "BUILDING_JSCONLY__" - "BUN_DYNAMIC_JS_LOAD_PATH=\"${BUN_WORKDIR}/js\"" - "REPORTED_NODEJS_VERSION=\"${REPORTED_NODEJS_VERSION}\"" -) - -if(NOT ASSERT_ENABLED) - if(APPLE) - add_compile_definitions("_LIBCXX_ENABLE_ASSERTIONS=0") - add_compile_definitions("_LIBCPP_HARDENING_MODE=_LIBCPP_HARDENING_MODE_NONE") - endif() - - add_compile_definitions("NDEBUG=1") -else() - if(APPLE) - add_compile_definitions("_LIBCXX_ENABLE_ASSERTIONS=1") - add_compile_definitions("_LIBCPP_HARDENING_MODE=_LIBCPP_HARDENING_MODE_DEBUG") - elseif(CMAKE_SYSTEM_NAME STREQUAL "Linux") - add_compile_definitions("_GLIBCXX_ASSERTIONS=1") - endif() - - add_compile_definitions("ASSERT_ENABLED=1") -endif() - -if(ICU_INCLUDE_DIR) - include_directories(${ICU_INCLUDE_DIR}) -endif() - -include_directories( - ${CMAKE_CURRENT_SOURCE_DIR}/packages/ - ${CMAKE_CURRENT_SOURCE_DIR}/packages/bun-usockets - ${CMAKE_CURRENT_SOURCE_DIR}/packages/bun-usockets/src - ${CMAKE_CURRENT_SOURCE_DIR}/src/bun.js/bindings - ${CMAKE_CURRENT_SOURCE_DIR}/src/bun.js/bindings/webcore - ${CMAKE_CURRENT_SOURCE_DIR}/src/bun.js/bindings/webcrypto - ${CMAKE_CURRENT_SOURCE_DIR}/src/bun.js/bindings/sqlite - ${CMAKE_CURRENT_SOURCE_DIR}/src/bun.js/modules - ${CMAKE_CURRENT_SOURCE_DIR}/src/js/builtins - ${CMAKE_CURRENT_SOURCE_DIR}/src/napi - ${CMAKE_CURRENT_SOURCE_DIR}/src/deps - ${CMAKE_CURRENT_SOURCE_DIR}/src/deps/picohttpparser - ${WEBKIT_INCLUDE_DIR} - "${BUN_WORKDIR}/codegen" -) - -# -- BUN_CPP_ONLY Target -if(NOT BUN_CPP_ARCHIVE) - if(BUN_CPP_ONLY) - if(NOT WIN32) - string(REPLACE ";" ".o\n " BUN_OBJECT_LIST "${BUN_SOURCES}.o") - string(REPLACE "${BUN_WORKDIR}/" "CMakeFiles/${bun}.dir/" BUN_OBJECT_LIST "${BUN_OBJECT_LIST}") - string(REPLACE "${CMAKE_CURRENT_SOURCE_DIR}/" "CMakeFiles/${bun}.dir/" BUN_OBJECT_LIST "${BUN_OBJECT_LIST}") - write_file("${BUN_WORKDIR}/compile-cpp-only.sh" - "#!/usr/bin/env bash\n" - "# this file is generated in CMakeLists.txt\n" - "set -ex\n" - "OBJ_LIST=(\n ${BUN_OBJECT_LIST}\n)\n" - "ninja \${OBJ_LIST[@]} $@\n" - "\"${AR}\" rcvs bun-cpp-objects.a \${OBJ_LIST[@]}\n" - "echo '-> bun-cpp-objects.a'\n" - ) - else() - string(REPLACE ";" ".obj\",\n \"" BUN_OBJECT_LIST "\"${BUN_SOURCES}.obj\"") - string(REPLACE "rc.obj" "rc.res" BUN_OBJECT_LIST "${BUN_OBJECT_LIST}") - string(REPLACE "${BUN_WORKDIR}/" "CMakeFiles/${bun}.dir/" BUN_OBJECT_LIST "${BUN_OBJECT_LIST}") - string(REPLACE "${CMAKE_CURRENT_SOURCE_DIR}/" "CMakeFiles/${bun}.dir/" BUN_OBJECT_LIST "${BUN_OBJECT_LIST}") - write_file("${BUN_WORKDIR}/compile-cpp-only.ps1" - "# this file is generated in CMakeLists.txt\n" - "$ErrorActionPreference = \"Stop\"\n" - "$ObjectFiles=@(\n ${BUN_OBJECT_LIST}\n)\n" - "ninja @ObjectFiles @args\n" - "& \"${AR}\" rcvs bun-cpp-objects.a @ObjectFiles\n" - "Write-Host '-> bun-cpp-objects.a'\n" - ) - endif() - endif() -else() - set_target_properties(${bun} PROPERTIES LINKER_LANGUAGE CXX) -endif() - -# --- clang and linker flags --- -if(CMAKE_BUILD_TYPE STREQUAL "Debug") - if(NOT WIN32) - target_compile_options(${bun} PUBLIC -O0 -g -g3 -ggdb -gdwarf-4 - 
-Werror=return-type - -Werror=return-stack-address - -Werror=implicit-function-declaration - -Werror=uninitialized - -Werror=conditional-uninitialized - -Werror=suspicious-memaccess - -Werror=int-conversion - -Werror=nonnull - -Werror=move - -Werror=sometimes-uninitialized - -Werror=unused - -Wno-unused-function - -Wno-nullability-completeness - -Werror - -fsanitize=null - -fsanitize-recover=all - -fsanitize=bounds - -fsanitize=return - -fsanitize=nullability-arg - -fsanitize=nullability-assign - -fsanitize=nullability-return - -fsanitize=returns-nonnull-attribute - -fsanitize=unreachable - ) - target_link_libraries(${bun} PRIVATE -fsanitize=null) - else() - target_compile_options(${bun} PUBLIC /Od /Z7) - endif() - - add_compile_definitions("BUN_DEBUG=1") -elseif(CMAKE_BUILD_TYPE STREQUAL "Release") - set(LTO_FLAG "") - - if(NOT WIN32) - if(USE_LTO) - list(APPEND LTO_FLAG "-flto=full" "-emit-llvm" "-fwhole-program-vtables" "-fforce-emit-vtables") - endif() - - # Leave -Werror=unused off in release builds so we avoid errors from being used in ASSERT - target_compile_options(${bun} PUBLIC -O3 ${LTO_FLAG} -g1 - -Werror=return-type - -Werror=return-stack-address - -Werror=implicit-function-declaration - -Werror=uninitialized - -Werror=conditional-uninitialized - -Werror=suspicious-memaccess - -Werror=int-conversion - -Werror=nonnull - -Werror=move - -Werror=sometimes-uninitialized - -Wno-nullability-completeness - -Werror - ) - else() - set(LTO_LINK_FLAG "") - - if(USE_LTO) - target_compile_options(${bun} PUBLIC -Xclang -emit-llvm-bc) - - list(APPEND LTO_FLAG "-flto=full") - list(APPEND LTO_LINK_FLAG "-flto=full") - list(APPEND LTO_LINK_FLAG "/LTCG") - list(APPEND LTO_LINK_FLAG "/OPT:REF") - list(APPEND LTO_LINK_FLAG "/OPT:NOICF") - endif() - - target_compile_options(${bun} PUBLIC - /O2 - ${LTO_FLAG} - /Gy - /Gw - /GF - /GA - ) - target_link_options(${bun} PUBLIC - ${LTO_LINK_FLAG} - /DEBUG:FULL - - /delayload:ole32.dll - /delayload:WINMM.dll - /delayload:dbghelp.dll - /delayload:VCRUNTIME140_1.dll - - # libuv loads these two immediately, but for some reason it seems to still be slightly faster to delayload them - /delayload:WS2_32.dll - /delayload:WSOCK32.dll - /delayload:ADVAPI32.dll - /delayload:IPHLPAPI.dll - ) - endif() -endif() - -if(NOT CI AND NOT WIN32) - target_compile_options(${bun} PRIVATE -fdiagnostics-color=always) -endif() - -if(NOT CPU_TARGET STREQUAL "native") - # passing -march=native to clang will break older systems - # by default on x64, CPU_TARGET is set to "haswell" or "nehalem" depending on baseline - # on arm, this argument will not be passed. - target_compile_options(${bun} PUBLIC "-march=${CPU_TARGET}") -else() - if(APPLE AND ARCH STREQUAL "aarch64") - # On arm macOS, we can set it to a minimum of the M1 cpu set. this might be the default already. 
- target_compile_options(${bun} PUBLIC "-mcpu=apple-m1") - endif() - - if(NOT WIN32 AND NOT APPLE AND ARCH STREQUAL "aarch64") - # on arm64 linux, we set a minimum of armv8 - target_compile_options(${bun} PUBLIC -march=armv8-a+crc -mtune=ampere1) - endif() -endif() - -target_compile_options(${bun} PUBLIC -ferror-limit=${ERROR_LIMIT}) - -if(WIN32) - add_compile_definitions( - "WIN32" - "_WINDOWS" - "WIN32_LEAN_AND_MEAN=1" - "_CRT_SECURE_NO_WARNINGS" - "BORINGSSL_NO_CXX=1" # lol - ) - - # set_property(TARGET ${bun} PROPERTY MSVC_RUNTIME_LIBRARY "MultiThreaded$<$:Debug>") - set_property(TARGET ${bun} PROPERTY MSVC_RUNTIME_LIBRARY "MultiThreaded") - - target_compile_options(${bun} PUBLIC "/EHsc" "/GR-" -Xclang -fno-c++-static-destructors) - - target_link_options(${bun} PUBLIC "/STACK:0x1200000,0x100000" "/DEF:${BUN_SRC}/symbols.def" "/errorlimit:0") -else() - target_compile_options(${bun} PUBLIC - -mtune=${CPU_TARGET} - -fconstexpr-steps=2542484 - -fconstexpr-depth=54 - -fno-exceptions - -fno-asynchronous-unwind-tables - -fno-unwind-tables - -fno-c++-static-destructors - -fvisibility=hidden - -fvisibility-inlines-hidden - -fno-rtti - -fno-omit-frame-pointer - -mno-omit-leaf-frame-pointer - -fno-pic - -fno-pie - -faddrsig - ) -endif() - -if(APPLE) - target_link_options(${bun} PUBLIC "-dead_strip") - target_link_options(${bun} PUBLIC "-dead_strip_dylibs") - target_link_options(${bun} PUBLIC "-Wl,-stack_size,0x1200000") - target_link_options(${bun} PUBLIC "-exported_symbols_list" "${BUN_SRC}/symbols.txt") - set_target_properties(${bun} PROPERTIES LINK_DEPENDS "${BUN_SRC}/symbols.txt") - target_link_options(${bun} PUBLIC "-fno-keep-static-consts") - target_link_libraries(${bun} PRIVATE "resolv") -endif() - -if(UNIX AND NOT APPLE) - target_link_options(${bun} PUBLIC - -fuse-ld=lld-${LLVM_VERSION} - -fno-pic - -static-libstdc++ - -static-libgcc - "-Wl,-no-pie" - "-Wl,-icf=safe" - "-Wl,--as-needed" - "-Wl,--gc-sections" - "-Wl,-z,stack-size=12800000" - "-Wl,--wrap=fcntl" - "-Wl,--wrap=fcntl64" - "-Wl,--wrap=stat64" - "-Wl,--wrap=pow" - "-Wl,--wrap=exp" - "-Wl,--wrap=expf" - "-Wl,--wrap=log" - "-Wl,--wrap=log2" - "-Wl,--wrap=lstat" - "-Wl,--wrap=stat64" - "-Wl,--wrap=stat" - "-Wl,--wrap=fstat" - "-Wl,--wrap=fstatat" - "-Wl,--wrap=lstat64" - "-Wl,--wrap=fstat64" - "-Wl,--wrap=fstatat64" - "-Wl,--wrap=mknod" - "-Wl,--wrap=mknodat" - "-Wl,--wrap=statx" - "-Wl,--wrap=fmod" - "-Wl,--compress-debug-sections=zlib" - "-Bsymbolics-functions" - "-rdynamic" - "-Wl,--dynamic-list=${BUN_SRC}/symbols.dyn" - "-Wl,--version-script=${BUN_SRC}/linker.lds" - -Wl,-z,lazy - -Wl,-z,norelro - ) - - target_link_libraries(${bun} PRIVATE "c") - target_link_libraries(${bun} PRIVATE "pthread") - target_link_libraries(${bun} PRIVATE "dl") - - if(NOT USE_STATIC_LIBATOMIC) - target_link_libraries(${bun} PUBLIC "libatomic.so") - else() - target_link_libraries(${bun} PRIVATE "libatomic.a") - endif() - - if(USE_SYSTEM_ICU) - target_link_libraries(${bun} PRIVATE "libicudata.a") - target_link_libraries(${bun} PRIVATE "libicui18n.a") - target_link_libraries(${bun} PRIVATE "libicuuc.a") - else() - target_link_libraries(${bun} PRIVATE "${WEBKIT_LIB_DIR}/libicudata.a") - target_link_libraries(${bun} PRIVATE "${WEBKIT_LIB_DIR}/libicui18n.a") - target_link_libraries(${bun} PRIVATE "${WEBKIT_LIB_DIR}/libicuuc.a") - endif() - - set_target_properties(${bun} PROPERTIES LINK_DEPENDS "${BUN_SRC}/linker.lds") - set_target_properties(${bun} PROPERTIES LINK_DEPENDS "${BUN_SRC}/symbols.dyn") -endif() - -# --- ICU --- -if(APPLE) - 
target_link_libraries(${bun} PRIVATE "icucore") -endif() - -# --- Stripped Binary "bun" -if(CMAKE_BUILD_TYPE STREQUAL "Release" AND NOT WIN32 AND NOT ASSERT_ENABLED) - # if(CI AND APPLE) - if(APPLE) - add_custom_command( - TARGET ${bun} - POST_BUILD - COMMAND ${DSYMUTIL} -o ${BUN_WORKDIR}/${bun}.dSYM ${BUN_WORKDIR}/${bun} - COMMENT "Generating .dSYM" - ) - endif() - - add_custom_command( - TARGET ${bun} - POST_BUILD - COMMAND ${STRIP} -s -x -S -o ${BUN_WORKDIR}/bun ${BUN_WORKDIR}/${bun} - COMMENT "Stripping Symbols" - ) -endif() - -if(WIN32) - # Kill all instances of bun before linking. - # This is necessary because the file is locked by the process. - add_custom_command( - TARGET ${bun} - PRE_LINK - COMMAND - "powershell" - "/C" - "Stop-Process -Name '${bun}' -Force -ErrorAction SilentlyContinue; exit 0" - ) -endif() - -# --- Dependencies --- -if(USE_CUSTOM_ZLIB) - include_directories(${BUN_DEPS_DIR}/zlib) - - if(WIN32) - target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/zlib.lib") - else() - target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/libz.a") - endif() -else() - if(WIN32) - target_link_libraries(${bun} PRIVATE "${BUN_DEPS_DIR}/zlib_maybethisworks.lib") - else() - find_package(ZLIB REQUIRED) - target_link_libraries(${bun} PRIVATE ZLIB::ZLIB) - endif() -endif() - -if(USE_CUSTOM_BORINGSSL) - include_directories(${BUN_DEPS_DIR}/boringssl/include) - - if(WIN32) - target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/crypto.lib") - target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/ssl.lib") - target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/decrepit.lib") - else() - target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/libcrypto.a") - target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/libssl.a") - target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/libdecrepit.a") - endif() -else() - include(FindBoringSSL) - FindBoringSSL(${bun}) -endif() - -if(USE_CUSTOM_LIBARCHIVE) - include_directories(${BUN_DEPS_DIR}/libarchive/include) - - if(WIN32) - target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/archive.lib") - else() - target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/libarchive.a") - endif() -else() - find_package(LibArchive REQUIRED) - target_link_libraries(${bun} PRIVATE LibArchive::LibArchive) -endif() - -if(USE_CUSTOM_LIBDEFLATE) - include_directories(${BUN_DEPS_DIR}/libdeflate) - - if(WIN32) - target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/deflate.lib") - else() - target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/libdeflate.a") - endif() -else() - find_package(LibDeflate REQUIRED) - target_link_libraries(${bun} PRIVATE LibDeflate::LibDeflate) -endif() - -if(USE_CUSTOM_MIMALLOC) - include_directories(${BUN_DEPS_DIR}/mimalloc/include) - - if(WIN32) - target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/mimalloc.lib") - elseif(APPLE) - if(USE_DEBUG_JSC OR CMAKE_BUILD_TYPE STREQUAL "Debug") - message(STATUS "Using debug mimalloc") - target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/libmimalloc-debug.o") - else() - # Note: https://github.com/microsoft/mimalloc/issues/512 - # It may have been a bug in our code at the time. 
- target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/libmimalloc.o") - endif() - else() - if(USE_DEBUG_JSC OR CMAKE_BUILD_TYPE STREQUAL "Debug") - message(STATUS "Using debug mimalloc") - target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/libmimalloc-debug.a") - else() - target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/libmimalloc.o") - endif() - endif() -else() - find_package(mimalloc REQUIRED) - target_link_libraries(${bun} PRIVATE mimalloc) -endif() - -if(USE_CUSTOM_ZSTD) - include_directories(${BUN_DEPS_DIR}/zstd/include) - - if(WIN32) - target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/zstd.lib") - else() - target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/libzstd.a") - endif() -else() - find_package(zstd CONFIG REQUIRED) - target_link_libraries(${bun} PRIVATE zstd::libzstd) -endif() - -if(USE_CUSTOM_CARES) - include_directories(${BUN_DEPS_DIR}/c-ares/include) - - if(WIN32) - target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/cares.lib") - else() - target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/libcares.a") - endif() -else() - find_package(c-ares CONFIG REQUIRED) - target_link_libraries(${bun} PRIVATE c-ares::cares) -endif() - -if(USE_CUSTOM_TINYCC) - if(WIN32) - target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/tcc.lib") - else() - target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/libtcc.a") - endif() -else() - find_package(tinycc REQUIRED) - target_link_libraries(${bun} PRIVATE tinycc::tinycc) -endif() - -if(USE_CUSTOM_LOLHTML) - if(WIN32) - target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/lolhtml.lib") - else() - target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/liblolhtml.a") - endif() -else() - find_package(lolhtml REQUIRED) - target_link_libraries(${bun} PRIVATE lolhtml::lolhtml) -endif() - -if(WIN32) - if(USE_CUSTOM_LIBUV) - target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/libuv.lib") - include_directories(${bun} PRIVATE "${BUN_DEPS_DIR}/libuv/include") - else() - find_package(libuv CONFIG REQUIRED) - target_link_libraries(${bun} PRIVATE $,libuv::uv_a,libuv::uv>) - endif() -endif() - -if(USE_STATIC_SQLITE) - add_library(sqlite3 STATIC src/bun.js/bindings/sqlite/sqlite3.c) - target_include_directories(sqlite3 PUBLIC src/bun.js/bindings/sqlite) - target_compile_definitions(sqlite3 PRIVATE - "SQLITE_ENABLE_COLUMN_METADATA=" - "SQLITE_MAX_VARIABLE_NUMBER=250000" - "SQLITE_ENABLE_RTREE=1" - "SQLITE_ENABLE_FTS3=1" - "SQLITE_ENABLE_FTS3_PARENTHESIS=1" - "SQLITE_ENABLE_FTS5=1" - "SQLITE_ENABLE_JSON1=1" - "SQLITE_ENABLE_MATH_FUNCTIONS=1" - ) - - if(WIN32) - target_compile_options(sqlite3 PRIVATE /MT /U_DLL) - endif() - - target_link_libraries(${bun} PRIVATE sqlite3) - message(STATUS "Using static sqlite3") - target_compile_definitions(${bun} PRIVATE "LAZY_LOAD_SQLITE=0") -else() - message(STATUS "Using dynamicly linked sqlite3") - target_compile_definitions(${bun} PRIVATE "LAZY_LOAD_SQLITE=1") -endif() - -# -- Brotli -- -set(BROTLI_SRC "${CMAKE_CURRENT_SOURCE_DIR}/src/deps/brotli") -file(GLOB BROTLI_FILES ${CONFIGURE_DEPENDS} - "${BROTLI_SRC}/common/*.c" - "${BROTLI_SRC}/enc/*.c" - "${BROTLI_SRC}/dec/*.c" -) -add_library(brotli STATIC ${BROTLI_FILES}) -target_include_directories(brotli PRIVATE "${BROTLI_SRC}/include") -target_compile_definitions(brotli PRIVATE "BROTLI_STATIC") - -if(WIN32) - target_compile_options(brotli PRIVATE /MT /U_DLL) -endif() - -target_link_libraries(${bun} PRIVATE brotli) -include_directories("${BUN_DEPS_DIR}/brotli/include") - -if(USE_CUSTOM_LSHPACK) - 
include_directories(${BUN_DEPS_DIR}/ls-hpack) - - if(WIN32) - include_directories(${BUN_DEPS_DIR}/ls-hpack/compat/queue) - target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/lshpack.lib") - else() - target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/liblshpack.a") - endif() -else() - find_package(lshpack REQUIRED) - target_link_libraries(${bun} PRIVATE lshpack) -endif() - -if(NOT WIN32) - target_link_libraries(${bun} PRIVATE "${WEBKIT_LIB_DIR}/libWTF.a") - target_link_libraries(${bun} PRIVATE "${WEBKIT_LIB_DIR}/libJavaScriptCore.a") - - if(NOT APPLE OR EXISTS "${WEBKIT_LIB_DIR}/libbmalloc.a") - target_link_libraries(${bun} PRIVATE "${WEBKIT_LIB_DIR}/libbmalloc.a") - endif() -else() - target_link_libraries(${bun} PRIVATE - "${WEBKIT_LIB_DIR}/WTF.lib" - "${WEBKIT_LIB_DIR}/JavaScriptCore.lib" - "${WEBKIT_LIB_DIR}/sicudt.lib" - "${WEBKIT_LIB_DIR}/sicuin.lib" - "${WEBKIT_LIB_DIR}/sicuuc.lib" - winmm - bcrypt - ntdll - userenv - dbghelp - wsock32 # ws2_32 required by TransmitFile aka sendfile on windows - delayimp.lib - ) -endif() - -if(BUN_LINK_ONLY) - message(STATUS "NOTE: BUN_LINK_ONLY is ON, this build config will only link the Bun executable") -endif() - -if(BUN_CPP_ONLY) - message(STATUS "NOTE: BUN_CPP_ONLY is ON, this build will only work with 'compile-cpp-only.${SCRIPT_EXTENSION}'") -endif() - -if(NO_CODEGEN) - message(STATUS "NOTE: NO_CODEGEN is ON, this build expects ./codegen to exist") -endif() - -if(BUN_TIDY_ONLY) - find_program(CLANG_TIDY_EXE NAMES "clang-tidy") - set(CLANG_TIDY_COMMAND "${CLANG_TIDY_EXE}" "-checks=-*,clang-analyzer-*,-clang-analyzer-webkit.UncountedLambdaCapturesChecker" "--fix" "--fix-errors" "--format-style=webkit" "--warnings-as-errors=*") - set_target_properties(${bun} PROPERTIES CXX_CLANG_TIDY "${CLANG_TIDY_COMMAND}") -endif() - -if(BUN_TIDY_ONLY_EXTRA) - find_program(CLANG_TIDY_EXE NAMES "clang-tidy") - set(CLANG_TIDY_COMMAND "${CLANG_TIDY_EXE}" "-checks=-*,clang-analyzer-*,performance-*,-clang-analyzer-webkit.UncountedLambdaCapturesChecker" "--fix" "--fix-errors" "--format-style=webkit" "--warnings-as-errors=*") - set_target_properties(${bun} PROPERTIES CXX_CLANG_TIDY "${CLANG_TIDY_COMMAND}") +# --- Project --- + +parse_package_json(VERSION_VARIABLE DEFAULT_VERSION) +optionx(VERSION STRING "The version of Bun" DEFAULT ${DEFAULT_VERSION}) +project(Bun VERSION ${VERSION}) +include(Options) +include(CompilerFlags) + +# --- Tools --- + +include(SetupGit) +include(SetupBuildkite) +include(SetupBun) +include(SetupEsbuild) +include(SetupZig) +include(SetupRust) + +# --- Targets --- + +include(BuildBun) + +# --- Analysis --- + +if(ENABLE_ANALYSIS) + include(RunClangFormat) + include(RunClangTidy) + include(RunZigFormat) + include(RunPrettier) endif() diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 86d148847d..e3ff1de2e3 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -30,7 +30,7 @@ $ sudo dnf install cargo ccache cmake git golang libtool ninja-build pkg-config ``` ```bash#openSUSE Tumbleweed -$ sudo zypper install go cmake ninja automake git rustup && rustup toolchain install stable +$ sudo zypper install go cmake ninja automake git icu rustup && rustup toolchain install stable ``` {% /codetabs %} @@ -63,7 +63,7 @@ Bun requires LLVM 16 (`clang` is part of LLVM). 
This version requirement is to m {% codetabs %} ```bash#macOS (Homebrew) -$ brew install llvm@16 +$ brew install llvm@18 ``` ```bash#Ubuntu/Debian @@ -77,8 +77,8 @@ $ sudo pacman -S llvm clang lld ```bash#Fedora $ sudo dnf install 'dnf-command(copr)' -$ sudo dnf copr enable -y @fedora-llvm-team/llvm-snapshots -$ sudo dnf install llvm clang lld +$ sudo dnf copr enable -y @fedora-llvm-team/llvm17 +$ sudo dnf install llvm16 clang16 lld16-devel ``` ```bash#openSUSE Tumbleweed @@ -116,42 +116,26 @@ $ export PATH="$PATH:/usr/lib/llvm16/bin" ## Building Bun -After cloning the repository, run the following command to run the first build. This may take a while as it will clone submodules and build dependencies. - -```bash -$ bun setup -``` - -The binary will be located at `./build/bun-debug`. It is recommended to add this to your `$PATH`. To verify the build worked, let's print the version number on the development build of Bun. - -```bash -$ build/bun-debug --version -x.y.z_debug -``` - -To rebuild, you can invoke `bun run build` +After cloning the repository, run the following command to build. This may take a while as it will clone submodules and build dependencies. ```bash $ bun run build ``` -These two scripts, `setup` and `build`, are aliases to do roughly the following: +The binary will be located at `./build/debug/bun-debug`. It is recommended to add this to your `$PATH`. To verify the build worked, let's print the version number on the development build of Bun. ```bash -$ ./scripts/setup.sh -$ cmake -S . -B build -G Ninja -DCMAKE_BUILD_TYPE=Debug -$ ninja -C build # 'bun run build' runs just this +$ build/debug/bun-debug --version +x.y.z_debug ``` -Advanced users can pass CMake flags to customize the build. - ## VSCode VSCode is the recommended IDE for working on Bun, as it has been configured. Once opening, you can run `Extensions: Show Recommended Extensions` to install the recommended extensions for Zig and C++. ZLS is automatically configured. -If you use a different editor, make sure that you tell ZLS to use the automatically installed Zig compiler, which is located at `./.cache/zig/zig.exe`. The filename is `zig.exe` so that it works as expected on Windows, but it still works on macOS/Linux (it just has a surprising file extension). +If you use a different editor, make sure that you tell ZLS to use the automatically installed Zig compiler, which is located at `./vendor/zig/zig.exe`. The filename is `zig.exe` so that it works as expected on Windows, but it still works on macOS/Linux (it just has a surprising file extension). -We recommend adding `./build` to your `$PATH` so that you can run `bun-debug` in your terminal: +We recommend adding `./build/debug` to your `$PATH` so that you can run `bun-debug` in your terminal: ```sh $ bun-debug @@ -163,8 +147,8 @@ Several code generation scripts are used during Bun's build process. These are r In particular, these are: -- `./src/codegen/generate-jssink.ts` -- Generates `build/codegen/JSSink.cpp`, `build/codegen/JSSink.h` which implement various classes for interfacing with `ReadableStream`. This is internally how `FileSink`, `ArrayBufferSink`, `"type": "direct"` streams and other code related to streams works. -- `./src/codegen/generate-classes.ts` -- Generates `build/codegen/ZigGeneratedClasses*`, which generates Zig & C++ bindings for JavaScriptCore classes implemented in Zig. 
In `**/*.classes.ts` files, we define the interfaces for various classes, methods, prototypes, getters/setters etc which the code generator reads to generate boilerplate code implementing the JavaScript objects in C++ and wiring them up to Zig +- `./src/codegen/generate-jssink.ts` -- Generates `build/debug/codegen/JSSink.cpp`, `build/debug/codegen/JSSink.h` which implement various classes for interfacing with `ReadableStream`. This is internally how `FileSink`, `ArrayBufferSink`, `"type": "direct"` streams and other code related to streams works. +- `./src/codegen/generate-classes.ts` -- Generates `build/debug/codegen/ZigGeneratedClasses*`, which generates Zig & C++ bindings for JavaScriptCore classes implemented in Zig. In `**/*.classes.ts` files, we define the interfaces for various classes, methods, prototypes, getters/setters etc which the code generator reads to generate boilerplate code implementing the JavaScript objects in C++ and wiring them up to Zig - `./src/codegen/bundle-modules.ts` -- Bundles built-in modules like `node:fs`, `bun:ffi` into files we can include in the final binary. In development, these can be reloaded without rebuilding Zig (you still need to run `bun run build`, but it re-reads the transpiled files from disk afterwards). In release builds, these are embedded into the binary. - `./src/codegen/bundle-functions.ts` -- Bundles globally-accessible functions implemented in JavaScript/TypeScript like `ReadableStream`, `WritableStream`, and a handful more. These are used similarly to the builtin modules, but the output more closely aligns with what WebKit/Safari does for Safari's built-in functions so that we can copy-paste the implementations from WebKit as a starting point. @@ -180,7 +164,7 @@ To compile a release build of Bun, run: $ bun run build:release ``` -The binary will be located at `./build-release/bun` and `./build-release/bun-profile`. +The binary will be located at `./build/release/bun` and `./build/release/bun-profile`. ### Download release build from pull requests @@ -189,8 +173,8 @@ To save you time spent building a release build locally, we provide a way to run To run a release build from a pull request, you can use the `bun-pr` npm package: ```sh -bunx bun-pr pr-number -bunx bun-pr branch/branch-name +bunx bun-pr +bunx bun-pr bunx bun-pr "https://github.com/oven-sh/bun/pull/1234566" ``` @@ -222,24 +206,18 @@ $ valgrind --fair-sched=try --track-origins=yes bun-debug ## Building WebKit locally + Debug mode of JSC -{% callout %} - -**TODO**: This is out of date. TLDR is pass `-DUSE_DEBUG_JSC=1` or `-DWEBKIT_DIR=...` to CMake. it will probably need more fiddling. ask @paperdave if you need this. - -{% /callout %} - WebKit is not cloned by default (to save time and disk space). To clone and build WebKit locally, run: ```bash -# once you run this, `make submodule` can be used to automatically -# update WebKit and the other submodules -$ git submodule update --init --depth 1 --checkout src/bun.js/WebKit -# to make a jsc release build -$ make jsc -# JSC debug build does not work perfectly with Bun yet, this is actively being -# worked on and will eventually become the default. -$ make jsc-build-linux-compile-debug cpp -$ make jsc-build-mac-compile-debug cpp +# Clone WebKit into ./vendor/WebKit +$ git clone https://github.com/oven-sh/WebKit vendor/WebKit + +# Make a debug build of JSC. 
This will output build artifacts in ./vendor/WebKit/WebKitBuild/Debug +# Optionally, you can use `make jsc` for a release build +$ make jsc-debug + +# Build bun with the local JSC build +$ bun run build:local ``` Note that the WebKit folder, including build artifacts, is 8GB+ in size. @@ -307,17 +285,17 @@ If you see this error when compiling, run: $ xcode-select --install ``` -## Cannot find `libatomic.a` +### Cannot find `libatomic.a` Bun defaults to linking `libatomic` statically, as not all systems have it. If you are building on a distro that does not have a static libatomic available, you can run the following command to enable dynamic linking: ```bash -$ bun setup -DUSE_STATIC_LIBATOMIC=OFF +$ bun run build -DUSE_STATIC_LIBATOMIC=OFF ``` The built version of Bun may not work on other systems if compiled this way. -## ccache conflicts with building TinyCC on macOS +### ccache conflicts with building TinyCC on macOS If you run into issues with `ccache` when building TinyCC, try reinstalling ccache @@ -325,3 +303,9 @@ If you run into issues with `ccache` when building TinyCC, try reinstalling ccac brew uninstall ccache brew install ccache ``` + +## Using bun-debug + +- Disable logging: `BUN_DEBUG_QUIET_LOGS=1 bun-debug ...` (to disable all debug logging) +- Enable logging for a specific zig scope: `BUN_DEBUG_EventLoop=1 bun-debug ...` (to allow `std.log.scoped(.EventLoop)`) +- Bun transpiles every file it runs, to see the actual executed source in a debug build find it in `/tmp/bun-debug-src/...path/to/file`, for example the transpiled version of `/home/bun/index.ts` would be in `/tmp/bun-debug-src/home/bun/index.ts` diff --git a/Dockerfile b/Dockerfile deleted file mode 100644 index aaece20d36..0000000000 --- a/Dockerfile +++ /dev/null @@ -1,645 +0,0 @@ -# This Dockerfile is used by CI workflows to build Bun. It is not intended as a development -# environment, or to be used as a base image for other projects. 
-# -# You likely want this image instead: https://hub.docker.com/r/oven/bun -# -# TODO: move this file to reduce confusion -ARG DEBIAN_FRONTEND=noninteractive -ARG GITHUB_WORKSPACE=/build -ARG WEBKIT_DIR=${GITHUB_WORKSPACE}/bun-webkit -ARG BUN_RELEASE_DIR=${GITHUB_WORKSPACE}/bun-release -ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps -ARG BUN_DIR=${GITHUB_WORKSPACE}/bun -ARG CPU_TARGET=native -ARG ARCH=x86_64 -ARG BUILD_MACHINE_ARCH=x86_64 -ARG BUILDARCH=amd64 -ARG TRIPLET=${ARCH}-linux-gnu -ARG GIT_SHA="" -ARG BUN_VERSION="bun-v1.1.4" -ARG BUN_DOWNLOAD_URL_BASE="https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/${BUN_VERSION}" -ARG CANARY=0 -ARG ASSERTIONS=OFF -ARG ZIG_OPTIMIZE=ReleaseFast -ARG CMAKE_BUILD_TYPE=Release - -ARG NODE_VERSION="20" -ARG LLVM_VERSION="16" - -ARG ZIG_VERSION="0.13.0" -ARG ZIG_VERSION_SHORT="0.13.0" - -ARG SCCACHE_BUCKET -ARG SCCACHE_REGION -ARG SCCACHE_S3_USE_SSL -ARG SCCACHE_ENDPOINT -ARG AWS_ACCESS_KEY_ID -ARG AWS_SECRET_ACCESS_KEY - -FROM bitnami/minideb:bullseye as bun-base - -ARG BUN_DOWNLOAD_URL_BASE -ARG DEBIAN_FRONTEND -ARG BUN_VERSION -ARG NODE_VERSION -ARG LLVM_VERSION -ARG BUILD_MACHINE_ARCH -ARG BUN_DIR -ARG BUN_DEPS_OUT_DIR -ARG CPU_TARGET - -ENV CI 1 -ENV CPU_TARGET=${CPU_TARGET} -ENV BUILDARCH=${BUILDARCH} -ENV BUN_DEPS_OUT_DIR=${BUN_DEPS_OUT_DIR} -ENV USE_LTO 1 - -ENV LC_CTYPE=en_US.UTF-8 -ENV LC_ALL=en_US.UTF-8 - -ENV SCCACHE_BUCKET=${SCCACHE_BUCKET} -ENV SCCACHE_REGION=${SCCACHE_REGION} -ENV SCCACHE_S3_USE_SSL=${SCCACHE_S3_USE_SSL} -ENV SCCACHE_ENDPOINT=${SCCACHE_ENDPOINT} -ENV AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID} -ENV AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY} - -RUN install_packages \ - ca-certificates \ - curl \ - gnupg \ - && echo "deb https://apt.llvm.org/bullseye/ llvm-toolchain-bullseye-${LLVM_VERSION} main" > /etc/apt/sources.list.d/llvm.list \ - && echo "deb-src https://apt.llvm.org/bullseye/ llvm-toolchain-bullseye-${LLVM_VERSION} main" >> /etc/apt/sources.list.d/llvm.list \ - && curl -fsSL "https://apt.llvm.org/llvm-snapshot.gpg.key" | apt-key add - \ - && echo "deb https://deb.nodesource.com/node_${NODE_VERSION}.x nodistro main" > /etc/apt/sources.list.d/nodesource.list \ - && curl -fsSL "https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key" | apt-key add - \ - && echo "deb https://apt.kitware.com/ubuntu/ focal main" > /etc/apt/sources.list.d/kitware.list \ - && curl -fsSL "https://apt.kitware.com/keys/kitware-archive-latest.asc" | apt-key add - \ - && install_packages \ - wget \ - bash \ - software-properties-common \ - build-essential \ - autoconf \ - automake \ - libtool \ - pkg-config \ - clang-${LLVM_VERSION} \ - lld-${LLVM_VERSION} \ - lldb-${LLVM_VERSION} \ - clangd-${LLVM_VERSION} \ - libc++-${LLVM_VERSION}-dev \ - libc++abi-${LLVM_VERSION}-dev \ - llvm-${LLVM_VERSION}-runtime \ - llvm-${LLVM_VERSION}-dev \ - make \ - cmake \ - ninja-build \ - file \ - libc-dev \ - libxml2 \ - libxml2-dev \ - xz-utils \ - git \ - tar \ - rsync \ - gzip \ - unzip \ - perl \ - python3 \ - ruby \ - ruby-dev \ - golang \ - nodejs && \ - for f in /usr/lib/llvm-${LLVM_VERSION}/bin/*; do ln -sf "$f" /usr/bin; done \ - && ln -sf /usr/bin/clang-${LLVM_VERSION} /usr/bin/clang \ - && ln -sf /usr/bin/clang++-${LLVM_VERSION} /usr/bin/clang++ \ - && ln -sf /usr/bin/lld-${LLVM_VERSION} /usr/bin/lld \ - && ln -sf /usr/bin/lldb-${LLVM_VERSION} /usr/bin/lldb \ - && ln -sf /usr/bin/clangd-${LLVM_VERSION} /usr/bin/clangd \ - && ln -sf /usr/bin/llvm-ar-${LLVM_VERSION} /usr/bin/llvm-ar \ - && ln -sf /usr/bin/ld.lld /usr/bin/ld \ - && ln 
-sf /usr/bin/llvm-ranlib-${LLVM_VERSION} /usr/bin/ranlib \ - && ln -sf /usr/bin/clang /usr/bin/cc \ - && ln -sf /usr/bin/clang /usr/bin/c89 \ - && ln -sf /usr/bin/clang /usr/bin/c99 \ - && ln -sf /usr/bin/clang++ /usr/bin/c++ \ - && ln -sf /usr/bin/clang++ /usr/bin/g++ \ - && ln -sf /usr/bin/llvm-ar /usr/bin/ar \ - && ln -sf /usr/bin/clang /usr/bin/gcc \ - && arch="$(dpkg --print-architecture)" \ - && case "${arch##*-}" in \ - amd64) variant="x64";; \ - arm64) variant="aarch64";; \ - *) echo "unsupported architecture: $arch"; exit 1 ;; \ - esac \ - && wget "${BUN_DOWNLOAD_URL_BASE}/bun-linux-${variant}.zip" \ - && unzip bun-linux-${variant}.zip \ - && mv bun-linux-${variant}/bun /usr/bin/bun \ - && ln -s /usr/bin/bun /usr/bin/bunx \ - && rm -rf bun-linux-${variant} bun-linux-${variant}.zip \ - && mkdir -p ${BUN_DIR} ${BUN_DEPS_OUT_DIR} - -# && if [ -n "${SCCACHE_BUCKET}" ]; then \ -# echo "Setting up sccache" \ -# && wget https://github.com/mozilla/sccache/releases/download/v0.5.4/sccache-v0.5.4-${BUILD_MACHINE_ARCH}-unknown-linux-musl.tar.gz \ -# && tar xf sccache-v0.5.4-${BUILD_MACHINE_ARCH}-unknown-linux-musl.tar.gz \ -# && mv sccache-v0.5.4-${BUILD_MACHINE_ARCH}-unknown-linux-musl/sccache /usr/bin/sccache \ -# && rm -rf sccache-v0.5.4-${BUILD_MACHINE_ARCH}-unknown-linux-musl.tar.gz sccache-v0.5.4-${BUILD_MACHINE_ARCH}-unknown-linux-musl \ - -FROM bun-base as bun-base-with-zig - -ARG ZIG_VERSION -ARG ZIG_VERSION_SHORT -ARG BUILD_MACHINE_ARCH -ARG ZIG_FOLDERNAME=zig-linux-${BUILD_MACHINE_ARCH}-${ZIG_VERSION} -ARG ZIG_FILENAME=${ZIG_FOLDERNAME}.tar.xz -ARG ZIG_URL="https://ziglang.org/builds/${ZIG_FILENAME}" -ARG ZIG_LOCAL_CACHE_DIR=/zig-cache -ENV ZIG_LOCAL_CACHE_DIR=${ZIG_LOCAL_CACHE_DIR} - -WORKDIR $GITHUB_WORKSPACE - -ADD $ZIG_URL . -RUN tar xf ${ZIG_FILENAME} \ - && mv ${ZIG_FOLDERNAME}/lib /usr/lib/zig \ - && mv ${ZIG_FOLDERNAME}/zig /usr/bin/zig \ - && rm -rf ${ZIG_FILENAME} ${ZIG_FOLDERNAME} - -FROM bun-base as c-ares - -ARG BUN_DIR -ARG CPU_TARGET -ENV CPU_TARGET=${CPU_TARGET} -ARG CCACHE_DIR=/ccache -ENV CCACHE_DIR=${CCACHE_DIR} - -COPY Makefile ${BUN_DIR}/Makefile -COPY src/deps/c-ares ${BUN_DIR}/src/deps/c-ares -COPY scripts ${BUN_DIR}/scripts - -WORKDIR $BUN_DIR - -RUN --mount=type=cache,target=${CCACHE_DIR} \ - cd $BUN_DIR \ - && bash ./scripts/build-cares.sh \ - && rm -rf ${BUN_DIR}/src/deps/c-ares ${BUN_DIR}/Makefile ${BUN_DIR}/scripts - -FROM bun-base as lolhtml - -RUN curl https://sh.rustup.rs -sSf | sh -s -- -y - -ARG BUN_DIR -ARG CPU_TARGET -ENV CPU_TARGET=${CPU_TARGET} - -COPY Makefile ${BUN_DIR}/Makefile -COPY src/deps/lol-html ${BUN_DIR}/src/deps/lol-html - -ARG CCACHE_DIR=/ccache -ENV CCACHE_DIR=${CCACHE_DIR} - -RUN --mount=type=cache,target=${CCACHE_DIR} \ - export PATH=$PATH:$HOME/.cargo/bin \ - && cd ${BUN_DIR} \ - && make lolhtml \ - && rm -rf src/deps/lol-html Makefile - -FROM bun-base as mimalloc - -ARG BUN_DIR -ARG CPU_TARGET -ARG ASSERTIONS -ENV CPU_TARGET=${CPU_TARGET} - -COPY Makefile ${BUN_DIR}/Makefile -COPY src/deps/mimalloc ${BUN_DIR}/src/deps/mimalloc -COPY scripts ${BUN_DIR}/scripts - -ARG CCACHE_DIR=/ccache -ENV CCACHE_DIR=${CCACHE_DIR} - -RUN --mount=type=cache,target=${CCACHE_DIR} \ - cd ${BUN_DIR} \ - && bash ./scripts/build-mimalloc.sh \ - && rm -rf src/deps/mimalloc Makefile - -FROM bun-base as mimalloc-debug - -ARG BUN_DIR -ARG CPU_TARGET -ARG ASSERTIONS -ENV CPU_TARGET=${CPU_TARGET} - -COPY Makefile ${BUN_DIR}/Makefile -COPY src/deps/mimalloc ${BUN_DIR}/src/deps/mimalloc - -ARG CCACHE_DIR=/ccache -ENV CCACHE_DIR=${CCACHE_DIR} - -RUN 
--mount=type=cache,target=${CCACHE_DIR} \ - cd ${BUN_DIR} \ - && make mimalloc-debug \ - && rm -rf src/deps/mimalloc Makefile - -FROM bun-base as zlib - -ARG BUN_DIR -ARG CPU_TARGET -ENV CPU_TARGET=${CPU_TARGET} -ARG CCACHE_DIR=/ccache -ENV CCACHE_DIR=${CCACHE_DIR} - -COPY Makefile ${BUN_DIR}/Makefile -COPY CMakeLists.txt ${BUN_DIR}/CMakeLists.txt -COPY scripts ${BUN_DIR}/scripts -COPY src/deps/zlib ${BUN_DIR}/src/deps/zlib -COPY package.json bun.lockb Makefile .gitmodules ${BUN_DIR}/ - -WORKDIR $BUN_DIR - -RUN --mount=type=cache,target=${CCACHE_DIR} \ - cd $BUN_DIR \ - && bash ./scripts/build-zlib.sh && rm -rf src/deps/zlib scripts - - -FROM bun-base as libdeflate - -ARG BUN_DIR -ARG CPU_TARGET -ENV CPU_TARGET=${CPU_TARGET} -ARG CCACHE_DIR=/ccache -ENV CCACHE_DIR=${CCACHE_DIR} - -COPY Makefile ${BUN_DIR}/Makefile -COPY CMakeLists.txt ${BUN_DIR}/CMakeLists.txt -COPY scripts ${BUN_DIR}/scripts -COPY src/deps/libdeflate ${BUN_DIR}/src/deps/libdeflate -COPY package.json bun.lockb Makefile .gitmodules ${BUN_DIR}/ - -WORKDIR $BUN_DIR - -RUN --mount=type=cache,target=${CCACHE_DIR} \ - cd $BUN_DIR \ - && bash ./scripts/build-libdeflate.sh && rm -rf src/deps/libdeflate scripts - - -FROM bun-base as libarchive - -ARG BUN_DIR -ARG CPU_TARGET -ENV CPU_TARGET=${CPU_TARGET} -ARG CCACHE_DIR=/ccache -ENV CCACHE_DIR=${CCACHE_DIR} - -RUN install_packages autoconf automake libtool pkg-config - -COPY scripts ${BUN_DIR}/scripts -COPY src/deps/libarchive ${BUN_DIR}/src/deps/libarchive - -WORKDIR $BUN_DIR - -RUN --mount=type=cache,target=${CCACHE_DIR} \ - cd $BUN_DIR \ - && bash ./scripts/build-libarchive.sh && rm -rf src/deps/libarchive .scripts - -FROM bun-base as tinycc - -ARG BUN_DEPS_OUT_DIR -ARG BUN_DIR -ARG CPU_TARGET -ENV CPU_TARGET=${CPU_TARGET} - -RUN install_packages libtcc-dev && cp /usr/lib/$(uname -m)-linux-gnu/libtcc.a ${BUN_DEPS_OUT_DIR} - -FROM bun-base as boringssl - -RUN install_packages golang - -ARG BUN_DIR -ARG CPU_TARGET -ENV CPU_TARGET=${CPU_TARGET} - -COPY Makefile ${BUN_DIR}/Makefile -COPY scripts ${BUN_DIR}/scripts -COPY src/deps/boringssl ${BUN_DIR}/src/deps/boringssl - -WORKDIR $BUN_DIR - -ARG CCACHE_DIR=/ccache -ENV CCACHE_DIR=${CCACHE_DIR} - -RUN --mount=type=cache,target=${CCACHE_DIR} \ - cd ${BUN_DIR} \ - && bash ./scripts/build-boringssl.sh \ - && rm -rf src/deps/boringssl Makefile - - -FROM bun-base as zstd - -ARG BUN_DIR - -ARG CPU_TARGET -ENV CPU_TARGET=${CPU_TARGET} - -ARG CCACHE_DIR=/ccache -ENV CCACHE_DIR=${CCACHE_DIR} - -COPY Makefile ${BUN_DIR}/Makefile -COPY src/deps/zstd ${BUN_DIR}/src/deps/zstd -COPY scripts ${BUN_DIR}/scripts - -WORKDIR $BUN_DIR - -RUN --mount=type=cache,target=${CCACHE_DIR} \ - cd $BUN_DIR \ - && bash ./scripts/build-zstd.sh \ - && rm -rf src/deps/zstd scripts - -FROM bun-base as ls-hpack - -ARG BUN_DIR - -ARG CPU_TARGET -ENV CPU_TARGET=${CPU_TARGET} - -ARG CCACHE_DIR=/ccache -ENV CCACHE_DIR=${CCACHE_DIR} - -COPY Makefile ${BUN_DIR}/Makefile -COPY src/deps/ls-hpack ${BUN_DIR}/src/deps/ls-hpack -COPY scripts ${BUN_DIR}/scripts - -WORKDIR $BUN_DIR - -RUN --mount=type=cache,target=${CCACHE_DIR} \ - cd $BUN_DIR \ - && bash ./scripts/build-lshpack.sh \ - && rm -rf src/deps/ls-hpack scripts - -FROM bun-base-with-zig as bun-identifier-cache - -ARG DEBIAN_FRONTEND -ARG GITHUB_WORKSPACE -ARG CPU_TARGET -ARG BUN_DIR -ENV CPU_TARGET=${CPU_TARGET} - -WORKDIR $BUN_DIR - -COPY src/js_lexer/identifier_data.zig ${BUN_DIR}/src/js_lexer/identifier_data.zig -COPY src/js_lexer/identifier_cache.zig ${BUN_DIR}/src/js_lexer/identifier_cache.zig - -RUN 
--mount=type=cache,target=${ZIG_LOCAL_CACHE_DIR} \ - cd $BUN_DIR \ - && zig run src/js_lexer/identifier_data.zig - -FROM bun-base as bun-node-fallbacks - -ARG BUN_DIR - -WORKDIR $BUN_DIR - -COPY src/node-fallbacks ${BUN_DIR}/src/node-fallbacks - -RUN cd $BUN_DIR/src/node-fallbacks \ - && bun install --frozen-lockfile \ - && bun run build \ - && rm -rf src/node-fallbacks/node_modules - -FROM bun-base as bun-webkit - -ARG BUILDARCH -ARG ASSERTIONS - -COPY CMakeLists.txt ${BUN_DIR}/CMakeLists.txt - -RUN mkdir ${BUN_DIR}/bun-webkit \ - && WEBKIT_TAG=$(grep 'set(WEBKIT_TAG' "${BUN_DIR}/CMakeLists.txt" | awk '{print $2}' | cut -f 1 -d ')') \ - && WEBKIT_SUFFIX=$(if [ "${ASSERTIONS}" = "ON" ]; then echo "debug"; else echo "lto"; fi) \ - && WEBKIT_URL="https://github.com/oven-sh/WebKit/releases/download/autobuild-${WEBKIT_TAG}/bun-webkit-linux-${BUILDARCH}-${WEBKIT_SUFFIX}.tar.gz" \ - && echo "Downloading ${WEBKIT_URL}" \ - && curl -fsSL "${WEBKIT_URL}" | tar -xz -C ${BUN_DIR}/bun-webkit --strip-components=1 - -FROM bun-base as bun-cpp-objects - -ARG CANARY -ARG ASSERTIONS - -COPY --from=bun-webkit ${BUN_DIR}/bun-webkit ${BUN_DIR}/bun-webkit - -COPY packages ${BUN_DIR}/packages -COPY src ${BUN_DIR}/src -COPY CMakeLists.txt ${BUN_DIR}/CMakeLists.txt -COPY src/deps/boringssl/include ${BUN_DIR}/src/deps/boringssl/include - -# for uWebSockets -COPY src/deps/libdeflate ${BUN_DIR}/src/deps/libdeflate - -ARG CCACHE_DIR=/ccache -ENV CCACHE_DIR=${CCACHE_DIR} - -RUN --mount=type=cache,target=${CCACHE_DIR} mkdir ${BUN_DIR}/build \ - && cd ${BUN_DIR}/build \ - && mkdir -p tmp_modules tmp_functions js codegen \ - && cmake .. -GNinja -DCMAKE_BUILD_TYPE=Release -DUSE_LTO=ON -DUSE_DEBUG_JSC=${ASSERTIONS} -DBUN_CPP_ONLY=1 -DWEBKIT_DIR=/build/bun/bun-webkit -DCANARY=${CANARY} -DZIG_COMPILER=system \ - && bash compile-cpp-only.sh -v - -FROM bun-base-with-zig as bun-codegen-for-zig - -COPY package.json bun.lockb Makefile .gitmodules ${BUN_DIR}/ -COPY src/runtime ${BUN_DIR}/src/runtime -COPY src/runtime.js src/runtime.bun.js ${BUN_DIR}/src/ -COPY packages/bun-error ${BUN_DIR}/packages/bun-error -COPY packages/bun-types ${BUN_DIR}/packages/bun-types -COPY src/fallback.ts ${BUN_DIR}/src/fallback.ts -COPY src/api ${BUN_DIR}/src/api - -WORKDIR $BUN_DIR - -# TODO: move away from Makefile entirely -RUN --mount=type=cache,target=${ZIG_LOCAL_CACHE_DIR} \ - bun install --frozen-lockfile \ - && make runtime_js fallback_decoder bun_error \ - && rm -rf src/runtime src/fallback.ts node_modules bun.lockb package.json Makefile - -FROM bun-base-with-zig as bun-compile-zig-obj - -ARG ZIG_PATH -ARG TRIPLET -ARG GIT_SHA -ARG CPU_TARGET -ARG CANARY=0 -ARG ASSERTIONS=OFF -ARG ZIG_OPTIMIZE=ReleaseFast - -ARG CCACHE_DIR=/ccache -ENV CCACHE_DIR=${CCACHE_DIR} - -COPY *.zig package.json CMakeLists.txt ${BUN_DIR}/ -COPY completions ${BUN_DIR}/completions -COPY packages ${BUN_DIR}/packages -COPY src ${BUN_DIR}/src - -COPY --from=bun-identifier-cache ${BUN_DIR}/src/js_lexer/*.blob ${BUN_DIR}/src/js_lexer/ -COPY --from=bun-node-fallbacks ${BUN_DIR}/src/node-fallbacks/out ${BUN_DIR}/src/node-fallbacks/out -COPY --from=bun-codegen-for-zig ${BUN_DIR}/src/*.out.js ${BUN_DIR}/src/*.out.refresh.js ${BUN_DIR}/src/ -COPY --from=bun-codegen-for-zig ${BUN_DIR}/packages/bun-error/dist ${BUN_DIR}/packages/bun-error/dist - -WORKDIR $BUN_DIR - -RUN --mount=type=cache,target=${CCACHE_DIR} \ - --mount=type=cache,target=${ZIG_LOCAL_CACHE_DIR} \ - mkdir -p build \ - && bun run $BUN_DIR/src/codegen/bundle-modules.ts --debug=OFF $BUN_DIR/build \ - && cd build \ - && 
cmake .. \ - -G Ninja \ - -DCMAKE_BUILD_TYPE=Release \ - -DUSE_LTO=ON \ - -DZIG_OPTIMIZE="${ZIG_OPTIMIZE}" \ - -DCPU_TARGET="${CPU_TARGET}" \ - -DZIG_TARGET="${TRIPLET}" \ - -DWEBKIT_DIR="omit" \ - -DNO_CONFIGURE_DEPENDS=1 \ - -DNO_CODEGEN=1 \ - -DBUN_ZIG_OBJ_DIR="/tmp" \ - -DCANARY="${CANARY}" \ - -DZIG_COMPILER=system \ - -DZIG_LIB_DIR=$BUN_DIR/src/deps/zig/lib \ - && ONLY_ZIG=1 ninja "/tmp/bun-zig.o" -v - -FROM scratch as build_release_obj - -ARG CPU_TARGET -ENV CPU_TARGET=${CPU_TARGET} - -COPY --from=bun-compile-zig-obj /tmp/bun-zig.o / - -FROM bun-base as bun-link - -ARG CPU_TARGET -ARG CANARY -ARG ASSERTIONS - -ENV CPU_TARGET=${CPU_TARGET} -ARG CCACHE_DIR=/ccache -ENV CCACHE_DIR=${CCACHE_DIR} -ARG ZIG_LOCAL_CACHE_DIR=/zig-cache -ENV ZIG_LOCAL_CACHE_DIR=${ZIG_LOCAL_CACHE_DIR} - -WORKDIR $BUN_DIR - -RUN mkdir -p build bun-webkit - -# lol -COPY src/bun.js/bindings/sqlite/sqlite3.c ${BUN_DIR}/src/bun.js/bindings/sqlite/sqlite3.c -COPY src/deps/brotli ${BUN_DIR}/src/deps/brotli - -COPY src/symbols.dyn src/linker.lds ${BUN_DIR}/src/ - -COPY CMakeLists.txt ${BUN_DIR}/CMakeLists.txt -COPY --from=zlib ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/ -COPY --from=libdeflate ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/ -COPY --from=libarchive ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/ -COPY --from=boringssl ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/ -COPY --from=lolhtml ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/ -COPY --from=mimalloc ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/ -COPY --from=zstd ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/ -COPY --from=tinycc ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/ -COPY --from=c-ares ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/ -COPY --from=ls-hpack ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/ -COPY --from=bun-compile-zig-obj /tmp/bun-zig.o ${BUN_DIR}/build/bun-zig.o -COPY --from=bun-cpp-objects ${BUN_DIR}/build/*.a ${BUN_DIR}/build/ -COPY --from=bun-cpp-objects ${BUN_DIR}/build/*.o ${BUN_DIR}/build/ -COPY --from=bun-cpp-objects ${BUN_DIR}/bun-webkit/lib ${BUN_DIR}/bun-webkit/lib - -WORKDIR $BUN_DIR/build - -RUN --mount=type=cache,target=${CCACHE_DIR} \ - --mount=type=cache,target=${ZIG_LOCAL_CACHE_DIR} \ - cmake .. 
\ - -G Ninja \ - -DCMAKE_BUILD_TYPE=Release \ - -DBUN_LINK_ONLY=1 \ - -DBUN_ZIG_OBJ_DIR="${BUN_DIR}/build" \ - -DUSE_LTO=ON \ - -DUSE_DEBUG_JSC=${ASSERTIONS} \ - -DBUN_CPP_ARCHIVE="${BUN_DIR}/build/bun-cpp-objects.a" \ - -DWEBKIT_DIR="${BUN_DIR}/bun-webkit" \ - -DBUN_DEPS_OUT_DIR="${BUN_DEPS_OUT_DIR}" \ - -DCPU_TARGET="${CPU_TARGET}" \ - -DNO_CONFIGURE_DEPENDS=1 \ - -DCANARY="${CANARY}" \ - -DZIG_COMPILER=system \ - && ninja -v \ - && ./bun --revision \ - && mkdir -p /build/out \ - && mv bun bun-profile /build/out \ - && rm -rf ${BUN_DIR} ${BUN_DEPS_OUT_DIR} - -FROM scratch as artifact - -COPY --from=bun-link /build/out / - -FROM bun-base as bun-link-assertions - -ARG CPU_TARGET -ARG CANARY -ARG ASSERTIONS - -ENV CPU_TARGET=${CPU_TARGET} -ARG CCACHE_DIR=/ccache -ENV CCACHE_DIR=${CCACHE_DIR} -ARG ZIG_LOCAL_CACHE_DIR=/zig-cache -ENV ZIG_LOCAL_CACHE_DIR=${ZIG_LOCAL_CACHE_DIR} - -WORKDIR $BUN_DIR - -RUN mkdir -p build bun-webkit - -# lol -COPY src/bun.js/bindings/sqlite/sqlite3.c ${BUN_DIR}/src/bun.js/bindings/sqlite/sqlite3.c - -COPY src/symbols.dyn src/linker.lds ${BUN_DIR}/src/ - -COPY CMakeLists.txt ${BUN_DIR}/CMakeLists.txt -COPY --from=zlib ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/ -COPY --from=libarchive ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/ -COPY --from=boringssl ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/ -COPY --from=lolhtml ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/ -COPY --from=mimalloc-debug ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/ -COPY --from=zstd ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/ -COPY --from=tinycc ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/ -COPY --from=c-ares ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/ -COPY --from=bun-compile-zig-obj /tmp/bun-zig.o ${BUN_DIR}/build/bun-zig.o -COPY --from=bun-cpp-objects ${BUN_DIR}/build/bun-cpp-objects.a ${BUN_DIR}/build/bun-cpp-objects.a -COPY --from=bun-cpp-objects ${BUN_DIR}/bun-webkit/lib ${BUN_DIR}/bun-webkit/lib - -WORKDIR $BUN_DIR/build - -RUN --mount=type=cache,target=${CCACHE_DIR} \ - --mount=type=cache,target=${ZIG_LOCAL_CACHE_DIR} \ - cmake .. \ - -G Ninja \ - -DCMAKE_BUILD_TYPE=Release \ - -DBUN_LINK_ONLY=1 \ - -DBUN_ZIG_OBJ_DIR="${BUN_DIR}/build" \ - -DUSE_DEBUG_JSC=ON \ - -DBUN_CPP_ARCHIVE="${BUN_DIR}/build/bun-cpp-objects.a" \ - -DWEBKIT_DIR="${BUN_DIR}/bun-webkit" \ - -DBUN_DEPS_OUT_DIR="${BUN_DEPS_OUT_DIR}" \ - -DCPU_TARGET="${CPU_TARGET}" \ - -DNO_CONFIGURE_DEPENDS=1 \ - -DCANARY="${CANARY}" \ - -DZIG_COMPILER=system \ - -DUSE_LTO=ON \ - && ninja -v \ - && ./bun --revision \ - && mkdir -p /build/out \ - && mv bun bun-profile /build/out \ - && rm -rf ${BUN_DIR} ${BUN_DEPS_OUT_DIR} - -FROM scratch as artifact-assertions - -COPY --from=bun-link-assertions /build/out / \ No newline at end of file diff --git a/LATEST b/LATEST index a2a8e42b09..9b51125a6c 100644 --- a/LATEST +++ b/LATEST @@ -1 +1 @@ -1.1.21 \ No newline at end of file +1.1.34 \ No newline at end of file diff --git a/Makefile b/Makefile index fb5b180219..f1a8be2646 100644 --- a/Makefile +++ b/Makefile @@ -1,3 +1,10 @@ +# ------------------------------------------------------------ +# WARNING +# ------------------------------------------------------------ +# This file is very old and will be removed soon! 
+# You can build Bun using `cmake` or `bun run build` +# ------------------------------------------------------------ + SHELL := $(shell which bash) # Use bash syntax to be consistent OS_NAME := $(shell uname -s | tr '[:upper:]' '[:lower:]') @@ -70,7 +77,7 @@ BUN_RELEASE_BIN = $(PACKAGE_DIR)/bun PRETTIER ?= $(shell which prettier 2>/dev/null || echo "./node_modules/.bin/prettier") ESBUILD = "$(shell which esbuild 2>/dev/null || echo "./node_modules/.bin/esbuild")" DSYMUTIL ?= $(shell which dsymutil 2>/dev/null || which dsymutil-15 2>/dev/null) -WEBKIT_DIR ?= $(realpath src/bun.js/WebKit) +WEBKIT_DIR ?= $(realpath vendor/WebKit) WEBKIT_RELEASE_DIR ?= $(WEBKIT_DIR)/WebKitBuild/Release WEBKIT_DEBUG_DIR ?= $(WEBKIT_DIR)/WebKitBuild/Debug WEBKIT_RELEASE_DIR_LTO ?= $(WEBKIT_DIR)/WebKitBuild/ReleaseLTO @@ -131,8 +138,8 @@ endif SED = $(shell which gsed 2>/dev/null || which sed 2>/dev/null) BUN_DIR ?= $(shell dirname $(realpath $(firstword $(MAKEFILE_LIST)))) -BUN_DEPS_DIR ?= $(shell pwd)/src/deps -BUN_DEPS_OUT_DIR ?= $(shell pwd)/build/bun-deps +BUN_DEPS_DIR ?= $(shell pwd)/vendor +BUN_DEPS_OUT_DIR ?= $(shell pwd)/build/release CPU_COUNT = 2 ifeq ($(OS_NAME),darwin) CPU_COUNT = $(shell sysctl -n hw.logicalcpu) @@ -366,7 +373,7 @@ ifeq ($(OS_NAME),linux) endif ifeq ($(OS_NAME),darwin) -MACOS_MIN_FLAG=-mmacosx-version-min=$(MIN_MACOS_VERSION) +MACOS_MIN_FLAG=-mmacos-version-min=$(MIN_MACOS_VERSION) POSIX_PKG_MANAGER=brew INCLUDE_DIRS += $(MAC_INCLUDE_DIRS) endif @@ -682,19 +689,10 @@ assert-deps: @test $(shell cargo --version | awk '{print $$2}' | cut -d. -f2) -gt 57 || (echo -e "ERROR: cargo version must be at least 1.57."; exit 1) @echo "You have the dependencies installed! Woo" -# the following allows you to run `make submodule` to update or init submodules. but we will exclude webkit -# unless you explicitly clone it yourself (a huge download) -SUBMODULE_NAMES=$(shell cat .gitmodules | grep 'path = ' | awk '{print $$3}') -ifeq ("$(wildcard src/bun.js/WebKit/.git)", "") - SUBMODULE_NAMES := $(filter-out src/bun.js/WebKit, $(SUBMODULE_NAMES)) -endif .PHONY: init-submodules init-submodules: submodule # (backwards-compatibility alias) -.PHONY: submodule -submodule: ## to init or update all submodules - git submodule update --init --recursive --progress --depth=1 --checkout $(SUBMODULE_NAMES) .PHONY: build-obj build-obj: @@ -797,7 +795,7 @@ cls: @echo -e "\n\n---\n\n" jsc-check: - @ls $(JSC_BASE_DIR) >/dev/null 2>&1 || (echo -e "Failed to access WebKit build. Please compile the WebKit submodule using the Dockerfile at $(shell pwd)/src/javascript/WebKit/Dockerfile and then copy from /output in the Docker container to $(JSC_BASE_DIR). You can override the directory via JSC_BASE_DIR. \n\n DOCKER_BUILDKIT=1 docker build -t bun-webkit $(shell pwd)/src/bun.js/WebKit -f $(shell pwd)/src/bun.js/WebKit/Dockerfile --progress=plain\n\n docker container create bun-webkit\n\n # Get the container ID\n docker container ls\n\n docker cp DOCKER_CONTAINER_ID_YOU_JUST_FOUND:/output $(JSC_BASE_DIR)" && exit 1) + @ls $(JSC_BASE_DIR) >/dev/null 2>&1 || (echo -e "Failed to access WebKit build. Please compile the WebKit submodule using the Dockerfile at $(shell pwd)/src/javascript/WebKit/Dockerfile and then copy from /output in the Docker container to $(JSC_BASE_DIR). You can override the directory via JSC_BASE_DIR. 
\n\n DOCKER_BUILDKIT=1 docker build -t bun-webkit $(shell pwd)/vendor/WebKit -f $(shell pwd)/vendor/WebKit/Dockerfile --progress=plain\n\n docker container create bun-webkit\n\n # Get the container ID\n docker container ls\n\n docker cp DOCKER_CONTAINER_ID_YOU_JUST_FOUND:/output $(JSC_BASE_DIR)" && exit 1) @ls $(JSC_INCLUDE_DIR) >/dev/null 2>&1 || (echo "Failed to access WebKit include directory at $(JSC_INCLUDE_DIR)." && exit 1) @ls $(JSC_LIB) >/dev/null 2>&1 || (echo "Failed to access WebKit lib directory at $(JSC_LIB)." && exit 1) @@ -928,7 +926,7 @@ bun-codesign-release-local-debug: .PHONY: jsc jsc: jsc-build jsc-copy-headers jsc-bindings .PHONY: jsc-debug -jsc-debug: jsc-build-debug jsc-copy-headers-debug +jsc-debug: jsc-build-debug .PHONY: jsc-build jsc-build: $(JSC_BUILD_STEPS) .PHONY: jsc-build-debug @@ -938,7 +936,7 @@ jsc-bindings: headers bindings .PHONY: clone-submodules clone-submodules: - git -c submodule."src/bun.js/WebKit".update=none submodule update --init --recursive --depth=1 --progress + git -c submodule."vendor/WebKit".update=none submodule update --init --recursive --depth=1 --progress .PHONY: headers @@ -1258,7 +1256,7 @@ jsc-build-mac-compile: -DENABLE_STATIC_JSC=ON \ -DENABLE_SINGLE_THREADED_VM_ENTRY_SCOPE=ON \ -DALLOW_LINE_AND_COLUMN_NUMBER_IN_BUILTINS=ON \ - -DCMAKE_BUILD_TYPE=Release \ + -DCMAKE_BUILD_TYPE=RelWithDebInfo \ -DUSE_THIN_ARCHIVES=OFF \ -DBUN_FAST_TLS=ON \ -DENABLE_FTL_JIT=ON \ @@ -1270,7 +1268,7 @@ jsc-build-mac-compile: $(WEBKIT_DIR) \ $(WEBKIT_RELEASE_DIR) && \ CFLAGS="$(CFLAGS) -ffat-lto-objects" CXXFLAGS="$(CXXFLAGS) -ffat-lto-objects" \ - cmake --build $(WEBKIT_RELEASE_DIR) --config Release --target jsc + cmake --build $(WEBKIT_RELEASE_DIR) --config RelWithDebInfo --target jsc .PHONY: jsc-build-mac-compile-lto jsc-build-mac-compile-lto: @@ -1372,7 +1370,7 @@ jsc-build-linux-compile-config-debug: $(WEBKIT_DEBUG_DIR) # If you get "Error: could not load cache" -# run rm -rf src/bun.js/WebKit/CMakeCache.txt +# run rm -rf vendor/WebKit/CMakeCache.txt .PHONY: jsc-build-linux-compile-build jsc-build-linux-compile-build: mkdir -p $(WEBKIT_RELEASE_DIR) && \ @@ -1389,10 +1387,10 @@ jsc-build-linux-compile-build-debug: jsc-build-mac: jsc-force-fastjit jsc-build-mac-compile jsc-build-copy -jsc-build-mac-debug: jsc-force-fastjit jsc-build-mac-compile-debug jsc-build-copy-debug +jsc-build-mac-debug: jsc-force-fastjit jsc-build-mac-compile-debug jsc-build-linux: jsc-build-linux-compile-config jsc-build-linux-compile-build jsc-build-copy -jsc-build-linux-debug: jsc-build-linux-compile-config-debug jsc-build-linux-compile-build-debug jsc-build-copy-debug +jsc-build-linux-debug: jsc-build-linux-compile-config-debug jsc-build-linux-compile-build-debug jsc-build-copy: cp $(WEBKIT_RELEASE_DIR)/lib/libJavaScriptCore.a $(BUN_DEPS_OUT_DIR)/libJavaScriptCore.a @@ -1407,7 +1405,7 @@ jsc-build-copy-debug: cp $(WEBKIT_DEBUG_DIR)/lib/libbmalloc.a $(BUN_DEPS_OUT_DIR)/libbmalloc.a clean-jsc: - cd src/bun.js/WebKit && rm -rf **/CMakeCache.txt **/CMakeFiles && rm -rf src/bun.js/WebKit/WebKitBuild + cd vendor/WebKit && rm -rf **/CMakeCache.txt **/CMakeFiles && rm -rf vendor/WebKit/WebKitBuild clean-bindings: rm -rf $(OBJ_DIR)/*.o $(DEBUG_OBJ_DIR)/*.o $(DEBUG_OBJ_DIR)/webcore/*.o $(DEBUG_BINDINGS_OBJ) $(OBJ_DIR)/webcore/*.o $(BINDINGS_OBJ) $(OBJ_DIR)/*.d $(DEBUG_OBJ_DIR)/*.d diff --git a/README.md b/README.md index 63c6d5e0b7..4b748b865c 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,5 @@

- Logo
+ Logo

Bun

@@ -24,8 +24,6 @@ ## What is Bun? -> **Bun is under active development.** Use it to speed up your development workflows or run simpler production code in resource-constrained environments like serverless functions. We're working on more complete Node.js compatibility and integration with existing frameworks. Join the [Discord](https://bun.sh/discord) and watch the [GitHub repository](https://github.com/oven-sh/bun) to keep tabs on future releases. - Bun is an all-in-one toolkit for JavaScript and TypeScript apps. It ships as a single executable called `bun`. At its core is the _Bun runtime_, a fast JavaScript runtime designed as a drop-in replacement for Node.js. It's written in Zig and powered by JavaScriptCore under the hood, dramatically reducing startup times and memory usage. @@ -87,16 +85,19 @@ bun upgrade --canary ## Quick links - Intro + - [What is Bun?](https://bun.sh/docs/index) - [Installation](https://bun.sh/docs/installation) - [Quickstart](https://bun.sh/docs/quickstart) - [TypeScript](https://bun.sh/docs/typescript) - Templating + - [`bun init`](https://bun.sh/docs/cli/init) - [`bun create`](https://bun.sh/docs/cli/bun-create) - Runtime + - [`bun run`](https://bun.sh/docs/cli/run) - [File types](https://bun.sh/docs/runtime/loaders) - [TypeScript](https://bun.sh/docs/runtime/typescript) @@ -115,6 +116,7 @@ bun upgrade --canary - [Framework API](https://bun.sh/docs/runtime/framework) - Package manager + - [`bun install`](https://bun.sh/docs/cli/install) - [`bun add`](https://bun.sh/docs/cli/add) - [`bun remove`](https://bun.sh/docs/cli/remove) @@ -130,6 +132,7 @@ bun upgrade --canary - [Overrides and resolutions](https://bun.sh/docs/install/overrides) - Bundler + - [`Bun.build`](https://bun.sh/docs/bundler) - [Loaders](https://bun.sh/docs/bundler/loaders) - [Plugins](https://bun.sh/docs/bundler/plugins) @@ -137,6 +140,7 @@ bun upgrade --canary - [vs esbuild](https://bun.sh/docs/bundler/vs-esbuild) - Test runner + - [`bun test`](https://bun.sh/docs/cli/test) - [Writing tests](https://bun.sh/docs/test/writing) - [Watch mode](https://bun.sh/docs/test/hot) @@ -148,9 +152,11 @@ bun upgrade --canary - [Code coverage](https://bun.sh/docs/test/coverage) - Package runner + - [`bunx`](https://bun.sh/docs/cli/bunx) - API + - [HTTP server](https://bun.sh/docs/api/http) - [WebSockets](https://bun.sh/docs/api/websockets) - [Workers](https://bun.sh/docs/api/workers) @@ -183,9 +189,10 @@ bun upgrade --canary - [Building Windows](https://bun.sh/docs/project/building-windows) - [License](https://bun.sh/docs/project/licensing) -## Guides +## Guides + +- Binary -- Binary - [Convert a Blob to a DataView](https://bun.sh/guides/binary/blob-to-dataview) - [Convert a Blob to a ReadableStream](https://bun.sh/guides/binary/blob-to-stream) - [Convert a Blob to a string](https://bun.sh/guides/binary/blob-to-string) @@ -209,7 +216,8 @@ bun upgrade --canary - [Convert an ArrayBuffer to a Uint8Array](https://bun.sh/guides/binary/arraybuffer-to-typedarray) - [Convert an ArrayBuffer to an array of numbers](https://bun.sh/guides/binary/arraybuffer-to-array) -- Ecosystem +- Ecosystem + - [Build a frontend using Vite and Bun](https://bun.sh/guides/ecosystem/vite) - [Build an app with Astro and Bun](https://bun.sh/guides/ecosystem/astro) - [Build an app with Next.js and Bun](https://bun.sh/guides/ecosystem/nextjs) @@ -236,7 +244,8 @@ bun upgrade --canary - [Use React and JSX](https://bun.sh/guides/ecosystem/react) - [Add Sentry to a Bun app](https://bun.sh/guides/ecosystem/sentry) -- HTTP +- HTTP + - [Common HTTP 
server usage](https://bun.sh/guides/http/server) - [Configure TLS on an HTTP server](https://bun.sh/guides/http/tls) - [fetch with unix domain sockets in Bun](https://bun.sh/guides/http/fetch-unix) @@ -250,7 +259,8 @@ bun upgrade --canary - [Upload files via HTTP using FormData](https://bun.sh/guides/http/file-uploads) - [Write a simple HTTP server](https://bun.sh/guides/http/simple) -- Install +- Install + - [Add a dependency](https://bun.sh/guides/install/add) - [Add a development dependency](https://bun.sh/guides/install/add-dev) - [Add a Git dependency](https://bun.sh/guides/install/add-git) @@ -268,7 +278,8 @@ bun upgrade --canary - [Using bun install with an Azure Artifacts npm registry](https://bun.sh/guides/install/azure-artifacts) - [Using bun install with Artifactory](https://bun.sh/guides/install/jfrog-artifactory) -- Process +- Process + - [Get the process uptime in nanoseconds](https://bun.sh/guides/process/nanoseconds) - [Listen for CTRL+C](https://bun.sh/guides/process/ctrl-c) - [Listen to OS signals](https://bun.sh/guides/process/os-signals) @@ -279,7 +290,8 @@ bun upgrade --canary - [Spawn a child process](https://bun.sh/guides/process/spawn) - [Spawn a child process and communicate using IPC](https://bun.sh/guides/process/ipc) -- Read file +- Read file + - [Check if a file exists](https://bun.sh/guides/read-file/exists) - [Get the MIME type of a file](https://bun.sh/guides/read-file/mime) - [Read a file as a ReadableStream](https://bun.sh/guides/read-file/stream) @@ -290,7 +302,8 @@ bun upgrade --canary - [Read a JSON file](https://bun.sh/guides/read-file/json) - [Watch a directory for changes](https://bun.sh/guides/read-file/watch) -- Runtime +- Runtime + - [Debugging Bun with the VS Code extension](https://bun.sh/guides/runtime/vscode-debugger) - [Debugging Bun with the web debugger](https://bun.sh/guides/runtime/web-debugger) - [Define and replace static globals & constants](https://bun.sh/guides/runtime/define-constant) @@ -305,7 +318,8 @@ bun upgrade --canary - [Set a time zone in Bun](https://bun.sh/guides/runtime/timezone) - [Set environment variables](https://bun.sh/guides/runtime/set-env) -- Streams +- Streams + - [Convert a Node.js Readable to a Blob](https://bun.sh/guides/streams/node-readable-to-blob) - [Convert a Node.js Readable to a string](https://bun.sh/guides/streams/node-readable-to-string) - [Convert a Node.js Readable to an ArrayBuffer](https://bun.sh/guides/streams/node-readable-to-arraybuffer) @@ -318,7 +332,8 @@ bun upgrade --canary - [Convert a ReadableStream to an ArrayBuffer](https://bun.sh/guides/streams/to-arraybuffer) - [Convert a ReadableStream to JSON](https://bun.sh/guides/streams/to-json) -- Test +- Test + - [Bail early with the Bun test runner](https://bun.sh/guides/test/bail) - [Generate code coverage reports with the Bun test runner](https://bun.sh/guides/test/coverage) - [Mark a test as a "todo" with the Bun test runner](https://bun.sh/guides/test/todo-tests) @@ -336,7 +351,8 @@ bun upgrade --canary - [Use snapshot testing in `bun test`](https://bun.sh/guides/test/snapshot) - [Write browser DOM tests with Bun and happy-dom](https://bun.sh/guides/test/happy-dom) -- Util +- Util + - [Check if the current file is the entrypoint](https://bun.sh/guides/util/entrypoint) - [Check if two objects are deeply equal](https://bun.sh/guides/util/deep-equals) - [Compress and decompress data with DEFLATE](https://bun.sh/guides/util/deflate) @@ -355,13 +371,14 @@ bun upgrade --canary - [Hash a password](https://bun.sh/guides/util/hash-a-password) - 
[Sleep for a fixed number of milliseconds](https://bun.sh/guides/util/sleep) -- WebSocket +- WebSocket + - [Build a publish-subscribe WebSocket server](https://bun.sh/guides/websocket/pubsub) - [Build a simple WebSocket server](https://bun.sh/guides/websocket/simple) - [Enable compression for WebSocket messages](https://bun.sh/guides/websocket/compression) - [Set per-socket contextual data on a WebSocket](https://bun.sh/guides/websocket/context) -- Write file +- Write file - [Append content to a file](https://bun.sh/guides/write-file/append) - [Copy a file to another location](https://bun.sh/guides/write-file/file-cp) - [Delete a file](https://bun.sh/guides/write-file/unlink) diff --git a/bench/async/bun.js b/bench/async/bun.js index 51d0d119bb..b5f91da558 100644 --- a/bench/async/bun.js +++ b/bench/async/bun.js @@ -1,4 +1,4 @@ -import { run, bench } from "mitata"; +import { bench, run } from "../runner.mjs"; bench("sync", () => {}); bench("async", async () => {}); diff --git a/bench/async/deno.js b/bench/async/deno.js index 9e4347b539..b5f91da558 100644 --- a/bench/async/deno.js +++ b/bench/async/deno.js @@ -1,4 +1,4 @@ -import { run, bench } from "../node_modules/mitata/src/cli.mjs"; +import { bench, run } from "../runner.mjs"; bench("sync", () => {}); bench("async", async () => {}); diff --git a/bench/async/node.mjs b/bench/async/node.mjs index 51d0d119bb..b5f91da558 100644 --- a/bench/async/node.mjs +++ b/bench/async/node.mjs @@ -1,4 +1,4 @@ -import { run, bench } from "mitata"; +import { bench, run } from "../runner.mjs"; bench("sync", () => {}); bench("async", async () => {}); diff --git a/bench/async/package.json b/bench/async/package.json index f5c377686b..bb84ce4cf6 100644 --- a/bench/async/package.json +++ b/bench/async/package.json @@ -3,9 +3,9 @@ "scripts": { "deps": "exit 0", "build": "exit 0", - "bench:bun": "$BUN bun.js", - "bench:node": "$NODE node.mjs", - "bench:deno": "$DENO run -A --unstable deno.js", + "bench:bun": "bun bun.js", + "bench:node": "node node.mjs", + "bench:deno": "deno run -A --unstable deno.js", "bench": "bun run bench:bun && bun run bench:node && bun run bench:deno" } } diff --git a/bench/bun.lockb b/bench/bun.lockb index 679ce8aba1..e77a3b406c 100755 Binary files a/bench/bun.lockb and b/bench/bun.lockb differ diff --git a/bench/copyfile/node.mitata.mjs b/bench/copyfile/node.mitata.mjs index 379150487f..de0e76beab 100644 --- a/bench/copyfile/node.mitata.mjs +++ b/bench/copyfile/node.mitata.mjs @@ -1,5 +1,5 @@ -import { copyFileSync, writeFileSync, readFileSync, statSync } from "node:fs"; -import { bench, run } from "mitata"; +import { copyFileSync, statSync, writeFileSync } from "node:fs"; +import { bench, run } from "../runner.mjs"; function runner(ready) { for (let size of [1, 10, 100, 1000, 10000, 100000, 1000000, 10000000]) { diff --git a/bench/crypto/asymmetricCipher.js b/bench/crypto/asymmetricCipher.js new file mode 100644 index 0000000000..7fa92b20e0 --- /dev/null +++ b/bench/crypto/asymmetricCipher.js @@ -0,0 +1,24 @@ +import { bench, run } from "../runner.mjs"; +const crypto = require("node:crypto"); + +const keyPair = crypto.generateKeyPairSync("rsa", { + modulusLength: 2048, + publicKeyEncoding: { + type: "spki", + format: "pem", + }, + privateKeyEncoding: { + type: "pkcs8", + format: "pem", + }, +}); + +// Max message size for 2048-bit RSA keys +const plaintext = crypto.getRandomValues(Buffer.alloc(214)); + +bench("RSA_PKCS1_OAEP_PADDING round-trip", () => { + const ciphertext = crypto.publicEncrypt(keyPair.publicKey, plaintext); + 
crypto.privateDecrypt(keyPair.privateKey, ciphertext); +}); + +await run(); diff --git a/bench/crypto/asymmetricSign.js b/bench/crypto/asymmetricSign.js new file mode 100644 index 0000000000..e00634963e --- /dev/null +++ b/bench/crypto/asymmetricSign.js @@ -0,0 +1,24 @@ +import { bench, run } from "../runner.mjs"; +const crypto = require("node:crypto"); + +const keyPair = crypto.generateKeyPairSync("rsa", { + modulusLength: 2048, + publicKeyEncoding: { + type: "spki", + format: "pem", + }, + privateKeyEncoding: { + type: "pkcs8", + format: "pem", + }, +}); + +// Max message size for 2048-bit RSA keys +const plaintext = crypto.getRandomValues(Buffer.alloc(245)); + +bench("RSA sign RSA_PKCS1_PADDING round-trip", () => { + const sig = crypto.privateEncrypt(keyPair.privateKey, plaintext); + crypto.publicDecrypt(keyPair.publicKey, sig); +}); + +await run(); diff --git a/bench/deepEqual/map.js b/bench/deepEqual/map.js new file mode 100644 index 0000000000..3d89d61eea --- /dev/null +++ b/bench/deepEqual/map.js @@ -0,0 +1,27 @@ +import { expect } from "bun:test"; +import { bench, run } from "../runner.mjs"; + +const MAP_SIZE = 10_000; + +function* genPairs(count) { + for (let i = 0; i < MAP_SIZE; i++) { + yield ["k" + i, "v" + i]; + } +} + +class CustomMap extends Map { + abc = 123; + constructor(iterable) { + super(iterable); + } +} + +const a = new Map(genPairs()); +const b = new Map(genPairs()); +bench("deepEqual Map", () => expect(a).toEqual(b)); + +const x = new CustomMap(genPairs()); +const y = new CustomMap(genPairs()); +bench("deepEqual CustomMap", () => expect(x).toEqual(y)); + +await run(); diff --git a/bench/deepEqual/set.js b/bench/deepEqual/set.js new file mode 100644 index 0000000000..1f16d09e9c --- /dev/null +++ b/bench/deepEqual/set.js @@ -0,0 +1,27 @@ +import { expect } from "bun:test"; +import { bench, run } from "../runner.mjs"; + +const SET_SIZE = 10_000; + +function* genValues(count) { + for (let i = 0; i < SET_SIZE; i++) { + yield "v" + i; + } +} + +class CustomSet extends Set { + abc = 123; + constructor(iterable) { + super(iterable); + } +} + +const a = new Set(genValues()); +const b = new Set(genValues()); +bench("deepEqual Set", () => expect(a).toEqual(b)); + +const x = new CustomSet(genValues()); +const y = new CustomSet(genValues()); +bench("deepEqual CustomSet", () => expect(x).toEqual(y)); + +await run(); diff --git a/bench/emitter/implementations.mjs b/bench/emitter/implementations.mjs index 2050ac38e0..abf025645f 100644 --- a/bench/emitter/implementations.mjs +++ b/bench/emitter/implementations.mjs @@ -1,6 +1,5 @@ -import EventEmitter3 from "eventemitter3"; -import { group } from "mitata"; import EventEmitterNative from "node:events"; +import { group } from "../runner.mjs"; export const implementations = [ { diff --git a/bench/emitter/microbench.mjs b/bench/emitter/microbench.mjs index eae59d4c19..4f3ebb465d 100644 --- a/bench/emitter/microbench.mjs +++ b/bench/emitter/microbench.mjs @@ -1,4 +1,4 @@ -import { bench, run } from "mitata"; +import { bench, run } from "../runner.mjs"; import { groupForEmitter } from "./implementations.mjs"; var id = 0; diff --git a/bench/emitter/microbench_once.mjs b/bench/emitter/microbench_once.mjs index b24fb21031..fa5ca9496a 100644 --- a/bench/emitter/microbench_once.mjs +++ b/bench/emitter/microbench_once.mjs @@ -1,4 +1,4 @@ -import { bench, run } from "mitata"; +import { bench, run } from "../runner.mjs"; import { groupForEmitter } from "./implementations.mjs"; var id = 0; diff --git a/bench/emitter/realworld_stream.mjs 
b/bench/emitter/realworld_stream.mjs index 1b2d19945b..6d2428df24 100644 --- a/bench/emitter/realworld_stream.mjs +++ b/bench/emitter/realworld_stream.mjs @@ -1,4 +1,4 @@ -import { bench, run } from "mitata"; +import { bench, run } from "../runner.mjs"; import { groupForEmitter } from "./implementations.mjs"; // Pseudo RNG is derived from https://stackoverflow.com/a/424445 diff --git a/bench/expect-to-equal/expect-to-equal.test.js b/bench/expect-to-equal/expect-to-equal.test.js index e8361596f5..52a904382f 100644 --- a/bench/expect-to-equal/expect-to-equal.test.js +++ b/bench/expect-to-equal/expect-to-equal.test.js @@ -1,5 +1,5 @@ // bun:test automatically rewrites this import to bun:test when run in bun -import { test, expect } from "@jest/globals"; +import { expect, test } from "@jest/globals"; const N = parseInt(process.env.RUN_COUNT || "10000", 10); if (!Number.isSafeInteger(N)) { diff --git a/bench/expect-to-equal/expect-to-equal.vitest.test.js b/bench/expect-to-equal/expect-to-equal.vitest.test.js index aea945180b..d02b56e3e8 100644 --- a/bench/expect-to-equal/expect-to-equal.vitest.test.js +++ b/bench/expect-to-equal/expect-to-equal.vitest.test.js @@ -1,4 +1,4 @@ -import { test, expect } from "vitest"; +import { expect, test } from "vitest"; const N = parseInt(process.env.RUN_COUNT || "10000", 10); if (!Number.isSafeInteger(N)) { diff --git a/bench/fetch/bun.js b/bench/fetch/bun.js index 96e7275a85..1241aa7d4f 100644 --- a/bench/fetch/bun.js +++ b/bench/fetch/bun.js @@ -1,4 +1,4 @@ -import { bench, run } from "mitata"; +import { bench, run } from "../runner.mjs"; const count = 100; diff --git a/bench/fetch/node.mjs b/bench/fetch/node.mjs index 96e7275a85..1241aa7d4f 100644 --- a/bench/fetch/node.mjs +++ b/bench/fetch/node.mjs @@ -1,4 +1,4 @@ -import { bench, run } from "mitata"; +import { bench, run } from "../runner.mjs"; const count = 100; diff --git a/bench/ffi/bun.js b/bench/ffi/bun.js index 6e83702ee0..5ef13e234a 100644 --- a/bench/ffi/bun.js +++ b/bench/ffi/bun.js @@ -1,5 +1,5 @@ -import { ptr, dlopen, CString, toBuffer } from "bun:ffi"; -import { run, bench, group } from "mitata"; +import { CString, dlopen, ptr } from "bun:ffi"; +import { bench, group, run } from "../runner.mjs"; const { napiNoop, napiHash, napiString } = require(import.meta.dir + "/src/ffi_napi_bench.node"); diff --git a/bench/ffi/deno.js b/bench/ffi/deno.js index 63ba6358c8..a1e7ae0ee4 100644 --- a/bench/ffi/deno.js +++ b/bench/ffi/deno.js @@ -1,4 +1,4 @@ -import { run, bench, group } from "../node_modules/mitata/src/cli.mjs"; +import { bench, group, run } from "../runner.mjs"; const extension = "darwin" !== Deno.build.os ? "so" : "dylib"; const path = new URL("src/target/release/libffi_napi_bench." 
+ extension, import.meta.url).pathname; diff --git a/bench/ffi/node.mjs b/bench/ffi/node.mjs index 8c2d069717..c6c9f67c3d 100644 --- a/bench/ffi/node.mjs +++ b/bench/ffi/node.mjs @@ -1,5 +1,5 @@ -import { run, bench, group } from "mitata"; import { createRequire } from "node:module"; +import { bench, group, run } from "../runner.mjs"; const require = createRequire(import.meta.url); const { napiNoop, napiHash, napiString } = require("./src/ffi_napi_bench.node"); diff --git a/bench/ffi/package.json b/bench/ffi/package.json index b7de8e9dd9..3bef4583fd 100644 --- a/bench/ffi/package.json +++ b/bench/ffi/package.json @@ -1,11 +1,11 @@ { "name": "bench", "scripts": { - "bench:bun": "$BUN bun.js", - "bench:node": "$NODE node.mjs", + "bench:bun": "bun bun.js", + "bench:node": "node node.mjs", "deps": "cd src && bun run deps", "build": "cd src && bun run build", - "bench:deno": "$DENO run -A --unstable deno.js", + "bench:deno": "deno run -A --unstable deno.js", "bench": "bun run bench:bun && bun run bench:node && bun run bench:deno" } } diff --git a/bench/glob/braces.mjs b/bench/glob/braces.mjs index c81aeb9d98..fa67614259 100644 --- a/bench/glob/braces.mjs +++ b/bench/glob/braces.mjs @@ -1,5 +1,5 @@ import braces from "braces"; -import { group, bench, run } from "mitata"; +import { bench, group, run } from "../runner.mjs"; // const iterations = 1000; const iterations = 100; @@ -10,15 +10,16 @@ const veryComplexPattern = "{a,b,HI{c,e,LMAO{d,f}Q}}{1,2,{3,4},5}"; console.log(braces(complexPattern, { expand: true })); function benchPattern(pattern, name) { - group({ name: `${name} pattern: "${pattern}"`, summary: true }, () => { + const _name = `${name} pattern: "${pattern}"`; + group({ name: _name, summary: true }, () => { if (typeof Bun !== "undefined") - bench("Bun", () => { + bench(`Bun (${_name})`, () => { for (let i = 0; i < iterations; i++) { Bun.$.braces(pattern); } }); - bench("micromatch/braces", () => { + bench(`micromatch/braces ${_name}`, () => { for (let i = 0; i < iterations; i++) { braces(pattern, { expand: true }); } diff --git a/bench/glob/match.mjs b/bench/glob/match.mjs index 66150daf25..c81a972c41 100644 --- a/bench/glob/match.mjs +++ b/bench/glob/match.mjs @@ -1,5 +1,5 @@ import micromatch from "micromatch"; -import { bench, run } from "mitata"; +import { bench, run } from "../runner.mjs"; const Glob = typeof Bun !== "undefined" ? Bun.Glob : undefined; const doMatch = typeof Bun === "undefined" ? 
micromatch.isMatch : (a, b) => new Glob(b).match(a); diff --git a/bench/glob/scan.mjs b/bench/glob/scan.mjs index 0d500af668..b5292eba1e 100644 --- a/bench/glob/scan.mjs +++ b/bench/glob/scan.mjs @@ -1,6 +1,6 @@ -import { run, bench, group } from "mitata"; import fg from "fast-glob"; import { fdir } from "fdir"; +import { bench, group, run } from "../runner.mjs"; const normalPattern = "*.ts"; const recursivePattern = "**/*.ts"; diff --git a/bench/grpc-server/benchmark.proto b/bench/grpc-server/benchmark.proto new file mode 100644 index 0000000000..cdbbd32400 --- /dev/null +++ b/bench/grpc-server/benchmark.proto @@ -0,0 +1,14 @@ +syntax = "proto3"; +package benchmark; + +service BenchmarkService { + rpc Ping(Request) returns (Response); +} + +message Request { + string message = 1; +} + +message Response { + string message = 1; +} \ No newline at end of file diff --git a/bench/grpc-server/cert.pem b/bench/grpc-server/cert.pem new file mode 100644 index 0000000000..df1f536127 --- /dev/null +++ b/bench/grpc-server/cert.pem @@ -0,0 +1,33 @@ +-----BEGIN CERTIFICATE----- +MIIFxjCCA66gAwIBAgIUUaQCzOcxcFBP0KwoQfNqD/FoI44wDQYJKoZIhvcNAQEL +BQAwYjELMAkGA1UEBhMCVVMxCzAJBgNVBAgMAkNBMRYwFAYDVQQHDA1TYW4gRnJh +bmNpc2NvMQwwCgYDVQQKDANCdW4xDDAKBgNVBAsMA0J1bjESMBAGA1UEAwwJbG9j +YWxob3N0MB4XDTI0MTAxNjAwMDExNloXDTM0MTAxNDAwMDExNlowYjELMAkGA1UE +BhMCVVMxCzAJBgNVBAgMAkNBMRYwFAYDVQQHDA1TYW4gRnJhbmNpc2NvMQwwCgYD +VQQKDANCdW4xDDAKBgNVBAsMA0J1bjESMBAGA1UEAwwJbG9jYWxob3N0MIICIjAN +BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp2s1CWRRV3bkjUxyBefcRCiZj8v6 +LIIWOb/kFJOo1PQsmQtOOWfY/kNEATPhLtEVolMzsQtaKV+u/Jnp6vU6cCU0qfQ/ +cha/s0XaSn9zkJSXjmNOPDOXoeJ5wmSUvWETRvDgeYXCg84zTwRnD1pXIsKxHtia +SYkTC29skSn0+63GW2Ebzkbn3jcYbk3gfkRO/qw8EDh/4/TcS2SjoHl96E1QcfBX +InXrPGoHQhuqJV60rmmkVws0lTIZIq0g2p7iFDCg5TG1asakX7+CrEM/q+oyo3e8 +RwMfc+9pqFEqyvXGIQSulS+CVKKbpAFMg07UGYe1t0s5iCwfLQ9apaKL31t/3Vkr +uVKgy5FrPLnRXkFXDZ1v+43AZBmdLrKODzsqHEbt2JmV0V6JVUkE4kbeJr/nlkhQ +x6yXloYY3VKbnCb1L3HmMInrK1QSpxlOb8RllTd33oBwd1FKEvH2gza0j9hqq8uQ +hWVN7tlamkgtBteZ8Y9fd3MdxD9iZOx4dVtCX1+sgJFdaL2ZgE0asojn46yT8Uqw +5d0M9vqmWc5AqG7c4UWWRrfB1MfOq/X8GtImmKyhEgizIPdWFeF1cNjhPffJv4yR +Y4Rj33OBTCM+9h8ZSw/fKo55yRXyz3bjrW2Mg8Dtq+6TcRd5gSLCaTN6jX8E9y7G +TobnA9MnKHhSIhsCAwEAAaN0MHIwHQYDVR0OBBYEFEJU6/9ELCp1CAxYJ5FJJxpV +FSRmMB8GA1UdIwQYMBaAFEJU6/9ELCp1CAxYJ5FJJxpVFSRmMA8GA1UdEwEB/wQF +MAMBAf8wHwYDVR0RBBgwFoIJbG9jYWxob3N0ggkxMjcuMC4wLjEwDQYJKoZIhvcN +AQELBQADggIBACyOPdVwfJg1aUNANy78+cm6eoInM9NDdXGWHMqCJwYF6qJTQV11 +jYwYrl+OWOi3CEC+ogXl+uJX4tSS5d+rBTXEb73cLpogxP+xuxr4cBHhtgpGRpY0 +GqWCFUTexHxXMrYhHQxf3uv79PNauw/dd1Baby1OjF3zSKRzFsv4KId97cAgT/9H +HfUo2ym5jmhNFj5rhUavO3Pw1++1eeDeDAkS6T59buzx0h9760WD20oBdgjt42cb +P6xg9OwV7ALQSwJ8YPEXpkl7u+6jy0j5ceYmXh76tAyA+hDYOJrY0opBjSPmXH99 +p3W63gvk/AdfeAdbFHp6en0b04x4EIogOGZxBP35rzBvsQpqavBE3PBpUIyrQs5p +OBUncRrcjEDL6WKh6RJIjZnvpHPrEqOqyxaeWRc4+85ZrVArJHGMc8I+zs9uCFjo +Cjfde3d317kCszUTxo0l3azyBpr007PMIUoBF2VJEAyQp2Tz/yu0CbEscNJO/wCn +Sb1A6ojaQcgQe2hsaJz/mS+OOjHHaDbCp9iltP2CS63PYleEx4q1Bn8KVRy2zYTB +n74y4YaD8Q+hSA6zU741pzqK2SFCpBQnSz757ocr6WspQ47iOonX2giGZS/3KVeK +qNzU14+h0b8HaBqZmOvjF+S4G0HDpRwxPzDWgc7dEIWlzHH+ZCqjBFwL +-----END CERTIFICATE----- diff --git a/bench/grpc-server/index.js b/bench/grpc-server/index.js new file mode 100644 index 0000000000..07edf3a4d6 --- /dev/null +++ b/bench/grpc-server/index.js @@ -0,0 +1,31 @@ +const grpc = require("@grpc/grpc-js"); +const protoLoader = require("@grpc/proto-loader"); +const packageDefinition = protoLoader.loadSync("benchmark.proto", {}); +const proto = 
grpc.loadPackageDefinition(packageDefinition).benchmark; +const fs = require("fs"); + +function ping(call, callback) { + callback(null, { message: "Hello, World" }); +} + +function main() { + const server = new grpc.Server(); + server.addService(proto.BenchmarkService.service, { ping: ping }); + const tls = !!process.env.TLS && (process.env.TLS === "1" || process.env.TLS === "true"); + const port = process.env.PORT || 50051; + const host = process.env.HOST || "localhost"; + let credentials; + if (tls) { + const ca = fs.readFileSync("./cert.pem"); + const key = fs.readFileSync("./key.pem"); + const cert = fs.readFileSync("./cert.pem"); + credentials = grpc.ServerCredentials.createSsl(ca, [{ private_key: key, cert_chain: cert }]); + } else { + credentials = grpc.ServerCredentials.createInsecure(); + } + server.bindAsync(`${host}:${port}`, credentials, () => { + console.log(`Server running at ${tls ? "https" : "http"}://${host}:${port}`); + }); +} + +main(); diff --git a/bench/grpc-server/key.pem b/bench/grpc-server/key.pem new file mode 100644 index 0000000000..fb87dccfd2 --- /dev/null +++ b/bench/grpc-server/key.pem @@ -0,0 +1,52 @@ +-----BEGIN PRIVATE KEY----- +MIIJQgIBADANBgkqhkiG9w0BAQEFAASCCSwwggkoAgEAAoICAQCnazUJZFFXduSN +THIF59xEKJmPy/osghY5v+QUk6jU9CyZC045Z9j+Q0QBM+Eu0RWiUzOxC1opX678 +menq9TpwJTSp9D9yFr+zRdpKf3OQlJeOY048M5eh4nnCZJS9YRNG8OB5hcKDzjNP +BGcPWlciwrEe2JpJiRMLb2yRKfT7rcZbYRvORufeNxhuTeB+RE7+rDwQOH/j9NxL +ZKOgeX3oTVBx8Fcides8agdCG6olXrSuaaRXCzSVMhkirSDanuIUMKDlMbVqxqRf +v4KsQz+r6jKjd7xHAx9z72moUSrK9cYhBK6VL4JUopukAUyDTtQZh7W3SzmILB8t +D1qloovfW3/dWSu5UqDLkWs8udFeQVcNnW/7jcBkGZ0uso4POyocRu3YmZXRXolV +SQTiRt4mv+eWSFDHrJeWhhjdUpucJvUvceYwiesrVBKnGU5vxGWVN3fegHB3UUoS +8faDNrSP2Gqry5CFZU3u2VqaSC0G15nxj193cx3EP2Jk7Hh1W0JfX6yAkV1ovZmA +TRqyiOfjrJPxSrDl3Qz2+qZZzkCobtzhRZZGt8HUx86r9fwa0iaYrKESCLMg91YV +4XVw2OE998m/jJFjhGPfc4FMIz72HxlLD98qjnnJFfLPduOtbYyDwO2r7pNxF3mB +IsJpM3qNfwT3LsZOhucD0ycoeFIiGwIDAQABAoICAE+YYrDCZwHEXsjmzVcNcuVc +wBVjjt9WQabXGmLGCQClzgY9H8WfH8VSyaQgvDB762MvV2YW1ZjSCunBazrvuAbV +SYJ7wyZEtoNO9IdyrMjSPHPPtsRcavzmJalMFIMtAfM6Vh6wf1gW0sIAf9cGxmKa +WYcmx8OqTcmkAePKJNT7O1D6jDO39kjpvM3EbLTbWQsva6bylasVIR8fC8QhvsCQ +8WwaLfMOSPaCGk1Nxcjai+BYDW/sveUo2lZoJTSLUUT0EaqlxXCsXD3BWSj5F+5t +/AFHzdWdIHkIHB2P6V5xFu9fwHjhC3+dh42jqHLNKX2xza0FMKcTAwdzQ094RjL3 +cOGIsa0Vdt7Mks5eLCRxz0xI3kyrbF0/CopxT0pVWZwUzPk1G+Z3HesWkVtQpg7u +RYzsoNKKc5mhc/V+vG290WAcNB4E3m85DgKQr4ib+J/rCy5/SnJYgg4QXsEyNlQ5 +ESBtRmuPfnrPIxqrDKZ7ZsJv8XFWydXTOfJxeKR1T1S02iYna+z1FnNu+t0ELTr9 +uhmkuqmV8RJVTub1P2EJPdiku/61UwNLyyZMgFjATDxB0hHIj1FP1HbfhEYbkYNc +Dl7a7egJ4KFYWpQ+7MzOmc0OKq1HuJ9H4FhoYpbVq1OQosZ6G3d9afKSZa6dFdK0 +8ujvdQBR0NlAhc/LAr6BAoIBAQDfD3h9P4i5L8NCdocovCi3Eo0kcNQ3QuvnWrrs +B/9CLoWhJrcLV85d0dEX6lSYl9BWW02ilVB+Qvom2wS2td1CBUgDxovX4tCZCuXt +otYL/yWWOA7IG0Fjt6YEERQD/tRfKnn8hVBlk5cDTXXxHRGVMku4CHsN3ILtITQS +VnVsTrGoWd6mFFA9X9Qu4zR9wKtjGEuL7BT8ixxtXLa2tMjdc4UL140yAgmMemJS +TzC6EURe2OnhIzVe9yyLKcqw0prkGHg/Lau5lA1CAh67ZMY4EjO3cuda8R+O7vyO +z2afeaTORzzdEbSZPG+8oqIN1/RjRCbl3RXYN8ibSwOzp6X7AoIBAQDAJEVta98J +P2/36rXrkl6WrRfYqUPy6vgo/lPuRpp+BQ7ldgmH4+ZrJW5Mxa5hktVujk/C2kAO +auzhzNlsxR+c/KwtsL1JXwBn8CT1bR0qvi+URmvGQn9GOKrLLy+6cfphuZWuc4/r +hAgXzEjzPcJJJfxA1i2soKPbiFiCGHxot68P4uJSM2sU6QjNIxEjPbTJjEg894pD +GJoiRRVHgnzzxL3cqrK90Zn6MAl9f2tYihfddsENeZb5t84LBppxBSGouE3ZH8uD +Sufs4DSj1ptocbDbX+0kRNqfjTI5ivDxlS+ZKBe05PVTUmGBAWLamfCe89IW3/z+ +Rfkh4ZBPtlphAoIBADwjSqPR7kWnN+iCVjxIRl3dNYpelQh1FW7hikW6fjpUmphw +/KalPLEUsV/WQIqHW5b8tLihsvrnidPR9rpf29BB5kGGVQuWThEE3CquXTEM0BBo ++qs+lemRiMPN6uyM1qr1o7/OHXfVS8CLMMIZyTTFQ57RQoPhMLdH3WcYQj46FTHD 
+UQDLtzpkzKr7fJpuyIZF9ZA6zQmtY7OkbGpj4Ue7LmKb8ahK3lIuaLWyPfvcTeeY +aa3WNTxuPWcjlE8J6NKYOksmQAcfgFeMhMaXC83wMltCMlfVbGG30wWZqxxRynoG +wMUFUgCCR8m+uxwqXewpYqdUbOBHYeFkXxIfn+MCggEAR5p8wQ1NHd4lNOekCfkP +BOnWlChoKRPFjUlSL97h3gq2hW6amKimitF1LGkS1kvo+/1O3heFfZn9UxyK/kzr +vg4vgAt4Tup3dUR6EXgrQW2Ev6YKreTEF4Awre2UxM+K9nY5wLxSKvuWJIA9w2AF +kkr0mZj3hniK99n02e6UFlY1iB8OJoIA6tb5L7FcxpxNTjrYBNhfDygQ8Kp8Bp0r +QZDVDHIUkEaXMjRKpRkiAOndgOurgAEK8V69C0DXtzypUX31jO+bYP8+NPlMxK3K +Vn7f4LD75+M88e6lg+oyZmUpStM1GnWksvtlWLUSiNKLaEEGzv2EA6JB+I1dwUb8 +oQKCAQEAlmisUyn1/lpNnEzKsfUnRs53WxS2e1br5vJ5+pet3cjXT2btfp6J5/mf +Tfqv5mZfTjYxydG0Kl3afI/SnhTcRS2/s4svrktZYLOLM2PAGYdCV6j1stXl4ObO +eIfjzB3y1Zc2dEcWTylJ/lABoNGMPWFJQ67q8WS37pUHQPseJ++LmZFvlRyBgZBl +VLqiHHiZ2ax+yC1ZxY4RECtEiYFplspNldNe+bP/lzTJftsUDe1FqRT/SvEam+1f +kb//sbHkJ+l4BEv0Us3SIGwJ0BblhxLYO34IFVpheY4UQBy/nRaeUUdVR9r8JtYD +z/cCLOrUJfealezimyd8SKPWPeHhrA== +-----END PRIVATE KEY----- diff --git a/bench/grpc-server/package.json b/bench/grpc-server/package.json new file mode 100644 index 0000000000..191a6ad719 --- /dev/null +++ b/bench/grpc-server/package.json @@ -0,0 +1,15 @@ +{ + "name": "bench", + "scripts": { + "deps": "exit 0", + "build": "exit 0", + "bun:server": "TLS=1 PORT=50051 bun ./index.js", + "node:server": "TLS=1 PORT=50051 node ./index.js", + "bench": "ghz --cacert ./cert.pem --proto ./benchmark.proto --call benchmark.BenchmarkService.Ping -d '{\"message\": \"Hello\"}' --total=100000 localhost:50051", + "bench:insecure": "ghz --insecure --proto ./benchmark.proto --call benchmark.BenchmarkService.Ping -d '{\"message\": \"Hello\"}' --total=100000 localhost:50051" + }, + "dependencies": { + "@grpc/grpc-js": "1.12.0", + "@grpc/proto-loader": "0.7.10" + } +} diff --git a/bench/gzip/bun.js b/bench/gzip/bun.js index 6b69ae1fbb..6b7b66cb66 100644 --- a/bench/gzip/bun.js +++ b/bench/gzip/bun.js @@ -1,5 +1,5 @@ -import { run, bench, group } from "mitata"; -import { gzipSync, gunzipSync } from "bun"; +import { gunzipSync, gzipSync } from "bun"; +import { bench, group, run } from "../runner.mjs"; const data = await Bun.file(require.resolve("@babel/standalone/babel.min.js")).arrayBuffer(); diff --git a/bench/gzip/deno.js b/bench/gzip/deno.js index 66c858e55b..fa425e917a 100644 --- a/bench/gzip/deno.js +++ b/bench/gzip/deno.js @@ -1,4 +1,4 @@ -import { run, bench } from "../node_modules/mitata/src/cli.mjs"; +import { bench, run } from "../runner.mjs"; const data = new TextEncoder().encode("Hello World!".repeat(9999)); diff --git a/bench/gzip/node.mjs b/bench/gzip/node.mjs index d7a1abade7..f4c867ce58 100644 --- a/bench/gzip/node.mjs +++ b/bench/gzip/node.mjs @@ -1,7 +1,7 @@ -import { run, bench } from "mitata"; -import { gzipSync, gunzipSync } from "zlib"; -import { createRequire } from "module"; import { readFileSync } from "fs"; +import { createRequire } from "module"; +import { gunzipSync, gzipSync } from "zlib"; +import { bench, run } from "../runner.mjs"; const require = createRequire(import.meta.url); const data = readFileSync(require.resolve("@babel/standalone/babel.min.js")); diff --git a/bench/gzip/package.json b/bench/gzip/package.json index 49e6c3a890..a6a6cd4652 100644 --- a/bench/gzip/package.json +++ b/bench/gzip/package.json @@ -3,9 +3,9 @@ "scripts": { "deps": "exit 0", "build": "exit 0", - "bench:bun": "$BUN bun.js", - "bench:node": "$NODE node.mjs", - "bench:deno": "$DENO run -A --unstable deno.js", + "bench:bun": "bun bun.js", + "bench:node": "node node.mjs", + "bench:deno": "deno run -A --unstable deno.js", "bench": "bun run bench:bun && bun run 
bench:node && bun run bench:deno" }, "dependencies": { diff --git a/bench/hot-module-reloading/css-stress-test/src/index.tsx b/bench/hot-module-reloading/css-stress-test/src/index.tsx index 7ca290f48a..5eefb43040 100644 --- a/bench/hot-module-reloading/css-stress-test/src/index.tsx +++ b/bench/hot-module-reloading/css-stress-test/src/index.tsx @@ -1,6 +1,5 @@ -import { Main } from "./main"; -import classNames from "classnames"; import ReactDOM from "react-dom"; +import { Main } from "./main"; const Base = ({}) => { const name = typeof location !== "undefined" ? decodeURIComponent(location.search.substring(1)) : null; diff --git a/bench/install/app/entry.server.tsx b/bench/install/app/entry.server.tsx index fbea6220e2..a83df79c87 100644 --- a/bench/install/app/entry.server.tsx +++ b/bench/install/app/entry.server.tsx @@ -4,11 +4,11 @@ * For more information, see https://remix.run/docs/en/main/file-conventions/entry.server */ -import { PassThrough } from "node:stream"; import type { EntryContext } from "@remix-run/node"; import { Response } from "@remix-run/node"; import { RemixServer } from "@remix-run/react"; import isbot from "isbot"; +import { PassThrough } from "node:stream"; import { renderToPipeableStream } from "react-dom/server"; const ABORT_DELAY = 5_000; diff --git a/bench/log/bun.js b/bench/log/bun.js index 43728fd648..3e78eb4206 100644 --- a/bench/log/bun.js +++ b/bench/log/bun.js @@ -1,4 +1,4 @@ -import { bench, run } from "mitata"; +import { bench, run } from "../runner.mjs"; bench("console.log('hello')", () => console.log("hello")); bench("console.log({ hello: 'object' })", () => console.log({ hello: "object" })); diff --git a/bench/log/deno.mjs b/bench/log/deno.mjs index 24d7244633..4bfa1a3cc2 100644 --- a/bench/log/deno.mjs +++ b/bench/log/deno.mjs @@ -1,4 +1,4 @@ -import { bench, run } from "../node_modules/mitata/src/cli.mjs"; +import { bench, run } from "../runner.mjs"; bench("console.log", () => console.log("hello")); bench("console.log({ hello: 'object' })", () => console.log({ hello: "object" })); diff --git a/bench/log/node.mjs b/bench/log/node.mjs index 6ec73f7438..4bfa1a3cc2 100644 --- a/bench/log/node.mjs +++ b/bench/log/node.mjs @@ -1,4 +1,4 @@ -import { bench, run } from "mitata"; +import { bench, run } from "../runner.mjs"; bench("console.log", () => console.log("hello")); bench("console.log({ hello: 'object' })", () => console.log({ hello: "object" })); diff --git a/bench/log/package.json b/bench/log/package.json index 1dc6e46020..821c1c3064 100644 --- a/bench/log/package.json +++ b/bench/log/package.json @@ -3,9 +3,9 @@ "scripts": { "deps": "exit 0", "build": "exit 0", - "bench:bun": "$BUN bun.js | grep iter", - "bench:node": "$NODE node.mjs | grep iter", - "bench:deno": "$DENO run -A --unstable deno.mjs | grep iter", + "bench:bun": "bun bun.js | grep iter", + "bench:node": "node node.mjs | grep iter", + "bench:deno": "deno run -A --unstable deno.mjs | grep iter", "bench": "bun run bench:bun && bun run bench:node && bun run bench:deno" } } diff --git a/bench/modules/node_os/bun.js b/bench/modules/node_os/bun.js index 217fae47da..713f9483a9 100644 --- a/bench/modules/node_os/bun.js +++ b/bench/modules/node_os/bun.js @@ -1,21 +1,21 @@ -import { bench, run } from "mitata"; +import { bench, run } from "../../runner.mjs"; import { + arch, cpus, endianness, - arch, - uptime, - networkInterfaces, - getPriority, - totalmem, freemem, + getPriority, homedir, hostname, loadavg, + networkInterfaces, platform, release, setPriority, tmpdir, + totalmem, type, + uptime, 
userInfo, version, } from "node:os"; diff --git a/bench/modules/node_os/node.mjs b/bench/modules/node_os/node.mjs index 217fae47da..36139b29ef 100644 --- a/bench/modules/node_os/node.mjs +++ b/bench/modules/node_os/node.mjs @@ -1,24 +1,24 @@ -import { bench, run } from "mitata"; import { + arch, cpus, endianness, - arch, - uptime, - networkInterfaces, - getPriority, - totalmem, freemem, + getPriority, homedir, hostname, loadavg, + networkInterfaces, platform, release, setPriority, tmpdir, + totalmem, type, + uptime, userInfo, version, } from "node:os"; +import { bench, run } from "../../runner.mjs"; bench("cpus()", () => cpus()); bench("networkInterfaces()", () => networkInterfaces()); diff --git a/bench/modules/node_os/package.json b/bench/modules/node_os/package.json index 2a095e28b6..d198465b9e 100644 --- a/bench/modules/node_os/package.json +++ b/bench/modules/node_os/package.json @@ -3,8 +3,8 @@ "scripts": { "deps": "exit 0", "build": "exit 0", - "bench:bun": "$BUN bun.js", - "bench:node": "$NODE node.mjs", + "bench:bun": "bun bun.js", + "bench:node": "node node.mjs", "bench": "bun run bench:bun && bun run bench:node" } } diff --git a/bench/package.json b/bench/package.json index 6ea67f5c99..a80d7566dc 100644 --- a/bench/package.json +++ b/bench/package.json @@ -7,13 +7,17 @@ "@swc/core": "^1.2.133", "benchmark": "^2.1.4", "braces": "^3.0.2", + "color": "^4.2.3", "esbuild": "^0.14.12", "eventemitter3": "^5.0.0", "execa": "^8.0.1", "fast-glob": "3.3.1", "fdir": "^6.1.0", - "mitata": "^0.1.6", + "mitata": "^1.0.10", + "react": "^18.3.1", + "react-dom": "^18.3.1", "string-width": "7.1.0", + "tinycolor2": "^1.6.0", "zx": "^7.2.3" }, "scripts": { diff --git a/bench/react-hello-world/react-hello-world.deno.jsx b/bench/react-hello-world/react-hello-world.deno.jsx index 0bea2574a2..08cb7b0222 100644 --- a/bench/react-hello-world/react-hello-world.deno.jsx +++ b/bench/react-hello-world/react-hello-world.deno.jsx @@ -1,5 +1,5 @@ -import { renderToReadableStream } from "https://esm.run/react-dom/server"; import * as React from "https://esm.run/react"; +import { renderToReadableStream } from "https://esm.run/react-dom/server"; const App = () => ( diff --git a/bench/react-hello-world/react-hello-world.node.jsx b/bench/react-hello-world/react-hello-world.node.jsx index a1a5125737..52dd3b0566 100644 --- a/bench/react-hello-world/react-hello-world.node.jsx +++ b/bench/react-hello-world/react-hello-world.node.jsx @@ -1,6 +1,6 @@ // react-ssr.tsx -import { renderToPipeableStream } from "react-dom/server.node"; import React from "react"; +import { renderToPipeableStream } from "react-dom/server.node"; const http = require("http"); const App = () => ( diff --git a/bench/runner.mjs b/bench/runner.mjs new file mode 100644 index 0000000000..9f6bcee16f --- /dev/null +++ b/bench/runner.mjs @@ -0,0 +1,19 @@ +import * as Mitata from "mitata"; +import process from "node:process"; + +const asJSON = !!process?.env?.BENCHMARK_RUNNER; + +/** @param {Parameters["0"]} opts */ +export function run(opts = {}) { + if (asJSON) { + opts.format = "json"; + } + + return Mitata.run(opts); +} + +export const bench = Mitata.bench; + +export function group(_name, fn) { + return Mitata.group(fn); +} diff --git a/bench/scanner/remix-route.ts b/bench/scanner/remix-route.ts index dbacf3a4ca..e9d0880eed 100644 --- a/bench/scanner/remix-route.ts +++ b/bench/scanner/remix-route.ts @@ -1,5 +1,5 @@ +import type { ActionFunction, LoaderFunction } from "remix"; import { useParams } from "remix"; -import type { LoaderFunction, 
ActionFunction } from "remix"; export const loader: LoaderFunction = async ({ params }) => { console.log(params.postId); diff --git a/bench/snippets/array-arguments-slice.mjs b/bench/snippets/array-arguments-slice.mjs index 5d1139b8b3..8470ab79a6 100644 --- a/bench/snippets/array-arguments-slice.mjs +++ b/bench/snippets/array-arguments-slice.mjs @@ -1,4 +1,4 @@ -import { bench, run } from "./runner.mjs"; +import { bench, run } from "../runner.mjs"; function doIt(...args) { // we use .at() to prevent constant folding optimizations diff --git a/bench/snippets/array-map.mjs b/bench/snippets/array-map.mjs index 7b8bc6fdcf..b467e9cd3e 100644 --- a/bench/snippets/array-map.mjs +++ b/bench/snippets/array-map.mjs @@ -1,5 +1,5 @@ // https://github.com/oven-sh/bun/issues/1096 -import { bench, run } from "./runner.mjs"; +import { bench, run } from "../runner.mjs"; const identity = x => x; diff --git a/bench/snippets/array-shift.mjs b/bench/snippets/array-shift.mjs index 7039026706..15733f940b 100644 --- a/bench/snippets/array-shift.mjs +++ b/bench/snippets/array-shift.mjs @@ -1,4 +1,4 @@ -import { bench, run } from "./runner.mjs"; +import { bench, run } from "../runner.mjs"; var myArray = new Array(5); bench("[1, 2, 3, 4, 5].shift()", () => { diff --git a/bench/snippets/array-sort.mjs b/bench/snippets/array-sort.mjs index 9ed257740e..8951d716a6 100644 --- a/bench/snippets/array-sort.mjs +++ b/bench/snippets/array-sort.mjs @@ -1,4 +1,4 @@ -import { bench, run } from "./runner.mjs"; +import { bench, run } from "../runner.mjs"; var comparator = (a, b) => a - b; const numbers = [ diff --git a/bench/snippets/arraybuffersink.mjs b/bench/snippets/arraybuffersink.mjs index f90fae69fd..566f9bd630 100644 --- a/bench/snippets/arraybuffersink.mjs +++ b/bench/snippets/arraybuffersink.mjs @@ -1,6 +1,6 @@ // @runtime bun import { ArrayBufferSink } from "bun"; -import { bench, run } from "./runner.mjs"; +import { bench, run } from "../runner.mjs"; var short = "Hello World!"; var shortUTF16 = "Hello World 💕💕💕"; diff --git a/bench/snippets/assert.mjs b/bench/snippets/assert.mjs index 3b3284e54b..7ed8cf7596 100644 --- a/bench/snippets/assert.mjs +++ b/bench/snippets/assert.mjs @@ -1,5 +1,5 @@ -import { bench, group, run } from "./runner.mjs"; import * as assert from "assert"; +import { bench, run } from "../runner.mjs"; bench("deepEqual", () => { assert.deepEqual({ foo: "123", bar: "baz" }, { foo: "123", bar: "baz" }); diff --git a/bench/snippets/async-overhead.mjs b/bench/snippets/async-overhead.mjs index e285c7edd6..ec171dae54 100644 --- a/bench/snippets/async-overhead.mjs +++ b/bench/snippets/async-overhead.mjs @@ -1,4 +1,4 @@ -import { bench, run } from "mitata"; +import { bench, run } from "../runner.mjs"; bench("noop", function () {}); bench("async function(){}", async function () {}); diff --git a/bench/snippets/atob.mjs b/bench/snippets/atob.mjs index 3a848300c0..de7d128265 100644 --- a/bench/snippets/atob.mjs +++ b/bench/snippets/atob.mjs @@ -1,4 +1,4 @@ -import { bench, run } from "./runner.mjs"; +import { bench, run } from "../runner.mjs"; function makeBenchmark(size) { const latin1 = btoa("A".repeat(size)); diff --git a/bench/snippets/blob.mjs b/bench/snippets/blob.mjs index 68ebc1ce4d..7486f56fc9 100644 --- a/bench/snippets/blob.mjs +++ b/bench/snippets/blob.mjs @@ -1,4 +1,4 @@ -import { bench, run } from "../node_modules/mitata/src/cli.mjs"; +import { bench, run } from "../runner.mjs"; bench("new Blob(['hello world'])", function () { return new Blob(["hello world"]); diff --git 
a/bench/snippets/buffer-base64.mjs b/bench/snippets/buffer-base64.mjs index 96bab6f039..73dc3bccf8 100644 --- a/bench/snippets/buffer-base64.mjs +++ b/bench/snippets/buffer-base64.mjs @@ -1,4 +1,4 @@ -import { bench, run } from "./runner.mjs"; +import { bench, run } from "../runner.mjs"; function makeBenchmark(size, isToString) { const base64Input = Buffer.alloc(size, "latin1").toString("base64"); diff --git a/bench/snippets/buffer-concat.mjs b/bench/snippets/buffer-concat.mjs index 0a6e4a0c85..c2812796a7 100644 --- a/bench/snippets/buffer-concat.mjs +++ b/bench/snippets/buffer-concat.mjs @@ -1,4 +1,4 @@ -import { bench, run } from "./runner.mjs"; +import { bench, run } from "../runner.mjs"; for (let size of [32, 2048, 1024 * 16, 1024 * 1024 * 2, 1024 * 1024 * 16]) { const first = Buffer.allocUnsafe(size); diff --git a/bench/snippets/buffer-create.mjs b/bench/snippets/buffer-create.mjs index 115f8dd4aa..ded7f02cab 100644 --- a/bench/snippets/buffer-create.mjs +++ b/bench/snippets/buffer-create.mjs @@ -1,7 +1,7 @@ // @runtime bun,node,deno -import { bench, run } from "./runner.mjs"; -import process from "node:process"; import { Buffer } from "node:buffer"; +import process from "node:process"; +import { bench, run } from "../runner.mjs"; const N = parseInt(process.env.RUN_COUNTER ?? "10000", 10); var isBuffer = new Buffer(0); diff --git a/bench/snippets/buffer-fill.mjs b/bench/snippets/buffer-fill.mjs new file mode 100644 index 0000000000..47b5babbc4 --- /dev/null +++ b/bench/snippets/buffer-fill.mjs @@ -0,0 +1,15 @@ +import { bench, run } from "../runner.mjs"; + +for (let size of [32, 2048, 1024 * 16, 1024 * 1024 * 2, 1024 * 1024 * 16]) { + for (let fillSize of [4, 8, 16, 11]) { + const buffer = Buffer.allocUnsafe(size); + + const pattern = "x".repeat(fillSize); + + bench(`Buffer.fill ${size} bytes with ${fillSize} byte value`, () => { + buffer.fill(pattern); + }); + } +} + +await run(); diff --git a/bench/snippets/buffer-to-string.mjs b/bench/snippets/buffer-to-string.mjs index c4dac62081..2d26535838 100644 --- a/bench/snippets/buffer-to-string.mjs +++ b/bench/snippets/buffer-to-string.mjs @@ -1,6 +1,6 @@ -import { bench, run } from "./runner.mjs"; import { Buffer } from "node:buffer"; import crypto from "node:crypto"; +import { bench, run } from "../runner.mjs"; const bigBuffer = Buffer.from("hello world".repeat(10000)); const converted = bigBuffer.toString("base64"); diff --git a/bench/snippets/color.mjs b/bench/snippets/color.mjs new file mode 100644 index 0000000000..4a505630fc --- /dev/null +++ b/bench/snippets/color.mjs @@ -0,0 +1,25 @@ +import Color from "color"; +import tinycolor from "tinycolor2"; +import { bench, group, run } from "../runner.mjs"; + +const inputs = ["#f00", "rgb(255, 0, 0)", "rgba(255, 0, 0, 1)", "hsl(0, 100%, 50%)"]; + +for (const input of inputs) { + group(`${input}`, () => { + if (typeof Bun !== "undefined") { + bench(`Bun.color() (${input})`, () => { + Bun.color(input, "css"); + }); + } + + bench(`color (${input})`, () => { + Color(input).hex(); + }); + + bench(`'tinycolor2' (${input})`, () => { + tinycolor(input).toHexString(); + }); + }); +} + +await run(); diff --git a/bench/snippets/concat.js b/bench/snippets/concat.js index 76804dae19..15e418f05e 100644 --- a/bench/snippets/concat.js +++ b/bench/snippets/concat.js @@ -1,6 +1,6 @@ -import { bench, group, run } from "./runner.mjs"; -import { readFileSync } from "fs"; import { allocUnsafe } from "bun"; +import { readFileSync } from "fs"; +import { bench, group, run } from "../runner.mjs"; function 
polyfill(chunks) { var size = 0; @@ -41,15 +41,16 @@ const chunkGroups = [ ]; for (const chunks of chunkGroups) { - group(`${chunks.reduce((prev, curr, i, a) => prev + curr.byteLength, 0)} bytes for ${chunks.length} chunks`, () => { - bench("Bun.concatArrayBuffers", () => { + const name = `${chunks.reduce((prev, curr, i, a) => prev + curr.byteLength, 0)} bytes for ${chunks.length} chunks` + group(name, () => { + bench(`Bun.concatArrayBuffers (${name})`, () => { Bun.concatArrayBuffers(chunks); }); - bench("Uint8Array.set", () => { + bench(`Uint8Array.set (${name})`, () => { polyfill(chunks); }); - bench("Uint8Array.set (uninitialized memory)", () => { + bench(`Uint8Array.set (uninitialized memory) (${name})`, () => { polyfillUninitialized(chunks); }); }); diff --git a/bench/snippets/console-log.mjs b/bench/snippets/console-log.mjs index b95533f012..274af84d67 100644 --- a/bench/snippets/console-log.mjs +++ b/bench/snippets/console-log.mjs @@ -1,4 +1,4 @@ -import { bench, run } from "../node_modules/mitata/src/cli.mjs"; +import { bench, run } from "../runner.mjs"; const json = { login: "wongmjane", diff --git a/bench/snippets/cp.mjs b/bench/snippets/cp.mjs index 898375439f..1572296e62 100644 --- a/bench/snippets/cp.mjs +++ b/bench/snippets/cp.mjs @@ -2,7 +2,7 @@ import { mkdirSync, rmSync, writeFileSync } from "fs"; import { cp } from "fs/promises"; import { tmpdir } from "os"; import { join, resolve } from "path"; -import { bench, run } from "./runner.mjs"; +import { bench, run } from "../runner.mjs"; import { fileURLToPath } from "url"; const hugeDirectory = (() => { diff --git a/bench/snippets/crypto-2190.mjs b/bench/snippets/crypto-2190.mjs index dab54f1fdf..1ff6536788 100644 --- a/bench/snippets/crypto-2190.mjs +++ b/bench/snippets/crypto-2190.mjs @@ -1,6 +1,6 @@ // https://github.com/oven-sh/bun/issues/2190 -import { bench, run } from "mitata"; import { createHash } from "node:crypto"; +import { bench, run } from "../runner.mjs"; const data = "Delightful remarkably mr on announcing themselves entreaties favourable. About to in so terms voice at. Equal an would is found seems of. The particular friendship one sufficient terminated frequently themselves. It more shed went up is roof if loud case. Delay music in lived noise an. 
Beyond genius really enough passed is up."; diff --git a/bench/snippets/crypto-hasher.mjs b/bench/snippets/crypto-hasher.mjs index 36f67739ad..e08e360753 100644 --- a/bench/snippets/crypto-hasher.mjs +++ b/bench/snippets/crypto-hasher.mjs @@ -1,5 +1,5 @@ // so it can run in environments without node module resolution -import { bench, run } from "./runner.mjs"; +import { bench, run } from "../runner.mjs"; import crypto from "node:crypto"; diff --git a/bench/snippets/crypto-randomUUID.mjs b/bench/snippets/crypto-randomUUID.mjs index f6a4c0aa68..f8faeb6c9e 100644 --- a/bench/snippets/crypto-randomUUID.mjs +++ b/bench/snippets/crypto-randomUUID.mjs @@ -1,4 +1,4 @@ -import { bench, run } from "./runner.mjs"; +import { bench, run } from "../runner.mjs"; bench("crypto.randomUUID()", () => { return crypto.randomUUID(); diff --git a/bench/snippets/crypto-stream.mjs b/bench/snippets/crypto-stream.mjs index 3560563d9d..f931f2ed73 100644 --- a/bench/snippets/crypto-stream.mjs +++ b/bench/snippets/crypto-stream.mjs @@ -1,6 +1,6 @@ // https://github.com/oven-sh/bun/issues/2190 -import { bench, run } from "mitata"; import { createHash } from "node:crypto"; +import { bench, run } from "../runner.mjs"; const data = "Delightful remarkably mr on announcing themselves entreaties favourable. About to in so terms voice at. Equal an would is found seems of. The particular friendship one sufficient terminated frequently themselves. It more shed went up is roof if loud case. Delay music in lived noise an. Beyond genius really enough passed is up."; diff --git a/bench/snippets/crypto.mjs b/bench/snippets/crypto.mjs index 484a4295dd..c285722056 100644 --- a/bench/snippets/crypto.mjs +++ b/bench/snippets/crypto.mjs @@ -1,6 +1,6 @@ // so it can run in environments without node module resolution -import { bench, run } from "../node_modules/mitata/src/cli.mjs"; import crypto from "node:crypto"; +import { bench, run } from "../runner.mjs"; var foo = new Uint8Array(65536); bench("crypto.getRandomValues(65536)", () => { crypto.getRandomValues(foo); diff --git a/bench/snippets/deep-equals.js b/bench/snippets/deep-equals.js index 53dee81ab4..87d68ce030 100644 --- a/bench/snippets/deep-equals.js +++ b/bench/snippets/deep-equals.js @@ -1,5 +1,5 @@ -import { bench, group, run } from "./runner.mjs"; import fastDeepEquals from "fast-deep-equal/es6/index"; +import { bench, group, run } from "../runner.mjs"; // const Date = globalThis.Date; function func1() {} @@ -490,7 +490,7 @@ for (let { tests, description } of fixture) { var expected; group(describe, () => { for (let equalsFn of [Bun.deepEquals, fastDeepEquals]) { - bench(equalsFn.name, () => { + bench(`${describe}: ${equalsFn.name}`, () => { expected = equalsFn(value1, value2); if (expected !== equal) { throw new Error(`Expected ${expected} to be ${equal} for ${description}`); diff --git a/bench/snippets/define-properties.mjs b/bench/snippets/define-properties.mjs index 6a10ab1832..f26d3c7188 100644 --- a/bench/snippets/define-properties.mjs +++ b/bench/snippets/define-properties.mjs @@ -1,4 +1,4 @@ -import { bench, run } from "../node_modules/mitata/src/cli.mjs"; +import { bench, run } from "../runner.mjs"; const properties = { closed: { diff --git a/bench/snippets/dns.node.mjs b/bench/snippets/dns.node.mjs index ffa58ff236..fe065edf06 100644 --- a/bench/snippets/dns.node.mjs +++ b/bench/snippets/dns.node.mjs @@ -1,5 +1,5 @@ import { lookup, resolve } from "node:dns/promises"; -import { bench, run } from "./runner.mjs"; +import { bench, run } from "../runner.mjs"; 
bench("(cached) dns.lookup remote x 50", async () => { var tld = "example.com"; diff --git a/bench/snippets/dns.ts b/bench/snippets/dns.ts index 7eeeea689b..cb350a808d 100644 --- a/bench/snippets/dns.ts +++ b/bench/snippets/dns.ts @@ -1,10 +1,10 @@ import { dns } from "bun"; -import { bench, run, group } from "./runner.mjs"; +import { bench, group, run } from "../runner.mjs"; async function forEachBackend(name, fn) { group(name, () => { for (let backend of ["libc", "c-ares", process.platform === "darwin" ? "system" : ""].filter(Boolean)) - bench(backend, fn(backend)); + bench(`${backend} (${name})`, fn(backend)); }); } diff --git a/bench/snippets/encode-into.mjs b/bench/snippets/encode-into.mjs index 5275b6f108..20ac486bad 100644 --- a/bench/snippets/encode-into.mjs +++ b/bench/snippets/encode-into.mjs @@ -1,4 +1,4 @@ -import { run, bench } from "../node_modules/mitata/src/cli.mjs"; +import { bench, run } from "../runner.mjs"; const encoder = new TextEncoder(); diff --git a/bench/snippets/error-capturestack.mjs b/bench/snippets/error-capturestack.mjs index 0c59ff9c84..3b715b3961 100644 --- a/bench/snippets/error-capturestack.mjs +++ b/bench/snippets/error-capturestack.mjs @@ -1,4 +1,4 @@ -import { bench, run } from "./runner.mjs"; +import { bench, run } from "../runner.mjs"; var err = new Error(); bench("Error.captureStackTrace(err)", () => { diff --git a/bench/snippets/escapeHTML.js b/bench/snippets/escapeHTML.js index 809666d71c..48b12bf61d 100644 --- a/bench/snippets/escapeHTML.js +++ b/bench/snippets/escapeHTML.js @@ -1,7 +1,4 @@ -import { group } from "./runner.mjs"; -import { bench, run } from "./runner.mjs"; -import { encode as htmlEntityEncode } from "html-entities"; -import { escape as heEscape } from "he"; +import { bench, group, run } from "../runner.mjs"; var bunEscapeHTML = globalThis.escapeHTML || Bun.escapeHTML; @@ -95,24 +92,21 @@ function reactEscapeHtml(string) { // } for (let input of [ - `long string, nothing to escape... `.repeat(9999999 * 3), + "long string, nothing to escape... 
".repeat(9999999 * 3), FIXTURE.repeat(8000), // "[unicode]" + FIXTURE_WITH_UNICODE, ]) { + const name = `"${input.substring(0, Math.min(input.length, 32))}" (${new Intl.NumberFormat().format(input.length / 100_000_000_0)} GB)` group( { summary: true, - name: - `"` + - input.substring(0, Math.min(input.length, 32)) + - `"` + - ` (${new Intl.NumberFormat().format(input.length / 100_000_000_0)} GB)`, + name }, () => { // bench(`ReactDOM.escapeHTML`, () => reactEscapeHtml(input)); // bench(`html-entities.encode`, () => htmlEntityEncode(input)); // bench(`he.escape`, () => heEscape(input)); - bench(`Bun.escapeHTML`, () => bunEscapeHTML(input)); + bench(`Bun.escapeHTML (${name})`, () => bunEscapeHTML(input)); }, ); } diff --git a/bench/snippets/ffi-overhead.mjs b/bench/snippets/ffi-overhead.mjs index bfb92634c2..d0f11e907c 100644 --- a/bench/snippets/ffi-overhead.mjs +++ b/bench/snippets/ffi-overhead.mjs @@ -1,5 +1,5 @@ -import { viewSource, dlopen, CString, ptr, toBuffer, toArrayBuffer, FFIType, callback } from "bun:ffi"; -import { bench, group, run } from "./runner.mjs"; +import { dlopen } from "bun:ffi"; +import { bench, group, run } from "../runner.mjs"; const types = { returns_true: { diff --git a/bench/snippets/form-data.mjs b/bench/snippets/form-data.mjs index a12cf4b134..b78edbfbe7 100644 --- a/bench/snippets/form-data.mjs +++ b/bench/snippets/form-data.mjs @@ -1,5 +1,5 @@ // so it can run in environments without node module resolution -import { bench, run } from "../node_modules/mitata/src/cli.mjs"; +import { bench, run } from "../runner.mjs"; const blob = new Blob(["foo", "bar", "baz"]); bench("FormData.append", () => { diff --git a/bench/snippets/headers.mjs b/bench/snippets/headers.mjs index 7057db02a7..8c0c0ec450 100644 --- a/bench/snippets/headers.mjs +++ b/bench/snippets/headers.mjs @@ -1,4 +1,4 @@ -import { bench, run } from "../node_modules/mitata/src/cli.mjs"; +import { bench, run } from "../runner.mjs"; // pure JS implementation will optimze this out bench("new Headers", function () { diff --git a/bench/snippets/index-of.mjs b/bench/snippets/index-of.mjs index 04b9704e96..8f22ab3518 100644 --- a/bench/snippets/index-of.mjs +++ b/bench/snippets/index-of.mjs @@ -1,4 +1,4 @@ -import { bench, run } from "./runner.mjs"; +import { bench, run } from "../runner.mjs"; const input = "Hello, World! 
foo bar baz qux quux corge grault garply waldo fred plugh xyzzy thud z a b c d e f g h i j k l m n o p q r s t u v w x y z".split( diff --git a/bench/snippets/json-parse-stringify.mjs b/bench/snippets/json-parse-stringify.mjs index c58041e100..f516f5364c 100644 --- a/bench/snippets/json-parse-stringify.mjs +++ b/bench/snippets/json-parse-stringify.mjs @@ -1,4 +1,4 @@ -import { bench, run } from "./runner.mjs"; +import { bench, run } from "../runner.mjs"; var obj = { "restApiRoot": "/api", diff --git a/bench/json-stringify/bun.js b/bench/snippets/json-stringify.js similarity index 83% rename from bench/json-stringify/bun.js rename to bench/snippets/json-stringify.js index 22f29deb40..e50ab7be10 100644 --- a/bench/json-stringify/bun.js +++ b/bench/snippets/json-stringify.js @@ -1,4 +1,4 @@ -import { bench, run } from "mitata"; +import { bench, run } from "../runner.mjs"; bench("JSON.stringify({hello: 'world'})", () => JSON.stringify({ hello: "world" })); diff --git a/bench/snippets/module-exports-putter.cjs b/bench/snippets/module-exports-putter.cjs index 9bef17b90e..7afb1e3aa6 100644 --- a/bench/snippets/module-exports-putter.cjs +++ b/bench/snippets/module-exports-putter.cjs @@ -1,6 +1,6 @@ // This is a stress test of some internals in How Bun does the module.exports assignment. // If it crashes or throws then this fails -import("./runner.mjs").then(({ bench, run }) => { +import("../runner.mjs").then(({ bench, run }) => { bench("Object.defineProperty(module, 'exports', { get() { return 42; } })", () => { Object.defineProperty(module, "exports", { get() { @@ -36,7 +36,9 @@ import("./runner.mjs").then(({ bench, run }) => { a: 1, }; - console.log( + const log = !process?.env?.BENCHMARK_RUNNER ? console.log : () => {}; + + log( module?.exports, require.cache[module.id].exports, module?.exports === require.cache[module.id], @@ -49,10 +51,11 @@ import("./runner.mjs").then(({ bench, run }) => { return 42; }; - console.log(module.exports, module.exports()); + log(module.exports); + log(module.exports, module.exports()); queueMicrotask(() => { - console.log( + log( module?.exports, require.cache[module.id].exports, module?.exports === require.cache[module.id]?.exports, diff --git a/bench/snippets/native-overhead.mjs b/bench/snippets/native-overhead.mjs index 2c33c46fab..32d459247e 100644 --- a/bench/snippets/native-overhead.mjs +++ b/bench/snippets/native-overhead.mjs @@ -1,4 +1,4 @@ -import { bench, run } from "./runner.mjs"; +import { bench, run } from "../runner.mjs"; // These are no-op C++ functions that are exported to JS. 
const lazy = globalThis[Symbol.for("Bun.lazy")]; diff --git a/bench/snippets/new-incomingmessage.mjs b/bench/snippets/new-incomingmessage.mjs index ae480e0311..13cd172646 100644 --- a/bench/snippets/new-incomingmessage.mjs +++ b/bench/snippets/new-incomingmessage.mjs @@ -1,5 +1,5 @@ -import { bench, run } from "./runner.mjs"; import { IncomingMessage } from "node:http"; +import { bench, run } from "../runner.mjs"; const headers = { date: "Mon, 06 Nov 2023 05:12:49 GMT", diff --git a/bench/snippets/node-vm.mjs b/bench/snippets/node-vm.mjs index 6f9d607736..74bed6a4be 100644 --- a/bench/snippets/node-vm.mjs +++ b/bench/snippets/node-vm.mjs @@ -1,6 +1,6 @@ // @runtime node, bun -import { bench, run } from "./runner.mjs"; import * as vm from "node:vm"; +import { bench, run } from "../runner.mjs"; const context = { animal: "cat", diff --git a/bench/snippets/noop.js b/bench/snippets/noop.js index 9b9f1a1d12..6b647064c1 100644 --- a/bench/snippets/noop.js +++ b/bench/snippets/noop.js @@ -1,4 +1,4 @@ -import { bench, run } from "./runner.mjs"; +import { bench, run } from "../runner.mjs"; var noop = globalThis[Symbol.for("Bun.lazy")]("noop"); var { function: noopFn, callback } = noop; diff --git a/bench/snippets/object-entries.mjs b/bench/snippets/object-entries.mjs index c3e4bf9e5b..8c4b331b51 100644 --- a/bench/snippets/object-entries.mjs +++ b/bench/snippets/object-entries.mjs @@ -1,5 +1,5 @@ // so it can run in environments without node module resolution -import { bench, run } from "../../node_modules/mitata/src/cli.mjs"; +import { bench, run } from "../runner.mjs"; const obj = { a: 1, diff --git a/bench/snippets/object-values.mjs b/bench/snippets/object-values.mjs index 8dc62780bf..86e4bef2c1 100644 --- a/bench/snippets/object-values.mjs +++ b/bench/snippets/object-values.mjs @@ -24,7 +24,7 @@ const obj = { w: 23, }; -import { bench, group, run } from "./runner.mjs"; +import { bench, run } from "../runner.mjs"; var val = 0; bench("Object.values(literal)", () => { diff --git a/bench/snippets/path-resolve.mjs b/bench/snippets/path-resolve.mjs new file mode 100644 index 0000000000..8263a7b048 --- /dev/null +++ b/bench/snippets/path-resolve.mjs @@ -0,0 +1,22 @@ +import { posix } from "path"; +import { bench, run } from "../runner.mjs"; + +const pathConfigurations = [ + "", + ".", + "./", + ["", ""].join("|"), + ["./abc.js"].join("|"), + ["foo/bar", "/tmp/file/", "..", "a/../subfile"].join("|"), + ["a/b/c/", "../../.."].join("|"), +]; + +pathConfigurations.forEach(paths => { + const args = paths.split("|"); + + bench(`resolve(${args.map(a => JSON.stringify(a)).join(", ")})`, () => { + globalThis.abc = posix.resolve(...args); + }); +}); + +await run(); diff --git a/bench/snippets/pbkdf2.mjs b/bench/snippets/pbkdf2.mjs index 6c21d3d6ea..3b286543ec 100644 --- a/bench/snippets/pbkdf2.mjs +++ b/bench/snippets/pbkdf2.mjs @@ -1,6 +1,6 @@ -import { pbkdf2, pbkdf2Sync } from "node:crypto"; +import { pbkdf2 } from "node:crypto"; -import { bench, run } from "./runner.mjs"; +import { bench, run } from "../runner.mjs"; const password = "password"; const salt = "salt"; diff --git a/bench/snippets/peek-promise.mjs b/bench/snippets/peek-promise.mjs index cabb15a313..9468efca25 100644 --- a/bench/snippets/peek-promise.mjs +++ b/bench/snippets/peek-promise.mjs @@ -1,5 +1,5 @@ -import { bench, run } from "mitata"; import { peek } from "bun"; +import { bench, run } from "../runner.mjs"; let pending = Bun.sleep(1000); let resolved = Promise.resolve(1); diff --git a/bench/snippets/performance-now-overhead.js 
b/bench/snippets/performance-now-overhead.js index 442d305639..b7626e3312 100644 --- a/bench/snippets/performance-now-overhead.js +++ b/bench/snippets/performance-now-overhead.js @@ -1,5 +1,4 @@ -import { group } from "./runner.mjs"; -import { bench, run } from "./runner.mjs"; +import { bench, run } from "../runner.mjs"; bench("performance.now x 1000", () => { for (let i = 0; i < 1000; i++) { performance.now(); diff --git a/bench/snippets/private.mjs b/bench/snippets/private.mjs index 452dab06b7..2cf72a3ced 100644 --- a/bench/snippets/private.mjs +++ b/bench/snippets/private.mjs @@ -1,4 +1,4 @@ -import { bench, run } from "../node_modules/mitata/src/cli.mjs"; +import { bench, run } from "../runner.mjs"; // This is a benchmark of the performance impact of using private properties. bench("Polyfillprivate", () => { diff --git a/bench/snippets/process-cwd.mjs b/bench/snippets/process-cwd.mjs new file mode 100644 index 0000000000..9d7576e253 --- /dev/null +++ b/bench/snippets/process-cwd.mjs @@ -0,0 +1,7 @@ +import { bench, run } from "../runner.mjs"; + +bench("process.cwd()", () => { + process.cwd(); +}); + +await run(); diff --git a/bench/snippets/process-info.mjs b/bench/snippets/process-info.mjs index 0366472e5a..bb053a205f 100644 --- a/bench/snippets/process-info.mjs +++ b/bench/snippets/process-info.mjs @@ -1,5 +1,5 @@ -import { bench, run } from "./runner.mjs"; import { performance } from "perf_hooks"; +import { bench, run } from "../runner.mjs"; bench("process.memoryUsage()", () => { process.memoryUsage(); diff --git a/bench/snippets/process.mjs b/bench/snippets/process.mjs index 40bb48e0e1..666fc8dcaf 100644 --- a/bench/snippets/process.mjs +++ b/bench/snippets/process.mjs @@ -1,4 +1,4 @@ -import { bench, run } from "./runner.mjs"; +import { bench, run } from "../runner.mjs"; bench("process.stderr.write('hey')", () => { process.stderr.write("hey"); diff --git a/bench/snippets/react-dom-render.bun.js b/bench/snippets/react-dom-render.bun.js index b13508d75d..d808b9547f 100644 --- a/bench/snippets/react-dom-render.bun.js +++ b/bench/snippets/react-dom-render.bun.js @@ -1,6 +1,6 @@ -import { bench, group, run } from "./runner.mjs"; -import { renderToReadableStream } from "react-dom/server.browser"; import { renderToReadableStream as renderToReadableStreamBun } from "react-dom/server"; +import { renderToReadableStream } from "react-dom/server.browser"; +import { bench, group, run } from "../runner.mjs"; const App = () => (
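Note: the snippet hunks above and below all follow the same migration. bench, group, and run now come from the shared bench/runner.mjs wrapper (added earlier in this diff) instead of from mitata directly, so that setting BENCHMARK_RUNNER can switch mitata 1.x into JSON output. The following is a minimal sketch of what a new snippet written against that wrapper would look like; it is hypothetical and not a file touched by this diff, and the process.hrtime.bigint() benchmark is only an illustration.

// Hypothetical example, not part of this diff: a bench/snippets/*.mjs file
// using the shared runner rather than importing mitata directly.
import { bench, run } from "../runner.mjs";

// Register a benchmark by name; the wrapper forwards to Mitata.bench.
bench("process.hrtime.bigint()", () => {
  process.hrtime.bigint();
});

// run() picks JSON output when BENCHMARK_RUNNER is set, otherwise the default table.
await run();

This also explains why several hunks in this diff (glob/braces.mjs, concat.js, escapeHTML.js, dns.ts, deep-equals.js) now embed the group name into each bench label: the wrapper's group(_name, fn) discards the name when delegating to mitata 1.x, so the label is the only place the grouping survives in the output.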
diff --git a/bench/snippets/read-file-chunk.mjs b/bench/snippets/read-file-chunk.mjs index e6a33a4992..7a0526e1f1 100644 --- a/bench/snippets/read-file-chunk.mjs +++ b/bench/snippets/read-file-chunk.mjs @@ -1,7 +1,7 @@ -import { tmpdir } from "node:os"; -import { bench, group, run } from "./runner.mjs"; import { createReadStream, writeFileSync } from "node:fs"; +import { tmpdir } from "node:os"; import { sep } from "node:path"; +import { bench, run } from "../runner.mjs"; if (!Promise.withResolvers) { Promise.withResolvers = function () { diff --git a/bench/snippets/read-file.mjs b/bench/snippets/read-file.mjs index b808dee792..8a9e1f1825 100644 --- a/bench/snippets/read-file.mjs +++ b/bench/snippets/read-file.mjs @@ -1,5 +1,5 @@ import { readFileSync, writeFileSync } from "node:fs"; -import { bench, run } from "./runner.mjs"; +import { bench, run } from "../runner.mjs"; var short = (function () { const text = "Hello World!"; diff --git a/bench/snippets/readdir.mjs b/bench/snippets/readdir.mjs index 37aefe6ac8..7a43cc6fdc 100644 --- a/bench/snippets/readdir.mjs +++ b/bench/snippets/readdir.mjs @@ -1,10 +1,10 @@ -import { readdirSync, readdir as readdirCb } from "fs"; +import { createHash } from "crypto"; +import { readdirSync } from "fs"; import { readdir } from "fs/promises"; -import { bench, run } from "./runner.mjs"; +import { relative, resolve } from "path"; import { argv } from "process"; import { fileURLToPath } from "url"; -import { relative, resolve } from "path"; -import { createHash } from "crypto"; +import { bench, run } from "../runner.mjs"; let dir = resolve(argv.length > 2 ? argv[2] : fileURLToPath(new URL("../../node_modules", import.meta.url))); if (dir.includes(process.cwd())) { @@ -43,8 +43,11 @@ bench(`await readdir("${dir}", {recursive: false})`, async () => { }); await run(); -console.log("\n", count, "files/dirs in", dir, "\n", "SHA256:", hash, "\n"); -if (count !== syncCount) { - throw new Error(`Mismatched file counts: ${count} async !== ${syncCount} sync`); +if (!process?.env?.BENCHMARK_RUNNER) { + console.log("\n", count, "files/dirs in", dir, "\n", "SHA256:", hash, "\n"); + + if (count !== syncCount) { + throw new Error(`Mismatched file counts: ${count} async !== ${syncCount} sync`); + } } diff --git a/bench/snippets/readfile-not-found.mjs b/bench/snippets/readfile-not-found.mjs index c28100ba4a..af90ba1f6b 100644 --- a/bench/snippets/readfile-not-found.mjs +++ b/bench/snippets/readfile-not-found.mjs @@ -1,6 +1,6 @@ -import { bench, run } from "./runner.mjs"; -import { readFileSync, existsSync } from "node:fs"; +import { readFileSync } from "node:fs"; import { readFile } from "node:fs/promises"; +import { bench, run } from "../runner.mjs"; bench(`readFileSync(/tmp/404-not-found)`, () => { try { diff --git a/bench/snippets/realpath.mjs b/bench/snippets/realpath.mjs index 4793ee3d67..30f2bf8da0 100644 --- a/bench/snippets/realpath.mjs +++ b/bench/snippets/realpath.mjs @@ -1,7 +1,7 @@ import { realpathSync } from "node:fs"; +import { bench, run } from "../runner.mjs"; const count = parseInt(process.env.ITERATIONS || "1", 10) || 1; const arg = process.argv[process.argv.length - 1]; -import { bench, run } from "./runner.mjs"; bench("realpathSync x " + count, () => { for (let i = 0; i < count; i++) realpathSync(arg, "utf-8"); diff --git a/bench/snippets/render.js b/bench/snippets/render.js index 9ef70bc273..58aaefb1c4 100644 --- a/bench/snippets/render.js +++ b/bench/snippets/render.js @@ -1,4 +1,4 @@ -import decoding from "./jsx-entity-decoding"; import ReactDOMServer 
from "react-dom/server.browser"; +import decoding from "./jsx-entity-decoding"; console.log(ReactDOMServer.renderToString(decoding)); diff --git a/bench/snippets/request-response-clone.mjs b/bench/snippets/request-response-clone.mjs index 05a9806560..9ba1f25d93 100644 --- a/bench/snippets/request-response-clone.mjs +++ b/bench/snippets/request-response-clone.mjs @@ -1,5 +1,5 @@ // This mostly exists to check for a memory leak in response.clone() -import { bench, run } from "./runner.mjs"; +import { bench, run } from "../runner.mjs"; const req = new Request("http://localhost:3000/"); const resp = await fetch("http://example.com"); diff --git a/bench/snippets/require-builtins.mjs b/bench/snippets/require-builtins.mjs index c458f3a356..34c008a892 100644 --- a/bench/snippets/require-builtins.mjs +++ b/bench/snippets/require-builtins.mjs @@ -1,7 +1,6 @@ -import { bench, run } from "./runner.mjs"; -import { builtinModules } from "node:module"; -import { writeFile } from "node:fs/promises"; import { spawnSync } from "child_process"; +import { writeFile } from "node:fs/promises"; +import { builtinModules } from "node:module"; for (let builtin of builtinModules) { const path = `/tmp/require.${builtin.replaceAll("/", "_")}.cjs`; diff --git a/bench/snippets/response-arrayBuffer.mjs b/bench/snippets/response-arrayBuffer.mjs index a3b1f0a730..255c46e7d8 100644 --- a/bench/snippets/response-arrayBuffer.mjs +++ b/bench/snippets/response-arrayBuffer.mjs @@ -1,6 +1,6 @@ // This snippet mostly exists to reproduce a memory leak // -import { bench, run } from "mitata"; +import { bench, run } from "../runner.mjs"; const obj = { "id": 1296269, diff --git a/bench/snippets/response-json.mjs b/bench/snippets/response-json.mjs index dd28203f0b..2cd20523b6 100644 --- a/bench/snippets/response-json.mjs +++ b/bench/snippets/response-json.mjs @@ -1,5 +1,5 @@ // This snippet mostly exists to reproduce a memory leak -import { bench, run } from "mitata"; +import { bench, run } from "../runner.mjs"; const obj = { "id": 1296269, diff --git a/bench/snippets/return-await.mjs b/bench/snippets/return-await.mjs index 079eb4bdd0..4ccdccf549 100644 --- a/bench/snippets/return-await.mjs +++ b/bench/snippets/return-await.mjs @@ -1,4 +1,4 @@ -import { bench, run } from "../node_modules/mitata/src/cli.mjs"; +import { bench, run } from "../runner.mjs"; bench("return await Promise.resolve(1)", async function () { return await Promise.resolve(1); diff --git a/bench/snippets/rewriter.mjs b/bench/snippets/rewriter.mjs index abdc7f0af5..4cb1143aac 100644 --- a/bench/snippets/rewriter.mjs +++ b/bench/snippets/rewriter.mjs @@ -1,4 +1,4 @@ -import { bench, run } from "./runner.mjs"; +import { bench, run } from "../runner.mjs"; const blob = new Blob(["

Hello

"]); bench("prepend", async () => { diff --git a/bench/snippets/rmdir.mjs b/bench/snippets/rmdir.mjs index 258d69097d..8cc7bb08fb 100644 --- a/bench/snippets/rmdir.mjs +++ b/bench/snippets/rmdir.mjs @@ -1,5 +1,5 @@ +import { existsSync, mkdirSync, promises } from "node:fs"; import { tmpdir } from "node:os"; -import { promises, existsSync, mkdirSync } from "node:fs"; const count = 1024 * 12; var queue = new Array(count); diff --git a/bench/snippets/runner-entrypoint.js b/bench/snippets/runner-entrypoint.js index 77011c1317..cbcf0f6726 100644 --- a/bench/snippets/runner-entrypoint.js +++ b/bench/snippets/runner-entrypoint.js @@ -1,9 +1,9 @@ // note: this isn't done yet // we look for `// @runtime` in the file to determine which runtimes to run the benchmark in import { spawnSync } from "bun"; -import { readdirSync, readFileSync } from "node:fs"; import { Database } from "bun:sqlite"; -import { extname, basename } from "path"; +import { readdirSync, readFileSync } from "node:fs"; +import { basename, extname } from "path"; const exts = [".js", ".ts", ".mjs", ".tsx"]; diff --git a/bench/snippets/runner.mjs b/bench/snippets/runner.mjs deleted file mode 100644 index 4f6e29fba5..0000000000 --- a/bench/snippets/runner.mjs +++ /dev/null @@ -1,22 +0,0 @@ -import * as Mitata from "../node_modules/mitata/src/cli.mjs"; -import process from "node:process"; - -const asJSON = !!process?.env?.BENCHMARK_RUNNER; - -export function run(opts = {}) { - opts ??= {}; - - if (asJSON) { - opts.json = true; - } - - return Mitata.run(opts); -} - -export function bench(name, fn) { - return Mitata.bench(name, fn); -} - -export function group(name, fn) { - return Mitata.group(name, fn); -} diff --git a/bench/snippets/semver.mjs b/bench/snippets/semver.mjs index bacacef214..7b3d599a58 100644 --- a/bench/snippets/semver.mjs +++ b/bench/snippets/semver.mjs @@ -1,5 +1,5 @@ import { satisfies } from "semver"; -import { bench, run } from "./runner.mjs"; +import { bench, run } from "../runner.mjs"; const tests = [ ["~1.2.3", "1.2.3", true], ["~1.2", "1.2.0", true], diff --git a/bench/snippets/serialize.mjs b/bench/snippets/serialize.mjs index 1a3646f792..80da320dfb 100644 --- a/bench/snippets/serialize.mjs +++ b/bench/snippets/serialize.mjs @@ -1,5 +1,5 @@ -import { serialize, deserialize } from "node:v8"; -import { bench, run } from "./runner.mjs"; +import { deserialize, serialize } from "node:v8"; +import { bench, run } from "../runner.mjs"; const obj = { "id": 1296269, "node_id": "MDEwOlJlcG9zaXRvcnkxMjk2MjY5", diff --git a/bench/snippets/set-timeout.mjs b/bench/snippets/set-timeout.mjs index 47228f77ce..a9f495a319 100644 --- a/bench/snippets/set-timeout.mjs +++ b/bench/snippets/set-timeout.mjs @@ -1,5 +1,3 @@ -import { bench, run } from "../node_modules/mitata/src/cli.mjs"; - let count = 20_000_000; const batchSize = 1_000_000; console.time("Run"); diff --git a/bench/snippets/sha512.js b/bench/snippets/sha512.js index ac162dc248..548bbc096b 100644 --- a/bench/snippets/sha512.js +++ b/bench/snippets/sha512.js @@ -1,5 +1,5 @@ -import { bench, run } from "./runner.mjs"; import { SHA512 } from "bun"; +import { bench, run } from "../runner.mjs"; bench('SHA512.hash("hello world")', () => { SHA512.hash("hello world"); diff --git a/bench/snippets/sha512.node.mjs b/bench/snippets/sha512.node.mjs index 3c3ac16976..26268ea0ab 100644 --- a/bench/snippets/sha512.node.mjs +++ b/bench/snippets/sha512.node.mjs @@ -1,5 +1,5 @@ -import { bench, run } from "./runner.mjs"; import { createHash } from "crypto"; +import { bench, run } from 
"../runner.mjs"; bench('createHash("sha256").update("hello world").digest()', () => { createHash("sha256").update("hello world").digest(); diff --git a/bench/snippets/shell-spawn.mjs b/bench/snippets/shell-spawn.mjs index aa4da66eeb..c3aaf557db 100644 --- a/bench/snippets/shell-spawn.mjs +++ b/bench/snippets/shell-spawn.mjs @@ -1,6 +1,6 @@ -import { $ as zx } from "zx"; import { $ as execa$ } from "execa"; -import { bench, run, group } from "./runner.mjs"; +import { $ as zx } from "zx"; +import { bench, group, run } from "../runner.mjs"; const execa = execa$({ stdio: "ignore", cwd: import.meta.dirname }); diff --git a/bench/snippets/spawn-hugemem.mjs b/bench/snippets/spawn-hugemem.mjs index 177382c743..792381ab0d 100644 --- a/bench/snippets/spawn-hugemem.mjs +++ b/bench/snippets/spawn-hugemem.mjs @@ -1,5 +1,5 @@ import { spawnSync } from "bun"; -import { bench, run } from "./runner.mjs"; +import { bench, run } from "../runner.mjs"; var memory = new Uint8Array(128 * 1024 * 1024); memory.fill(10); diff --git a/bench/snippets/spawn-hugemem.node.mjs b/bench/snippets/spawn-hugemem.node.mjs index d33a5d4bd4..489c1c33e9 100644 --- a/bench/snippets/spawn-hugemem.node.mjs +++ b/bench/snippets/spawn-hugemem.node.mjs @@ -1,5 +1,5 @@ import { spawnSync } from "child_process"; -import { bench, run } from "./runner.mjs"; +import { bench, run } from "../runner.mjs"; var memory = new Uint8Array(128 * 1024 * 1024); memory.fill(10); diff --git a/bench/snippets/spawn.deno.mjs b/bench/snippets/spawn.deno.mjs index 0e96d9e93e..198d3d43ce 100644 --- a/bench/snippets/spawn.deno.mjs +++ b/bench/snippets/spawn.deno.mjs @@ -1,4 +1,4 @@ -import { bench, run } from "../node_modules/mitata/src/cli.mjs"; +import { bench, run } from "../runner.mjs"; bench("spawnSync echo hi", () => { Deno.spawnSync("echo", { diff --git a/bench/snippets/spawn.mjs b/bench/snippets/spawn.mjs index 9c259b096f..8836f19aab 100644 --- a/bench/snippets/spawn.mjs +++ b/bench/snippets/spawn.mjs @@ -1,5 +1,5 @@ import { spawnSync } from "bun"; -import { bench, run } from "./runner.mjs"; +import { bench, run } from "../runner.mjs"; bench("spawnSync echo hi", () => { spawnSync({ cmd: ["echo", "hi"] }); diff --git a/bench/snippets/spawn.node.mjs b/bench/snippets/spawn.node.mjs index c72a3bf036..008949d990 100644 --- a/bench/snippets/spawn.node.mjs +++ b/bench/snippets/spawn.node.mjs @@ -1,6 +1,6 @@ // @runtime bun,node,deno import { spawnSync } from "node:child_process"; -import { bench, run } from "./runner.mjs"; +import { bench, run } from "../runner.mjs"; bench("spawnSync echo hi", () => { spawnSync("echo", ["hi"], { encoding: "buffer", shell: false }); diff --git a/bench/snippets/stat.mjs b/bench/snippets/stat.mjs index 17d6a68c83..68fd1f5135 100644 --- a/bench/snippets/stat.mjs +++ b/bench/snippets/stat.mjs @@ -1,6 +1,6 @@ -import { readdirSync, statSync } from "fs"; -import { bench, run } from "./runner.mjs"; +import { statSync } from "fs"; import { argv } from "process"; +import { bench, run } from "../runner.mjs"; const dir = argv.length > 2 ? argv[2] : "/tmp"; diff --git a/bench/snippets/stderr.mjs b/bench/snippets/stderr.mjs index f4669905b0..e06c388588 100644 --- a/bench/snippets/stderr.mjs +++ b/bench/snippets/stderr.mjs @@ -1,4 +1,4 @@ -import { run, bench } from "./runner.mjs"; +import { bench, run } from "../runner.mjs"; var writer = globalThis.Bun ? 
Bun.stderr.writer() : undefined; if (writer) diff --git a/bench/snippets/string-decoder.mjs b/bench/snippets/string-decoder.mjs index b00b7b67d4..1969937441 100644 --- a/bench/snippets/string-decoder.mjs +++ b/bench/snippets/string-decoder.mjs @@ -1,5 +1,5 @@ -import { bench, run } from "./runner.mjs"; import { StringDecoder } from "string_decoder"; +import { bench, run } from "../runner.mjs"; var short = Buffer.from("Hello World!"); var shortUTF16 = Buffer.from("Hello World 💕💕💕"); diff --git a/bench/snippets/string-width.mjs b/bench/snippets/string-width.mjs index 03b4833a3b..d75507657a 100644 --- a/bench/snippets/string-width.mjs +++ b/bench/snippets/string-width.mjs @@ -1,5 +1,5 @@ -import { bench, run } from "./runner.mjs"; import npmStringWidth from "string-width"; +import { bench, run } from "../runner.mjs"; const bunStringWidth = globalThis?.Bun?.stringWidth; diff --git a/bench/snippets/structuredClone.mjs b/bench/snippets/structuredClone.mjs index 3007b22f56..684acd3b19 100644 --- a/bench/snippets/structuredClone.mjs +++ b/bench/snippets/structuredClone.mjs @@ -31,7 +31,7 @@ var testArray = [ }, ]; -import { bench, run } from "./runner.mjs"; +import { bench, run } from "../runner.mjs"; bench("structuredClone(array)", () => structuredClone(testArray)); bench("structuredClone(123)", () => structuredClone(123)); diff --git a/bench/snippets/tcp-echo.bun.ts b/bench/snippets/tcp-echo.bun.ts index c0f227e754..193ce0bd2a 100644 --- a/bench/snippets/tcp-echo.bun.ts +++ b/bench/snippets/tcp-echo.bun.ts @@ -1,4 +1,4 @@ -import { listen, connect } from "bun"; +import { connect, listen } from "bun"; var counter = 0; const msg = "Hello World!"; diff --git a/bench/snippets/text-decoder-stream.mjs b/bench/snippets/text-decoder-stream.mjs new file mode 100644 index 0000000000..c30e45f1b5 --- /dev/null +++ b/bench/snippets/text-decoder-stream.mjs @@ -0,0 +1,55 @@ +import { bench, run } from "../runner.mjs"; + +const latin1 = `hello hello hello!!!! `.repeat(10240); + +function create(src) { + function split(str, chunkSize) { + let chunkedHTML = []; + let html = str; + const encoder = new TextEncoder(); + while (html.length > 0) { + chunkedHTML.push(encoder.encode(html.slice(0, chunkSize))); + html = html.slice(chunkSize); + } + return chunkedHTML; + } + + async function runBench(chunks) { + const decoder = new TextDecoderStream(); + const stream = new ReadableStream({ + pull(controller) { + for (let chunk of chunks) { + controller.enqueue(chunk); + } + controller.close(); + }, + }).pipeThrough(decoder); + for (let reader = stream.getReader(); ; ) { + const { done, value } = await reader.read(); + if (done) { + break; + } + } + } + + // if (new TextDecoder().decode(await runBench(oneKB)) !== src) { + // throw new Error("Benchmark failed"); + // } + const sizes = [16 * 1024, 64 * 1024, 256 * 1024]; + for (const chunkSize of sizes) { + const text = split(src, chunkSize); + bench( + `${Math.round(src.length / 1024)} KB of text in ${Math.round(chunkSize / 1024) > 0 ? 
Math.round(chunkSize / 1024) : (chunkSize / 1024).toFixed(2)} KB chunks`, + async () => { + await runBench(text); + }, + ); + } +} +create(latin1); +create( + // bun's old readme was extremely long + await fetch("https://web.archive.org/web/20230119110956/https://github.com/oven-sh/bun").then(res => res.text()), +); + +await run(); diff --git a/bench/snippets/text-decoder.mjs b/bench/snippets/text-decoder.mjs index 340815e9df..5bf0e90cbf 100644 --- a/bench/snippets/text-decoder.mjs +++ b/bench/snippets/text-decoder.mjs @@ -1,4 +1,4 @@ -import { bench, run } from "../node_modules/mitata/src/cli.mjs"; +import { bench, run } from "../runner.mjs"; var short = new TextEncoder().encode("Hello World!"); var shortUTF16 = new TextEncoder().encode("Hello World 💕💕💕"); diff --git a/bench/snippets/text-encoder-stream.mjs b/bench/snippets/text-encoder-stream.mjs new file mode 100644 index 0000000000..ee83f90d5c --- /dev/null +++ b/bench/snippets/text-encoder-stream.mjs @@ -0,0 +1,49 @@ +import { bench, run } from "../runner.mjs"; + +const latin1 = `hello hello hello!!!! `.repeat(10240); + +function create(src) { + function split(str, chunkSize) { + let chunkedHTML = []; + let html = str; + while (html.length > 0) { + chunkedHTML.push(html.slice(0, chunkSize)); + html = html.slice(chunkSize); + } + return chunkedHTML; + } + + async function runBench(chunks) { + const encoderStream = new TextEncoderStream(); + const stream = new ReadableStream({ + pull(controller) { + for (let chunk of chunks) { + controller.enqueue(chunk); + } + controller.close(); + }, + }).pipeThrough(encoderStream); + return await new Response(stream).bytes(); + } + + // if (new TextDecoder().decode(await runBench(oneKB)) !== src) { + // throw new Error("Benchmark failed"); + // } + const sizes = [1024, 16 * 1024, 64 * 1024, 256 * 1024]; + for (const chunkSize of sizes) { + const text = split(src, chunkSize); + bench( + `${Math.round(src.length / 1024)} KB of text in ${Math.round(chunkSize / 1024) > 0 ? 
Math.round(chunkSize / 1024) : (chunkSize / 1024).toFixed(2)} KB chunks`, + async () => { + await runBench(text); + }, + ); + } +} +create(latin1); +create( + // bun's old readme was extremely long + await fetch("https://web.archive.org/web/20230119110956/https://github.com/oven-sh/bun").then(res => res.text()), +); + +await run(); diff --git a/bench/snippets/text-encoder.mjs b/bench/snippets/text-encoder.mjs index d0f5c40a4d..674345177d 100644 --- a/bench/snippets/text-encoder.mjs +++ b/bench/snippets/text-encoder.mjs @@ -1,4 +1,4 @@ -import { bench, run } from "./runner.mjs"; +import { bench, run } from "../runner.mjs"; var short = "Hello World!"; var shortUTF16 = "Hello World 💕💕💕"; diff --git a/bench/snippets/transpiler-2.mjs b/bench/snippets/transpiler-2.mjs index 702fda9d18..fdf3deb713 100644 --- a/bench/snippets/transpiler-2.mjs +++ b/bench/snippets/transpiler-2.mjs @@ -1,5 +1,5 @@ -import { bench, run } from "mitata"; import { join } from "path"; +import { bench, run } from "../runner.mjs"; const code = require("fs").readFileSync( process.argv[2] || join(import.meta.dir, "../node_modules/@babel/standalone/babel.min.js"), diff --git a/bench/snippets/transpiler.mjs b/bench/snippets/transpiler.mjs index 3a5c57d0af..f453270435 100644 --- a/bench/snippets/transpiler.mjs +++ b/bench/snippets/transpiler.mjs @@ -1,8 +1,8 @@ import { readFileSync } from "fs"; +import { createRequire } from "module"; import { dirname } from "path"; import { fileURLToPath } from "url"; -import { bench, run, group } from "./runner.mjs"; -import { createRequire } from "module"; +import { bench, group, run } from "../runner.mjs"; const require = createRequire(import.meta.url); const esbuild_ = require("esbuild/lib/main"); const swc_ = require("@swc/core"); diff --git a/bench/snippets/url.mjs b/bench/snippets/url.mjs index 1cb6e7a8f1..d794b7f6d6 100644 --- a/bench/snippets/url.mjs +++ b/bench/snippets/url.mjs @@ -1,4 +1,4 @@ -import { bench, run } from "./runner.mjs"; +import { bench, run } from "../runner.mjs"; bench(`new URL('https://example.com/')`, () => { const url = new URL("https://example.com/"); diff --git a/bench/snippets/urlsearchparams.mjs b/bench/snippets/urlsearchparams.mjs index af653c917f..83a874dc5f 100644 --- a/bench/snippets/urlsearchparams.mjs +++ b/bench/snippets/urlsearchparams.mjs @@ -1,4 +1,4 @@ -import { bench, run } from "./runner.mjs"; +import { bench, run } from "../runner.mjs"; // bench("new URLSearchParams({})", () => { // return new URLSearchParams({}); diff --git a/bench/snippets/util-deprecate.mjs b/bench/snippets/util-deprecate.mjs index 364601d79a..1acd31f5a1 100644 --- a/bench/snippets/util-deprecate.mjs +++ b/bench/snippets/util-deprecate.mjs @@ -1,4 +1,4 @@ -import { bench, run } from "./runner.mjs"; +import { bench, run } from "../runner.mjs"; function deprecateUsingClosure(fn, msg, code) { if (process.noDeprecation === true) { return fn; diff --git a/bench/snippets/webcrypto.mjs b/bench/snippets/webcrypto.mjs index 2d1256cf8f..2ae35652d7 100644 --- a/bench/snippets/webcrypto.mjs +++ b/bench/snippets/webcrypto.mjs @@ -1,5 +1,4 @@ -import { group } from "mitata"; -import { bench, run } from "./runner.mjs"; +import { bench, group, run } from "../runner.mjs"; const sizes = [ ["small (63 bytes)", 63], @@ -10,7 +9,7 @@ for (let [name, size] of sizes) { group(name, () => { var buf = new Uint8Array(size); for (let algorithm of ["SHA-1", "SHA-256", "SHA-384", "SHA-512"]) { - bench(algorithm, async () => { + bench(`${algorithm} (${name})`, async () => { await 
crypto.subtle.digest(algorithm, buf); }); } diff --git a/bench/snippets/write-file-huge.mjs b/bench/snippets/write-file-huge.mjs index fe874c9399..f79a8ca991 100644 --- a/bench/snippets/write-file-huge.mjs +++ b/bench/snippets/write-file-huge.mjs @@ -1,6 +1,6 @@ import { Buffer } from "node:buffer"; import { writeFile } from "node:fs/promises"; -import { bench, run } from "./runner.mjs"; +import { bench, run } from "../runner.mjs"; var hugeFile = Buffer.alloc(1024 * 1024 * 64); var medFile = Buffer.alloc(1024 * 1024 * 16); diff --git a/bench/snippets/write-file.mjs b/bench/snippets/write-file.mjs index 4417c817cd..e16732cb7e 100644 --- a/bench/snippets/write-file.mjs +++ b/bench/snippets/write-file.mjs @@ -1,5 +1,5 @@ -import { readFileSync, writeFileSync } from "node:fs"; -import { bench, run } from "./runner.mjs"; +import { writeFileSync } from "node:fs"; +import { bench, run } from "../runner.mjs"; var short = "Hello World!"; var shortUTF16 = "Hello World 💕💕💕"; diff --git a/bench/snippets/write.bun.js b/bench/snippets/write.bun.js index 67fbbe3b25..a3ea86b871 100644 --- a/bench/snippets/write.bun.js +++ b/bench/snippets/write.bun.js @@ -1,6 +1,6 @@ -import { bench, run } from "./runner.mjs"; import { write } from "bun"; import { openSync } from "fs"; +import { bench, run } from "../runner.mjs"; bench('write(/tmp/foo.txt, "short string")', async () => { await write("/tmp/foo.txt", "short string"); diff --git a/bench/snippets/write.node.mjs b/bench/snippets/write.node.mjs index f59c98aefa..92b97f77c8 100644 --- a/bench/snippets/write.node.mjs +++ b/bench/snippets/write.node.mjs @@ -1,9 +1,8 @@ // @runtime node, bun, deno -import { bench, run } from "./runner.mjs"; import { Buffer } from "node:buffer"; -import { openSync } from "node:fs"; +import { openSync, writeSync as write } from "node:fs"; import { writeFile } from "node:fs/promises"; -import { writeSync as write } from "node:fs"; +import { bench, run } from "../runner.mjs"; bench("writeFile(/tmp/foo.txt, short string)", async () => { await writeFile("/tmp/foo.txt", "short string", "utf8"); diff --git a/bench/sqlite/better-sqlite3.mjs b/bench/sqlite/better-sqlite3.mjs index 9bf25105b9..cf32b3e912 100644 --- a/bench/sqlite/better-sqlite3.mjs +++ b/bench/sqlite/better-sqlite3.mjs @@ -1,5 +1,5 @@ -import { run, bench } from "mitata"; import { createRequire } from "module"; +import { bench, run } from "../runner.mjs"; const require = createRequire(import.meta.url); const db = require("better-sqlite3")("./src/northwind.sqlite"); diff --git a/bench/sqlite/bun.js b/bench/sqlite/bun.js index c178981f17..9d2167c30b 100644 --- a/bench/sqlite/bun.js +++ b/bench/sqlite/bun.js @@ -1,5 +1,5 @@ -import { run, bench } from "mitata"; import { Database } from "bun:sqlite"; +import { bench, run } from "../runner.mjs"; import { join } from "path"; const db = Database.open(join(import.meta.dir, "src", "northwind.sqlite")); diff --git a/bench/sqlite/deno.js b/bench/sqlite/deno.js index 8b4b215ee8..74ab5b9ebe 100644 --- a/bench/sqlite/deno.js +++ b/bench/sqlite/deno.js @@ -1,5 +1,5 @@ import { Database } from "https://deno.land/x/sqlite3@0.11.1/mod.ts"; -import { run, bench } from "../node_modules/mitata/src/cli.mjs"; +import { bench, run } from "../runner.mjs"; const db = new Database("./src/northwind.sqlite"); diff --git a/bench/sqlite/node.mjs b/bench/sqlite/node.mjs index 7602a87612..e620913aaa 100644 --- a/bench/sqlite/node.mjs +++ b/bench/sqlite/node.mjs @@ -1,7 +1,7 @@ // Run `node --experimental-sqlite bench/sqlite/node.mjs` to run the script. 
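The snippet changes above repoint every benchmark from `./runner.mjs` (deleted in this diff) to a shared `../runner.mjs`, and gate per-snippet `console.log` output behind `BENCHMARK_RUNNER`. The new shared `bench/runner.mjs` itself is not part of this diff; as a rough sketch only, assuming it keeps the same mitata-based exports and JSON switch as the deleted `bench/snippets/runner.mjs` (the mitata import path below is likewise an assumption), it would look roughly like:

```js
// bench/runner.mjs — illustrative sketch, not the file from this change.
// Assumes the same shape as the deleted bench/snippets/runner.mjs:
// a thin wrapper over mitata whose output switches to JSON when the
// BENCHMARK_RUNNER environment variable is set.
import * as Mitata from "./node_modules/mitata/src/cli.mjs"; // path assumed
import process from "node:process";

const asJSON = !!process?.env?.BENCHMARK_RUNNER;

export function run(opts = {}) {
  opts ??= {};
  if (asJSON) {
    // Machine-readable output for an external benchmark runner.
    opts.json = true;
  }
  return Mitata.run(opts);
}

export function bench(name, fn) {
  return Mitata.bench(name, fn);
}

export function group(name, fn) {
  return Mitata.group(name, fn);
}
```

Under that assumption, each snippet imports `{ bench, group, run }` from `../runner.mjs` and, like the `readdir.mjs` change above, skips its own logging when `BENCHMARK_RUNNER` is set so the JSON output stays clean.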
// You will need `--experimental-sqlite` flag to run this script and node v22.5.0 or higher. -import { run, bench } from "mitata"; import { DatabaseSync as Database } from "node:sqlite"; +import { bench, run } from "../runner.mjs"; const db = new Database("./src/northwind.sqlite"); diff --git a/bench/sqlite/package.json b/bench/sqlite/package.json index 593a0c83fc..42330f727d 100644 --- a/bench/sqlite/package.json +++ b/bench/sqlite/package.json @@ -5,10 +5,10 @@ }, "scripts": { "build": "exit 0", - "bench:bun": "$BUN bun.js", - "bench:node": "$NODE node.mjs", + "bench:bun": "bun bun.js", + "bench:node": "node node.mjs", "deps": "npm install && bash src/download.sh", - "bench:deno": "$DENO run -A --unstable-ffi deno.js", + "bench:deno": "deno run -A --unstable-ffi deno.js", "bench": "bun run bench:bun && bun run bench:node && bun run bench:deno" } } diff --git a/bench/tsconfig.json b/bench/tsconfig.json new file mode 100644 index 0000000000..2432a3c9d6 --- /dev/null +++ b/bench/tsconfig.json @@ -0,0 +1,7 @@ +{ + "extends": "../tsconfig.json", + "compilerOptions": { + // For the organize imports plugin + "jsx": "react" + } +} diff --git a/build.zig b/build.zig index f545187ddc..fed6086672 100644 --- a/build.zig +++ b/build.zig @@ -44,11 +44,22 @@ const BunBuildOptions = struct { version: Version, canary_revision: ?u32, sha: []const u8, + /// enable debug logs in release builds enable_logs: bool = false, tracy_callstack_depth: u16, reported_nodejs_version: Version, + /// To make iterating on some '@embedFile's faster, we load them at runtime + /// instead of at compile time. This is disabled in release or if this flag + /// is set (to allow CI to build a portable executable). Affected files: + /// + /// - src/bake/runtime.ts (bundled) + /// - src/bun.js/api/FFI.h + /// + /// A similar technique is used in C++ code for JavaScript builtins + codegen_embed: bool = false, - generated_code_dir: []const u8, + /// `./build/codegen` or equivalent + codegen_path: []const u8, no_llvm: bool, cached_options_module: ?*Module = null, @@ -59,6 +70,10 @@ const BunBuildOptions = struct { !Target.x86.featureSetHas(this.target.result.cpu.features, .avx2); } + pub fn shouldEmbedCode(opts: *const BunBuildOptions) bool { + return opts.optimize != .Debug or opts.codegen_embed; + } + pub fn buildOptionsModule(this: *BunBuildOptions, b: *Build) *Module { if (this.cached_options_module) |mod| { return mod; @@ -66,6 +81,12 @@ const BunBuildOptions = struct { var opts = b.addOptions(); opts.addOption([]const u8, "base_path", b.pathFromRoot(".")); + opts.addOption([]const u8, "codegen_path", std.fs.path.resolve(b.graph.arena, &.{ + b.build_root.path.?, + this.codegen_path, + }) catch @panic("OOM")); + + opts.addOption(bool, "codegen_embed", this.shouldEmbedCode()); opts.addOption(u32, "canary_revision", this.canary_revision orelse 0); opts.addOption(bool, "is_canary", this.canary_revision != null); opts.addOption(Version, "version", this.version); @@ -89,10 +110,8 @@ const BunBuildOptions = struct { pub fn getOSVersionMin(os: OperatingSystem) ?Target.Query.OsVersion { return switch (os) { - // bun needs macOS 12 to work properly due to icucore, but we have been - // compiling everything with 11 as the minimum. 
.mac => .{ - .semver = .{ .major = 11, .minor = 0, .patch = 0 }, + .semver = .{ .major = 13, .minor = 0, .patch = 0 }, }, // Windows 10 1809 is the minimum supported version @@ -134,12 +153,19 @@ pub fn getCpuModel(os: OperatingSystem, arch: Arch) ?Target.Query.CpuModel { pub fn build(b: *Build) !void { std.log.info("zig compiler v{s}", .{builtin.zig_version_string}); - b.zig_lib_dir = b.zig_lib_dir orelse b.path("src/deps/zig/lib"); + b.zig_lib_dir = b.zig_lib_dir orelse b.path("vendor/zig/lib"); + + // TODO: Upgrade path for 0.14.0 + // b.graph.zig_lib_directory = brk: { + // const sub_path = "vendor/zig/lib"; + // const dir = try b.build_root.handle.openDir(sub_path, .{}); + // break :brk .{ .handle = dir, .path = try b.build_root.join(b.graph.arena, &.{sub_path}) }; + // }; var target_query = b.standardTargetOptionsQueryOnly(.{}); const optimize = b.standardOptimizeOption(.{}); - const os, const arch = brk: { + const os, const arch, const abi = brk: { // resolve the target query to pick up what operating system and cpu // architecture that is desired. this information is used to slightly // refine the query. @@ -153,7 +179,8 @@ pub fn build(b: *Build) !void { .windows => .windows, else => |t| std.debug.panic("Unsupported OS tag {}", .{t}), }; - break :brk .{ os, arch }; + const abi = temp_resolved.result.abi; + break :brk .{ os, arch, abi }; }; // target must be refined to support older but very popular devices on @@ -165,14 +192,16 @@ pub fn build(b: *Build) !void { } target_query.os_version_min = getOSVersionMin(os); - target_query.glibc_version = getOSGlibCVersion(os); + target_query.glibc_version = if (abi.isGnu()) getOSGlibCVersion(os) else null; const target = b.resolveTargetQuery(target_query); - const generated_code_dir = b.pathFromRoot( - b.option([]const u8, "generated-code", "Set the generated code directory") orelse - "build/codegen", + const codegen_path = b.pathFromRoot( + b.option([]const u8, "codegen_path", "Set the generated code directory") orelse + "build/debug/codegen", ); + const codegen_embed = b.option(bool, "codegen_embed", "If codegen files should be embedded in the binary") orelse false; + const bun_version = b.option([]const u8, "version", "Value of `Bun.version`") orelse "0.0.0"; b.reference_trace = ref_trace: { @@ -191,7 +220,8 @@ pub fn build(b: *Build) !void { .os = os, .arch = arch, - .generated_code_dir = generated_code_dir, + .codegen_path = codegen_path, + .codegen_embed = codegen_embed, .no_llvm = no_llvm, .version = try Version.parse(bun_version), @@ -206,9 +236,10 @@ pub fn build(b: *Build) !void { ), .sha = sha: { - const sha = b.option([]const u8, "sha", "Force the git sha") orelse - b.graph.env_map.get("GITHUB_SHA") orelse - b.graph.env_map.get("GIT_SHA") orelse fetch_sha: { + const sha_buildoption = b.option([]const u8, "sha", "Force the git sha"); + const sha_github = b.graph.env_map.get("GITHUB_SHA"); + const sha_env = b.graph.env_map.get("GIT_SHA"); + const sha = sha_buildoption orelse sha_github orelse sha_env orelse fetch_sha: { const result = std.process.Child.run(.{ .allocator = b.allocator, .argv = &.{ @@ -267,7 +298,7 @@ pub fn build(b: *Build) !void { bun_check_obj.generated_bin = null; step.dependOn(&bun_check_obj.step); - // The default install step will run zig build check This is so ZLS + // The default install step will run zig build check. This is so ZLS // identifies the codebase, as well as performs checking if build on // save is enabled. 
@@ -284,6 +315,8 @@ pub fn build(b: *Build) !void { .{ .os = .mac, .arch = .aarch64 }, .{ .os = .linux, .arch = .x86_64 }, .{ .os = .linux, .arch = .aarch64 }, + .{ .os = .linux, .arch = .x86_64, .musl = true }, + .{ .os = .linux, .arch = .aarch64, .musl = true }, }); } @@ -296,20 +329,20 @@ pub fn build(b: *Build) !void { } } -pub inline fn addMultiCheck( +pub fn addMultiCheck( b: *Build, parent_step: *Step, root_build_options: BunBuildOptions, - to_check: []const struct { os: OperatingSystem, arch: Arch }, + to_check: []const struct { os: OperatingSystem, arch: Arch, musl: bool = false }, ) void { - inline for (to_check) |check| { - inline for (.{ .Debug, .ReleaseFast }) |mode| { + for (to_check) |check| { + for ([_]std.builtin.Mode{ .Debug, .ReleaseFast }) |mode| { const check_target = b.resolveTargetQuery(.{ .os_tag = OperatingSystem.stdOSTag(check.os), .cpu_arch = check.arch, .cpu_model = getCpuModel(check.os, check.arch) orelse .determined_by_cpu_arch, .os_version_min = getOSVersionMin(check.os), - .glibc_version = getOSGlibCVersion(check.os), + .glibc_version = if (check.musl) null else getOSGlibCVersion(check.os), }); var options: BunBuildOptions = .{ @@ -323,7 +356,7 @@ pub inline fn addMultiCheck( .tracy_callstack_depth = root_build_options.tracy_callstack_depth, .version = root_build_options.version, .reported_nodejs_version = root_build_options.reported_nodejs_version, - .generated_code_dir = root_build_options.generated_code_dir, + .codegen_path = root_build_options.codegen_path, .no_llvm = root_build_options.no_llvm, }; @@ -340,6 +373,7 @@ pub fn addBunObject(b: *Build, opts: *BunBuildOptions) *Compile { .root_source_file = switch (opts.os) { .wasm => b.path("root_wasm.zig"), else => b.path("root.zig"), + // else => b.path("root_css.zig"), }, .target = opts.target, .optimize = opts.optimize, @@ -441,23 +475,54 @@ fn addInternalPackages(b: *Build, obj: *Compile, opts: *BunBuildOptions) void { .root_source_file = b.path(async_path), }); - const zig_generated_classes_path = b.pathJoin(&.{ opts.generated_code_dir, "ZigGeneratedClasses.zig" }); - validateGeneratedPath(zig_generated_classes_path); - obj.root_module.addAnonymousImport("ZigGeneratedClasses", .{ - .root_source_file = .{ .cwd_relative = zig_generated_classes_path }, - }); - - const resolved_source_tag_path = b.pathJoin(&.{ opts.generated_code_dir, "ResolvedSourceTag.zig" }); - validateGeneratedPath(resolved_source_tag_path); - obj.root_module.addAnonymousImport("ResolvedSourceTag", .{ - .root_source_file = .{ .cwd_relative = resolved_source_tag_path }, - }); - - const error_code_path = b.pathJoin(&.{ opts.generated_code_dir, "ErrorCode.zig" }); - validateGeneratedPath(error_code_path); - obj.root_module.addAnonymousImport("ErrorCode", .{ - .root_source_file = .{ .cwd_relative = error_code_path }, - }); + // Generated code exposed as individual modules. 
+ inline for (.{ + .{ .file = "ZigGeneratedClasses.zig", .import = "ZigGeneratedClasses" }, + .{ .file = "ResolvedSourceTag.zig", .import = "ResolvedSourceTag" }, + .{ .file = "ErrorCode.zig", .import = "ErrorCode" }, + .{ .file = "runtime.out.js" }, + .{ .file = "bake.client.js", .import = "bake-codegen/bake.client.js", .enable = opts.shouldEmbedCode() }, + .{ .file = "bake.error.js", .import = "bake-codegen/bake.error.js", .enable = opts.shouldEmbedCode() }, + .{ .file = "bake.server.js", .import = "bake-codegen/bake.server.js", .enable = opts.shouldEmbedCode() }, + .{ .file = "bun-error/index.js", .enable = opts.shouldEmbedCode() }, + .{ .file = "bun-error/bun-error.css", .enable = opts.shouldEmbedCode() }, + .{ .file = "fallback-decoder.js", .enable = opts.shouldEmbedCode() }, + .{ .file = "node-fallbacks/assert.js" }, + .{ .file = "node-fallbacks/buffer.js" }, + .{ .file = "node-fallbacks/console.js" }, + .{ .file = "node-fallbacks/constants.js" }, + .{ .file = "node-fallbacks/crypto.js" }, + .{ .file = "node-fallbacks/domain.js" }, + .{ .file = "node-fallbacks/events.js" }, + .{ .file = "node-fallbacks/http.js" }, + .{ .file = "node-fallbacks/https.js" }, + .{ .file = "node-fallbacks/net.js" }, + .{ .file = "node-fallbacks/os.js" }, + .{ .file = "node-fallbacks/path.js" }, + .{ .file = "node-fallbacks/process.js" }, + .{ .file = "node-fallbacks/punycode.js" }, + .{ .file = "node-fallbacks/querystring.js" }, + .{ .file = "node-fallbacks/stream.js" }, + .{ .file = "node-fallbacks/string_decoder.js" }, + .{ .file = "node-fallbacks/sys.js" }, + .{ .file = "node-fallbacks/timers.js" }, + .{ .file = "node-fallbacks/tty.js" }, + .{ .file = "node-fallbacks/url.js" }, + .{ .file = "node-fallbacks/util.js" }, + .{ .file = "node-fallbacks/zlib.js" }, + }) |entry| { + if (!@hasField(@TypeOf(entry), "enable") or entry.enable) { + const path = b.pathJoin(&.{ opts.codegen_path, entry.file }); + validateGeneratedPath(path); + const import_path = if (@hasField(@TypeOf(entry), "import")) + entry.import + else + entry.file; + obj.root_module.addAnonymousImport(import_path, .{ + .root_source_file = .{ .cwd_relative = path }, + }); + } + } if (os == .windows) { obj.root_module.addAnonymousImport("bun_shim_impl.exe", .{ diff --git a/bun.lockb b/bun.lockb index 0e38e94cfc..4ccfae2715 100755 Binary files a/bun.lockb and b/bun.lockb differ diff --git a/ci/README.md b/ci/README.md new file mode 100644 index 0000000000..fbd89a34dd --- /dev/null +++ b/ci/README.md @@ -0,0 +1,84 @@ +# CI + +This directory contains scripts for building CI images for Bun. + +## Building + +### `macOS` + +On macOS, images are built using [`tart`](https://tart.run/), a tool that abstracts over the [`Virtualization.Framework`](https://developer.apple.com/documentation/virtualization) APIs, to run macOS VMs. + +To install the dependencies required, run: + +```sh +$ cd ci +$ bun run bootstrap +``` + +To build a vanilla macOS VM, run: + +```sh +$ bun run build:darwin-aarch64-vanilla +``` + +This builds a vanilla macOS VM with the current macOS release on your machine. It runs scripts to disable things like spotlight and siri, but it does not install any software. + +> Note: The image size is 50GB, so make sure you have enough disk space. + +If you want to build a specific macOS release, you can run: + +```sh +$ bun run build:darwin-aarch64-vanilla-15 +``` + +> Note: You cannot build a newer release of macOS on an older macOS machine. 
+ +To build a macOS VM with software installed to build and test Bun, run: + +```sh +$ bun run build:darwin-aarch64 +``` + +## Running + +### `macOS` + +## How To + +### Support a new macOS release + +1. Visit [`ipsw.me`](https://ipsw.me/VirtualMac2,1) and find the IPSW of the macOS release you want to build. + +2. Add an entry to [`ci/darwin/variables.pkr.hcl`](/ci/darwin/variables.pkr.hcl) with the following format: + +```hcl +sonoma = { + distro = "sonoma" + release = "15" + ipsw = "https://updates.cdn-apple.com/..." +} +``` + +3. Add matching scripts to [`ci/package.json`](/ci/package.json) to build the image, then test it: + +```sh +$ bun run build:darwin-aarch64-vanilla-15 +``` + +> Note: If you need to troubleshoot the build, you can remove the `headless = true` property from [`ci/darwin/image-vanilla.pkr.hcl`](/ci/darwin/image-vanilla.pkr.hcl) and the VM's screen will be displayed. + +4. Test and build the non-vanilla image: + +```sh +$ bun run build:darwin-aarch64-15 +``` + +This will use the vanilla image and run the [`scripts/bootstrap.sh`](/scripts/bootstrap.sh) script to install the required software to build and test Bun. + +5. Publish the images: + +```sh +$ bun run login +$ bun run publish:darwin-aarch64-vanilla-15 +$ bun run publish:darwin-aarch64-15 +``` diff --git a/ci/alpine/build.Dockerfile b/ci/alpine/build.Dockerfile new file mode 100644 index 0000000000..f1f9aabb87 --- /dev/null +++ b/ci/alpine/build.Dockerfile @@ -0,0 +1,22 @@ +FROM alpine:edge AS build +ARG GIT_SHA +ENV GIT_SHA=${GIT_SHA} +WORKDIR /app/bun +ENV HOME=/root + +COPY . . +RUN touch $HOME/.bashrc +RUN ./scripts/bootstrap.sh +RUN . $HOME/.bashrc && bun run build:release + +RUN apk add file +RUN file ./build/release/bun +RUN ldd ./build/release/bun +RUN ./build/release/bun + +RUN cp -R /app/bun/build/* /output + +FROM scratch AS artifact +COPY --from=build /output / + +# docker build -f ./ci/alpine/build.Dockerfile --progress=plain --build-arg GIT_SHA="$(git rev-parse HEAD)" --target=artifact --output type=local,dest=./build-alpine . diff --git a/ci/alpine/test.Dockerfile b/ci/alpine/test.Dockerfile new file mode 100644 index 0000000000..e6836fe9d2 --- /dev/null +++ b/ci/alpine/test.Dockerfile @@ -0,0 +1,20 @@ +FROM alpine:edge +ENV HOME=/root +WORKDIR /root +COPY ./build-alpine/release/bun . +COPY ./test ./test +COPY ./scripts ./scripts +COPY ./package.json ./package.json +COPY ./packages ./packages + +RUN apk update +RUN apk add nodejs lsb-release-minimal git python3 npm make g++ +RUN apk add file + +RUN file /root/bun +RUN ldd /root/bun +RUN /root/bun + +RUN ./scripts/runner.node.mjs --exec-path /root/bun + +# docker build -f ./ci/alpine/test.Dockerfile --progress=plain . diff --git a/ci/darwin/image-vanilla.pkr.hcl b/ci/darwin/image-vanilla.pkr.hcl new file mode 100644 index 0000000000..40455713b4 --- /dev/null +++ b/ci/darwin/image-vanilla.pkr.hcl @@ -0,0 +1,46 @@ +# Generates a vanilla macOS VM with optimized settings for virtualized environments. +# See login.sh and optimize.sh for details. 
+ +data "external-raw" "boot-script" { + program = ["sh", "-c", templatefile("scripts/boot-image.sh", var)] +} + +source "tart-cli" "bun-darwin-aarch64-vanilla" { + vm_name = "bun-darwin-aarch64-vanilla-${local.release.distro}-${local.release.release}" + from_ipsw = local.release.ipsw + cpu_count = local.cpu_count + memory_gb = local.memory_gb + disk_size_gb = local.disk_size_gb + ssh_username = local.username + ssh_password = local.password + ssh_timeout = "120s" + create_grace_time = "30s" + boot_command = split("\n", data.external-raw.boot-script.result) + headless = true # Disable if you need to debug why the boot_command is not working +} + +build { + sources = ["source.tart-cli.bun-darwin-aarch64-vanilla"] + + provisioner "file" { + content = file("scripts/setup-login.sh") + destination = "/tmp/setup-login.sh" + } + + provisioner "shell" { + inline = ["echo \"${local.password}\" | sudo -S sh -c 'sh /tmp/setup-login.sh \"${local.username}\" \"${local.password}\"'"] + } + + provisioner "file" { + content = file("scripts/optimize-machine.sh") + destination = "/tmp/optimize-machine.sh" + } + + provisioner "shell" { + inline = ["sudo sh /tmp/optimize-machine.sh"] + } + + provisioner "shell" { + inline = ["sudo rm -rf /tmp/*"] + } +} diff --git a/ci/darwin/image.pkr.hcl b/ci/darwin/image.pkr.hcl new file mode 100644 index 0000000000..b536efbecb --- /dev/null +++ b/ci/darwin/image.pkr.hcl @@ -0,0 +1,44 @@ +# Generates a macOS VM with software installed to build and test Bun. + +source "tart-cli" "bun-darwin-aarch64" { + vm_name = "bun-darwin-aarch64-${local.release.distro}-${local.release.release}" + vm_base_name = "bun-darwin-aarch64-vanilla-${local.release.distro}-${local.release.release}" + cpu_count = local.cpu_count + memory_gb = local.memory_gb + disk_size_gb = local.disk_size_gb + ssh_username = local.username + ssh_password = local.password + ssh_timeout = "120s" + headless = true +} + +build { + sources = ["source.tart-cli.bun-darwin-aarch64"] + + provisioner "file" { + content = file("../../scripts/bootstrap.sh") + destination = "/tmp/bootstrap.sh" + } + + provisioner "shell" { + inline = ["CI=true sh /tmp/bootstrap.sh"] + } + + provisioner "file" { + source = "darwin/plists/" + destination = "/tmp/" + } + + provisioner "shell" { + inline = [ + "sudo ls /tmp/", + "sudo mv /tmp/*.plist /Library/LaunchDaemons/", + "sudo chown root:wheel /Library/LaunchDaemons/*.plist", + "sudo chmod 644 /Library/LaunchDaemons/*.plist", + ] + } + + provisioner "shell" { + inline = ["sudo rm -rf /tmp/*"] + } +} diff --git a/ci/darwin/plists/buildkite-agent.plist b/ci/darwin/plists/buildkite-agent.plist new file mode 100644 index 0000000000..23c058913f --- /dev/null +++ b/ci/darwin/plists/buildkite-agent.plist @@ -0,0 +1,44 @@ + + + + + Label + com.buildkite.buildkite-agent + + ProgramArguments + + /usr/local/bin/buildkite-agent + start + + + KeepAlive + + SuccessfulExit + + + + RunAtLoad + + + StandardOutPath + /var/buildkite-agent/logs/buildkite-agent.log + + StandardErrorPath + /var/buildkite-agent/logs/buildkite-agent.log + + EnvironmentVariables + + BUILDKITE_AGENT_CONFIG + /etc/buildkite-agent/buildkite-agent.cfg + + + LimitLoadToSessionType + + Aqua + LoginWindow + Background + StandardIO + System + + + \ No newline at end of file diff --git a/ci/darwin/plists/tailscale.plist b/ci/darwin/plists/tailscale.plist new file mode 100644 index 0000000000..cbe3f001b0 --- /dev/null +++ b/ci/darwin/plists/tailscale.plist @@ -0,0 +1,20 @@ + + + + + Label + com.tailscale.tailscaled + + ProgramArguments + + 
/usr/local/bin/tailscale + up + --ssh + --authkey + ${TAILSCALE_AUTHKEY} + + + RunAtLoad + + + \ No newline at end of file diff --git a/ci/darwin/plists/tailscaled.plist b/ci/darwin/plists/tailscaled.plist new file mode 100644 index 0000000000..12d316f1ab --- /dev/null +++ b/ci/darwin/plists/tailscaled.plist @@ -0,0 +1,16 @@ + + + + + Label + com.tailscale.tailscaled + + ProgramArguments + + /usr/local/bin/tailscaled + + + RunAtLoad + + + \ No newline at end of file diff --git a/ci/darwin/scripts/boot-image.sh b/ci/darwin/scripts/boot-image.sh new file mode 100755 index 0000000000..02ae01db03 --- /dev/null +++ b/ci/darwin/scripts/boot-image.sh @@ -0,0 +1,124 @@ +#!/bin/sh + +# This script generates the boot commands for the macOS installer GUI. +# It is run on your local machine, not inside the VM. + +# Sources: +# - https://github.com/cirruslabs/macos-image-templates/blob/master/templates/vanilla-sequoia.pkr.hcl + +if ! [ "${release}" ] || ! [ "${username}" ] || ! [ "${password}" ]; then + echo "Script must be run with variables: release, username, and password" >&2 + exit 1 +fi + +# Hello, hola, bonjour, etc. +echo "" + +# Select Your Country and Region +echo "italianoenglish" +echo "united states" + +# Written and Spoken Languages +echo "" + +# Accessibility +echo "" + +# Data & Privacy +echo "" + +# Migration Assistant +echo "" + +# Sign In with Your Apple ID +echo "" + +# Are you sure you want to skip signing in with an Apple ID? +echo "" + +# Terms and Conditions +echo "" + +# I have read and agree to the macOS Software License Agreement +echo "" + +# Create a Computer Account +echo "${username}${password}${password}" + +# Enable Location Services +echo "" + +# Are you sure you don't want to use Location Services? +echo "" + +# Select Your Time Zone +echo "UTC" + +# Analytics +echo "" + +# Screen Time +echo "" + +# Siri +echo "" + +# Choose Your Look +echo "" + +if [ "${release}" = "13" ] || [ "${release}" = "14" ]; then + # Enable Voice Over + echo "v" +else + # Welcome to Mac + echo "" + + # Enable Keyboard navigation + echo "Terminal" + echo "defaults write NSGlobalDomain AppleKeyboardUIMode -int 3" + echo "q" +fi + +# Now that the installation is done, open "System Settings" +echo "System Settings" + +# Navigate to "Sharing" +echo "fsharing" + +if [ "${release}" = "13" ]; then + # Navigate to "Screen Sharing" and enable it + echo "" + + # Navigate to "Remote Login" and enable it + echo "" + + # Open "Remote Login" details + echo "" + + # Enable "Full Disk Access" + echo "" + + # Click "Done" + echo "" + + # Disable Voice Over + echo "" +elif [ "${release}" = "14" ]; then + # Navigate to "Screen Sharing" and enable it + echo "" + + # Navigate to "Remote Login" and enable it + echo "" + + # Disable Voice Over + echo "" +elif [ "${release}" = "15" ]; then + # Navigate to "Screen Sharing" and enable it + echo "" + + # Navigate to "Remote Login" and enable it + echo "" +fi + +# Quit System Settings +echo "q" diff --git a/ci/darwin/scripts/optimize-machine.sh b/ci/darwin/scripts/optimize-machine.sh new file mode 100644 index 0000000000..1d58ff4bb3 --- /dev/null +++ b/ci/darwin/scripts/optimize-machine.sh @@ -0,0 +1,122 @@ +#!/bin/sh + +# This script optimizes macOS for virtualized environments. +# It disables things like spotlight, screen saver, and sleep. 
+ +# Sources: +# - https://github.com/sickcodes/osx-optimizer +# - https://github.com/koding88/MacBook-Optimization-Script +# - https://www.macstadium.com/blog/simple-optimizations-for-macos-and-ios-build-agents + +if [ "$(id -u)" != "0" ]; then + echo "This script must be run using sudo." >&2 + exit 1 +fi + +execute() { + echo "$ $@" >&2 + if ! "$@"; then + echo "Command failed: $@" >&2 + exit 1 + fi +} + +disable_software_update() { + execute softwareupdate --schedule off + execute defaults write com.apple.SoftwareUpdate AutomaticDownload -bool false + execute defaults write com.apple.SoftwareUpdate AutomaticCheckEnabled -bool false + execute defaults write com.apple.SoftwareUpdate ConfigDataInstall -int 0 + execute defaults write com.apple.SoftwareUpdate CriticalUpdateInstall -int 0 + execute defaults write com.apple.SoftwareUpdate ScheduleFrequency -int 0 + execute defaults write com.apple.SoftwareUpdate AutomaticDownload -int 0 + execute defaults write com.apple.commerce AutoUpdate -bool false + execute defaults write com.apple.commerce AutoUpdateRestartRequired -bool false +} + +disable_spotlight() { + execute mdutil -i off -a + execute mdutil -E / +} + +disable_siri() { + execute launchctl unload -w /System/Library/LaunchAgents/com.apple.Siri.agent.plist + execute defaults write com.apple.Siri StatusMenuVisible -bool false + execute defaults write com.apple.Siri UserHasDeclinedEnable -bool true + execute defaults write com.apple.assistant.support "Assistant Enabled" 0 +} + +disable_sleep() { + execute systemsetup -setsleep Never + execute systemsetup -setcomputersleep Never + execute systemsetup -setdisplaysleep Never + execute systemsetup -setharddisksleep Never +} + +disable_screen_saver() { + execute defaults write com.apple.screensaver loginWindowIdleTime 0 + execute defaults write com.apple.screensaver idleTime 0 +} + +disable_screen_lock() { + execute defaults write com.apple.loginwindow DisableScreenLock -bool true +} + +disable_wallpaper() { + execute defaults write com.apple.loginwindow DesktopPicture "" +} + +disable_application_state() { + execute defaults write com.apple.loginwindow TALLogoutSavesState -bool false +} + +disable_accessibility() { + execute defaults write com.apple.Accessibility DifferentiateWithoutColor -int 1 + execute defaults write com.apple.Accessibility ReduceMotionEnabled -int 1 + execute defaults write com.apple.universalaccess reduceMotion -int 1 + execute defaults write com.apple.universalaccess reduceTransparency -int 1 +} + +disable_dashboard() { + execute defaults write com.apple.dashboard mcx-disabled -boolean YES + execute killall Dock +} + +disable_animations() { + execute defaults write NSGlobalDomain NSAutomaticWindowAnimationsEnabled -bool false + execute defaults write -g QLPanelAnimationDuration -float 0 + execute defaults write com.apple.finder DisableAllAnimations -bool true +} + +disable_time_machine() { + execute tmutil disable +} + +enable_performance_mode() { + # https://support.apple.com/en-us/101992 + if ! 
[ $(nvram boot-args 2>/dev/null | grep -q serverperfmode) ]; then + execute nvram boot-args="serverperfmode=1 $(nvram boot-args 2>/dev/null | cut -f 2-)" + fi +} + +add_terminal_to_desktop() { + execute ln -sf /System/Applications/Utilities/Terminal.app ~/Desktop/Terminal +} + +main() { + disable_software_update + disable_spotlight + disable_siri + disable_sleep + disable_screen_saver + disable_screen_lock + disable_wallpaper + disable_application_state + disable_accessibility + disable_dashboard + disable_animations + disable_time_machine + enable_performance_mode + add_terminal_to_desktop +} + +main diff --git a/ci/darwin/scripts/setup-login.sh b/ci/darwin/scripts/setup-login.sh new file mode 100755 index 0000000000..f68beb26f2 --- /dev/null +++ b/ci/darwin/scripts/setup-login.sh @@ -0,0 +1,78 @@ +#!/bin/sh + +# This script generates a /etc/kcpassword file to enable auto-login on macOS. +# Yes, this stores your password in plain text. Do NOT do this on your local machine. + +# Sources: +# - https://github.com/xfreebird/kcpassword/blob/master/kcpassword + +if [ "$(id -u)" != "0" ]; then + echo "This script must be run using sudo." >&2 + exit 1 +fi + +execute() { + echo "$ $@" >&2 + if ! "$@"; then + echo "Command failed: $@" >&2 + exit 1 + fi +} + +kcpassword() { + passwd="$1" + key="7d 89 52 23 d2 bc dd ea a3 b9 1f" + passwd_hex=$(printf "%s" "$passwd" | xxd -p | tr -d '\n') + + key_len=33 + passwd_len=${#passwd_hex} + remainder=$((passwd_len % key_len)) + if [ $remainder -ne 0 ]; then + padding=$((key_len - remainder)) + passwd_hex="${passwd_hex}$(printf '%0*x' $((padding / 2)) 0)" + fi + + result="" + i=0 + while [ $i -lt ${#passwd_hex} ]; do + for byte in $key; do + [ $i -ge ${#passwd_hex} ] && break + p="${passwd_hex:$i:2}" + r=$(printf '%02x' $((0x$p ^ 0x$byte))) + result="${result}${r}" + i=$((i + 2)) + done + done + + echo "$result" +} + +login() { + username="$1" + password="$2" + + enable_passwordless_sudo() { + execute mkdir -p /etc/sudoers.d/ + echo "${username} ALL=(ALL) NOPASSWD: ALL" | EDITOR=tee execute visudo "/etc/sudoers.d/${username}-nopasswd" + } + + enable_auto_login() { + echo "00000000: 1ced 3f4a bcbc ba2c caca 4e82" | execute xxd -r - /etc/kcpassword + execute defaults write /Library/Preferences/com.apple.loginwindow autoLoginUser "${username}" + } + + disable_screen_lock() { + execute sysadminctl -screenLock off -password "${password}" + } + + enable_passwordless_sudo + enable_auto_login + disable_screen_lock +} + +if [ $# -ne 2 ]; then + echo "Usage: $0 " >&2 + exit 1 +fi + +login "$@" diff --git a/ci/darwin/variables.pkr.hcl b/ci/darwin/variables.pkr.hcl new file mode 100644 index 0000000000..d1133eb04a --- /dev/null +++ b/ci/darwin/variables.pkr.hcl @@ -0,0 +1,78 @@ +packer { + required_plugins { + tart = { + version = ">= 1.12.0" + source = "github.com/cirruslabs/tart" + } + external = { + version = ">= 0.0.2" + source = "github.com/joomcode/external" + } + } +} + +variable "release" { + type = number + default = 13 +} + +variable "username" { + type = string + default = "admin" +} + +variable "password" { + type = string + default = "admin" +} + +variable "cpu_count" { + type = number + default = 2 +} + +variable "memory_gb" { + type = number + default = 4 +} + +variable "disk_size_gb" { + type = number + default = 50 +} + +locals { + sequoia = { + tier = 1 + distro = "sequoia" + release = "15" + ipsw = "https://updates.cdn-apple.com/2024FallFCS/fullrestores/062-78489/BDA44327-C79E-4608-A7E0-455A7E91911F/UniversalMac_15.0_24A335_Restore.ipsw" + } + + sonoma 
= { + tier = 2 + distro = "sonoma" + release = "14" + ipsw = "https://updates.cdn-apple.com/2023FallFCS/fullrestores/042-54934/0E101AD6-3117-4B63-9BF1-143B6DB9270A/UniversalMac_14.0_23A344_Restore.ipsw" + } + + ventura = { + tier = 2 + distro = "ventura" + release = "13" + ipsw = "https://updates.cdn-apple.com/2022FallFCS/fullrestores/012-92188/2C38BCD1-2BFF-4A10-B358-94E8E28BE805/UniversalMac_13.0_22A380_Restore.ipsw" + } + + releases = { + 15 = local.sequoia + 14 = local.sonoma + 13 = local.ventura + } + + release = local.releases[var.release] + username = var.username + password = var.password + cpu_count = var.cpu_count + memory_gb = var.memory_gb + disk_size_gb = var.disk_size_gb +} diff --git a/ci/package.json b/ci/package.json new file mode 100644 index 0000000000..ffb1297dcd --- /dev/null +++ b/ci/package.json @@ -0,0 +1,27 @@ +{ + "private": true, + "scripts": { + "bootstrap": "brew install gh jq cirruslabs/cli/tart cirruslabs/cli/sshpass hashicorp/tap/packer && packer init darwin", + "login": "gh auth token | tart login ghcr.io --username $(gh api user --jq .login) --password-stdin", + "fetch:image-name": "echo ghcr.io/oven-sh/bun-vm", + "fetch:darwin-version": "echo 1", + "fetch:macos-version": "sw_vers -productVersion | cut -d. -f1", + "fetch:script-version": "cat ../scripts/bootstrap.sh | grep 'v=' | sed 's/v=\"//;s/\"//' | head -n 1", + "build:darwin-aarch64-vanilla": "packer build '-only=*.bun-darwin-aarch64-vanilla' -var release=$(bun fetch:macos-version) darwin/", + "build:darwin-aarch64-vanilla-15": "packer build '-only=*.bun-darwin-aarch64-vanilla' -var release=15 darwin/", + "build:darwin-aarch64-vanilla-14": "packer build '-only=*.bun-darwin-aarch64-vanilla' -var release=14 darwin/", + "build:darwin-aarch64-vanilla-13": "packer build '-only=*.bun-darwin-aarch64-vanilla' -var release=13 darwin/", + "build:darwin-aarch64": "packer build '-only=*.bun-darwin-aarch64' -var release=$(bun fetch:macos-version) darwin/", + "build:darwin-aarch64-15": "packer build '-only=*.bun-darwin-aarch64' -var release=15 darwin/", + "build:darwin-aarch64-14": "packer build '-only=*.bun-darwin-aarch64' -var release=14 darwin/", + "build:darwin-aarch64-13": "packer build '-only=*.bun-darwin-aarch64' -var release=13 darwin/", + "publish:darwin-aarch64-vanilla": "image=$(tart list --format json | jq -r \".[] | select(.Name | test(\\\"^bun-darwin-aarch64-vanilla-.*-$(bun fetch:macos-version)$\\\")) | .Name\" | head -n 1 | sed 's/bun-//'); tart push \"bun-$image\" \"ghcr.io/oven-sh/bun-vm:$image-v$(bun fetch:darwin-version)\"", + "publish:darwin-aarch64-vanilla-15": "tart push bun-darwin-aarch64-vanilla-sequoia-15 \"$(bun fetch:image-name):darwin-aarch64-vanilla-sequoia-15-v$(bun fetch:darwin-version)\"", + "publish:darwin-aarch64-vanilla-14": "tart push bun-darwin-aarch64-vanilla-sonoma-14 \"$(bun fetch:image-name):darwin-aarch64-vanilla-sonoma-14-v$(bun fetch:darwin-version)\"", + "publish:darwin-aarch64-vanilla-13": "tart push bun-darwin-aarch64-vanilla-ventura-13 \"$(bun fetch:image-name):darwin-aarch64-vanilla-ventura-13-v$(bun fetch:darwin-version)\"", + "publish:darwin-aarch64": "image=$(tart list --format json | jq -r \".[] | select(.Name | test(\\\"^bun-darwin-aarch64-.*-$(bun fetch:macos-version)$\\\")) | .Name\" | head -n 1 | sed 's/bun-//'); tart push \"bun-$image\" \"ghcr.io/oven-sh/bun-vm:$image-v$(bun fetch:script-version)\"", + "publish:darwin-aarch64-15": "tart push bun-darwin-aarch64-sequoia-15 \"$(bun fetch:image-name):darwin-aarch64-sequoia-15-v$(bun fetch:script-version)\"", + 
"publish:darwin-aarch64-14": "tart push bun-darwin-aarch64-sonoma-14 \"$(bun fetch:image-name):darwin-aarch64-sonoma-14-v$(bun fetch:script-version)\"", + "publish:darwin-aarch64-13": "tart push bun-darwin-aarch64-ventura-13 \"$(bun fetch:image-name):darwin-aarch64-ventura-13-v$(bun fetch:script-version)\"" + } +} diff --git a/cmake/CompilerFlags.cmake b/cmake/CompilerFlags.cmake new file mode 100644 index 0000000000..31d738134a --- /dev/null +++ b/cmake/CompilerFlags.cmake @@ -0,0 +1,298 @@ +# clang: https://clang.llvm.org/docs/CommandGuide/clang.html +# clang-cl: https://clang.llvm.org/docs/UsersManual.html#id11 + +# --- Macros --- + +macro(setb variable) + if(${variable}) + set(${variable} ON) + else() + set(${variable} OFF) + endif() +endmacro() + +set(targets WIN32 APPLE UNIX LINUX) + +foreach(target ${targets}) + setb(${target}) +endforeach() + +# --- CPU target --- +if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm|ARM|arm64|ARM64|aarch64|AARCH64") + if(APPLE) + register_compiler_flags(-mcpu=apple-m1) + else() + register_compiler_flags(-march=armv8-a+crc -mtune=ampere1) + endif() +elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "x86_64|X86_64|x64|X64|amd64|AMD64") + if(ENABLE_BASELINE) + register_compiler_flags(-march=nehalem) + else() + register_compiler_flags(-march=haswell) + endif() +else() + unsupported(CMAKE_SYSTEM_PROCESSOR) +endif() + +# --- MSVC runtime --- +if(WIN32) + register_compiler_flags( + DESCRIPTION "Use static MSVC runtime" + /MTd ${DEBUG} + /MT ${RELEASE} + /U_DLL + ) +endif() + +# --- Optimization level --- +if(DEBUG) + register_compiler_flags( + DESCRIPTION "Disable optimization" + /Od ${WIN32} + -O0 ${UNIX} + ) +elseif(ENABLE_SMOL) + register_compiler_flags( + DESCRIPTION "Optimize for size" + /Os ${WIN32} + -Os ${UNIX} + ) +else() + register_compiler_flags( + DESCRIPTION "Optimize for speed" + /O2 ${WIN32} # TODO: change to /0t (same as -O3) to match macOS and Linux? 
+ -O3 ${UNIX} + ) +endif() + +# --- Debug level --- +if(WIN32) + register_compiler_flags( + DESCRIPTION "Enable debug symbols (.pdb)" + /Z7 + ) +elseif(APPLE) + register_compiler_flags( + DESCRIPTION "Enable debug symbols (.dSYM)" + -gdwarf-4 + ) +endif() + +if(UNIX) + register_compiler_flags( + DESCRIPTION "Enable debug symbols" + -g3 ${DEBUG} + -g1 ${RELEASE} + ) + + register_compiler_flags( + DESCRIPTION "Optimize debug symbols for LLDB" + -glldb + ) +endif() + +# TODO: consider other debug options +# -fdebug-macro # Emit debug info for macros +# -fstandalone-debug # Emit debug info for non-system libraries +# -fno-eliminate-unused-debug-types # Don't eliminate unused debug symbols + +# --- C/C++ flags --- +register_compiler_flags( + DESCRIPTION "Disable C/C++ exceptions" + -fno-exceptions ${UNIX} + /EHsc ${WIN32} # (s- disables C++, c- disables C) +) + +register_compiler_flags( + DESCRIPTION "Disable C++ static destructors" + LANGUAGES CXX + -Xclang ${WIN32} + -fno-c++-static-destructors +) + +register_compiler_flags( + DESCRIPTION "Disable runtime type information (RTTI)" + /GR- ${WIN32} + -fno-rtti ${UNIX} +) + +register_compiler_flags( + DESCRIPTION "Keep frame pointers" + /Oy- ${WIN32} + -fno-omit-frame-pointer ${UNIX} + -mno-omit-leaf-frame-pointer ${UNIX} +) + +if(UNIX) + register_compiler_flags( + DESCRIPTION "Set C/C++ visibility to hidden" + -fvisibility=hidden + -fvisibility-inlines-hidden + ) + + register_compiler_flags( + DESCRIPTION "Disable unwind tables" + -fno-unwind-tables + -fno-asynchronous-unwind-tables + ) +endif() + +register_compiler_flags( + DESCRIPTION "Place each function in its own section" + -ffunction-sections ${UNIX} + /Gy ${WIN32} +) + +register_compiler_flags( + DESCRIPTION "Place each data item in its own section" + -fdata-sections ${UNIX} + /Gw ${WIN32} +) + +# having this enabled in debug mode on macOS >=14 causes libarchive to fail to configure with the error: +# > pid_t doesn't exist on this platform? 
+if((DEBUG AND LINUX) OR((NOT DEBUG) AND UNIX)) + register_compiler_flags( + DESCRIPTION "Emit an address-significance table" + -faddrsig + ) +endif() + +if(WIN32) + register_compiler_flags( + DESCRIPTION "Enable string pooling" + /GF + ) + + register_compiler_flags( + DESCRIPTION "Assume thread-local variables are defined in the executable" + /GA + ) +endif() + +# --- Linker flags --- +if(LINUX) + register_linker_flags( + DESCRIPTION "Disable relocation read-only (RELRO)" + -Wl,-z,norelro + ) +endif() + +# --- Assertions --- + +# Note: This is a helpful guide about assertions: +# https://best.openssf.org/Compiler-Hardening-Guides/Compiler-Options-Hardening-Guide-for-C-and-C++ +if(ENABLE_ASSERTIONS) + register_compiler_flags( + DESCRIPTION "Do not eliminate null-pointer checks" + -fno-delete-null-pointer-checks + ) + + register_compiler_definitions( + DESCRIPTION "Enable libc++ assertions" + _LIBCPP_ENABLE_ASSERTIONS=1 + _LIBCPP_HARDENING_MODE=_LIBCPP_HARDENING_MODE_EXTENSIVE ${RELEASE} + _LIBCPP_HARDENING_MODE=_LIBCPP_HARDENING_MODE_DEBUG ${DEBUG} + ) + + register_compiler_definitions( + DESCRIPTION "Enable fortified sources" + _FORTIFY_SOURCE=3 + ) + + if(LINUX) + register_compiler_definitions( + DESCRIPTION "Enable glibc++ assertions" + _GLIBCXX_ASSERTIONS=1 + ) + endif() +else() + register_compiler_definitions( + DESCRIPTION "Disable debug assertions" + NDEBUG=1 + ) + + register_compiler_definitions( + DESCRIPTION "Disable libc++ assertions" + _LIBCPP_ENABLE_ASSERTIONS=0 + _LIBCPP_HARDENING_MODE=_LIBCPP_HARDENING_MODE_NONE + ) + + if(LINUX) + register_compiler_definitions( + DESCRIPTION "Disable glibc++ assertions" + _GLIBCXX_ASSERTIONS=0 + ) + endif() +endif() + +# --- Diagnostics --- +if(UNIX) + register_compiler_flags( + DESCRIPTION "Enable color diagnostics" + -fdiagnostics-color=always + ) +endif() + +register_compiler_flags( + DESCRIPTION "Set C/C++ error limit" + -ferror-limit=${ERROR_LIMIT} +) + +# --- LTO --- +if(ENABLE_LTO) + register_compiler_flags( + DESCRIPTION "Enable link-time optimization (LTO)" + -flto=full ${UNIX} + -flto ${WIN32} + ) + + if(UNIX) + register_compiler_flags( + DESCRIPTION "Enable virtual tables" + LANGUAGES CXX + -fforce-emit-vtables + -fwhole-program-vtables + ) + + register_linker_flags( + DESCRIPTION "Enable link-time optimization (LTO)" + -flto=full + -fwhole-program-vtables + -fforce-emit-vtables + ) + endif() +endif() + +# --- Remapping --- +if(UNIX AND CI) + register_compiler_flags( + DESCRIPTION "Remap source files" + -ffile-prefix-map=${CWD}=. + -ffile-prefix-map=${VENDOR_PATH}=vendor + -ffile-prefix-map=${CACHE_PATH}=cache + ) +endif() + +# --- Features --- + +# Valgrind cannot handle SSE4.2 instructions +# This is needed for picohttpparser +if(ENABLE_VALGRIND AND ARCH STREQUAL "x64") + register_compiler_definitions(__SSE4_2__=0) +endif() + +# --- Other --- + +# Workaround for CMake and clang-cl bug. +# https://github.com/ninja-build/ninja/issues/2280 +if(WIN32 AND NOT CMAKE_CL_SHOWINCLUDES_PREFIX) + set(CMAKE_CL_SHOWINCLUDES_PREFIX "Note: including file:") +endif() + +# WebKit uses -std=gnu++20 on non-macOS non-Windows. +# If we do not set this, it will crash at startup on the first memory allocation. 
+if(NOT WIN32 AND NOT APPLE) + set(CMAKE_CXX_EXTENSIONS ON) + set(CMAKE_POSITION_INDEPENDENT_CODE OFF) +endif() diff --git a/cmake/Globals.cmake b/cmake/Globals.cmake new file mode 100644 index 0000000000..9760101274 --- /dev/null +++ b/cmake/Globals.cmake @@ -0,0 +1,944 @@ +include(CMakeParseArguments) + +# --- Global macros --- + +# setx() +# Description: +# Sets a variable, similar to `set()`, but also prints the value. +# Arguments: +# variable string - The variable to set +# value string - The value to set the variable to +macro(setx) + set(${ARGV}) + message(STATUS "Set ${ARGV0}: ${${ARGV0}}") +endmacro() + +# optionx() +# Description: +# Defines an option, similar to `option()`, but allows for bool, string, and regex types. +# Arguments: +# variable string - The variable to set +# type string - The type of the variable +# description string - The description of the variable +# DEFAULT string - The default value of the variable +# PREVIEW string - The preview value of the variable +# REGEX string - The regex to match the value +# REQUIRED bool - Whether the variable is required +macro(optionx variable type description) + set(options REQUIRED) + set(oneValueArgs DEFAULT PREVIEW REGEX) + set(multiValueArgs) + cmake_parse_arguments(${variable} "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN}) + + if(DEFINED ${variable}) + set(${variable}_VALUE ${${variable}}) + endif() + + if(NOT ${type} MATCHES "^(BOOL|STRING|FILEPATH|PATH|INTERNAL)$") + set(${variable}_REGEX ${type}) + set(${variable}_TYPE STRING) + else() + set(${variable}_TYPE ${type}) + endif() + + set(${variable} ${${variable}_DEFAULT} CACHE ${${variable}_TYPE} ${description}) + set(${variable}_SOURCE "argument") + set(${variable}_PREVIEW -D${variable}) + + if(DEFINED ENV{${variable}}) + set(${variable} $ENV{${variable}} CACHE ${${variable}_TYPE} ${description} FORCE) + set(${variable}_SOURCE "environment variable") + set(${variable}_PREVIEW ${variable}) + endif() + + if(NOT ${variable} AND ${${variable}_REQUIRED}) + message(FATAL_ERROR "Required ${${variable}_SOURCE} is missing: please set, ${${variable}_PREVIEW}=<${${variable}_REGEX}>") + endif() + + if(${type} STREQUAL "BOOL") + if("${${variable}}" MATCHES "^(TRUE|true|ON|on|YES|yes|1)$") + set(${variable} ON) + elseif("${${variable}}" MATCHES "^(FALSE|false|OFF|off|NO|no|0)$") + set(${variable} OFF) + else() + message(FATAL_ERROR "Invalid ${${variable}_SOURCE}: ${${variable}_PREVIEW}=\"${${variable}}\", please use ${${variable}_PREVIEW}=") + endif() + endif() + + if(DEFINED ${variable}_REGEX AND NOT "^(${${variable}_REGEX})$" MATCHES "${${variable}}") + message(FATAL_ERROR "Invalid ${${variable}_SOURCE}: ${${variable}_PREVIEW}=\"${${variable}}\", please use ${${variable}_PREVIEW}=<${${variable}_REGEX}>") + endif() + + if(NOT ${variable}_VALUE STREQUAL ${variable}) + message(STATUS "Set ${variable}: ${${variable}}") + endif() +endmacro() + +# unsupported() +# Description: +# Prints a message that the feature is not supported. 
+# Arguments: +# variable string - The variable that is not supported +macro(unsupported variable) + message(FATAL_ERROR "Unsupported ${variable}: \"${${variable}}\"") +endmacro() + +# --- CMake variables --- + +setx(CMAKE_VERSION ${CMAKE_VERSION}) +setx(CMAKE_COMMAND ${CMAKE_COMMAND}) +setx(CMAKE_HOST_SYSTEM_NAME ${CMAKE_HOST_SYSTEM_NAME}) + +# In script mode, using -P, this variable is not set +if(NOT DEFINED CMAKE_HOST_SYSTEM_PROCESSOR) + cmake_host_system_information(RESULT CMAKE_HOST_SYSTEM_PROCESSOR QUERY OS_PLATFORM) +endif() +setx(CMAKE_HOST_SYSTEM_PROCESSOR ${CMAKE_HOST_SYSTEM_PROCESSOR}) + +if(CMAKE_HOST_APPLE) + set(HOST_OS "darwin") +elseif(CMAKE_HOST_WIN32) + set(HOST_OS "windows") +elseif(CMAKE_HOST_LINUX) + set(HOST_OS "linux") +else() + unsupported(CMAKE_HOST_SYSTEM_NAME) +endif() + +if(EXISTS "/lib/ld-musl-aarch64.so.1") + set(IS_MUSL ON) +elseif(EXISTS "/lib/ld-musl-x86_64.so.1") + set(IS_MUSL ON) +else() + set(IS_MUSL OFF) +endif() + +if(CMAKE_HOST_SYSTEM_PROCESSOR MATCHES "arm64|ARM64|aarch64|AARCH64") + set(HOST_OS "aarch64") +elseif(CMAKE_HOST_SYSTEM_PROCESSOR MATCHES "x86_64|X86_64|x64|X64|amd64|AMD64") + set(HOST_OS "x64") +else() + unsupported(CMAKE_HOST_SYSTEM_PROCESSOR) +endif() + +setx(CMAKE_EXPORT_COMPILE_COMMANDS ON) +setx(CMAKE_COLOR_DIAGNOSTICS ON) + +cmake_host_system_information(RESULT CORE_COUNT QUERY NUMBER_OF_LOGICAL_CORES) +optionx(CMAKE_BUILD_PARALLEL_LEVEL STRING "The number of parallel build jobs" DEFAULT ${CORE_COUNT}) + +# --- Global variables --- + +setx(CWD ${CMAKE_SOURCE_DIR}) +setx(BUILD_PATH ${CMAKE_BINARY_DIR}) + +optionx(CACHE_PATH FILEPATH "The path to the cache directory" DEFAULT ${BUILD_PATH}/cache) +optionx(CACHE_STRATEGY "read-write|read-only|write-only|none" "The strategy to use for caching" DEFAULT "read-write") + +optionx(CI BOOL "If CI is enabled" DEFAULT OFF) +optionx(ENABLE_ANALYSIS BOOL "If static analysis targets should be enabled" DEFAULT OFF) + +if(CI) + set(WARNING FATAL_ERROR) +else() + set(WARNING WARNING) +endif() + +# TODO: This causes flaky zig builds in CI, so temporarily disable it. +# if(CI) +# set(DEFAULT_VENDOR_PATH ${CACHE_PATH}/vendor) +# else() +# set(DEFAULT_VENDOR_PATH ${CWD}/vendor) +# endif() + +optionx(VENDOR_PATH FILEPATH "The path to the vendor directory" DEFAULT ${CWD}/vendor) +optionx(TMP_PATH FILEPATH "The path to the temporary directory" DEFAULT ${BUILD_PATH}/tmp) + +# --- Helper functions --- + +# setenv() +# Description: +# Sets an environment variable during the build step, and writes it to a .env file. +# Arguments: +# variable string - The variable to set +# value string - The value to set the variable to +function(setenv variable value) + set(ENV_PATH ${BUILD_PATH}/.env) + if(value MATCHES "/|\\\\") + file(TO_NATIVE_PATH ${value} value) + endif() + set(ENV_LINE "${variable}=${value}") + + if(EXISTS ${ENV_PATH}) + file(STRINGS ${ENV_PATH} ENV_FILE ENCODING UTF-8) + + foreach(line ${ENV_FILE}) + if(line MATCHES "^${variable}=") + list(REMOVE_ITEM ENV_FILE ${line}) + set(ENV_MODIFIED ON) + endif() + endforeach() + + if(ENV_MODIFIED) + list(APPEND ENV_FILE "${variable}=${value}") + list(JOIN ENV_FILE "\n" ENV_FILE) + file(WRITE ${ENV_PATH} ${ENV_FILE}) + else() + file(APPEND ${ENV_PATH} "\n${variable}=${value}") + endif() + else() + file(WRITE ${ENV_PATH} ${ENV_LINE}) + endif() + + message(STATUS "Set ENV ${variable}: ${value}") +endfunction() + +# satisfies_range() +# Description: +# Check if a version satisfies a version range +# Arguments: +# version string - The version to check (e.g. 
"1.2.3") +# range string - The range to check against (e.g. ">=1.2.3") +# variable string - The variable to store the result in +function(satisfies_range version range variable) + if(range STREQUAL "ignore") + set(${variable} ON PARENT_SCOPE) + return() + endif() + + set(${variable} OFF PARENT_SCOPE) + + string(REGEX MATCH "([0-9]+)\\.([0-9]+)\\.([0-9]+)" match "${version}") + if(NOT match) + return() + endif() + set(version ${CMAKE_MATCH_1}.${CMAKE_MATCH_2}.${CMAKE_MATCH_3}) + + string(REGEX MATCH "(>=|<=|>|<)?([0-9]+)\\.([0-9]+)\\.([0-9]+)" match "${range}") + if(NOT match) + return() + endif() + set(comparator ${CMAKE_MATCH_1}) + set(range ${CMAKE_MATCH_2}.${CMAKE_MATCH_3}.${CMAKE_MATCH_4}) + + if(comparator STREQUAL ">=") + set(comparator VERSION_GREATER_EQUAL) + elseif(comparator STREQUAL ">") + set(comparator VERSION_GREATER) + elseif(comparator STREQUAL "<=") + set(comparator VERSION_LESS_EQUAL) + elseif(comparator STREQUAL "<") + set(comparator VERSION_LESS) + else() + set(comparator VERSION_EQUAL) + endif() + + if(version ${comparator} ${range}) + set(${variable} ON PARENT_SCOPE) + endif() +endfunction() + +# find_command() +# Description: +# Finds a command, similar to `find_program()`, but allows for version checking. +# Arguments: +# VARIABLE string - The variable to set +# VERSION_VARIABLE string - The variable to check for the version +# COMMAND string[] - The names of the command to find +# PATHS string[] - The paths to search for the command +# REQUIRED bool - If false, the command is optional +# VERSION string - The version of the command to find (e.g. "1.2.3" or ">1.2.3") +function(find_command) + set(args VARIABLE VERSION_VARIABLE REQUIRED VERSION) + set(multiArgs COMMAND PATHS) + cmake_parse_arguments(FIND "" "${args}" "${multiArgs}" ${ARGN}) + + if(NOT FIND_VARIABLE OR NOT FIND_COMMAND) + message(FATAL_ERROR "${CMAKE_CURRENT_FUNCTION}: VARIABLE and COMMAND are required") + endif() + + if(NOT FIND_VERSION_VARIABLE) + set(FIND_VERSION_VARIABLE ${FIND_VARIABLE}_VERSION) + endif() + + list(GET FIND_COMMAND 0 FIND_NAME) + if(FIND_VERSION) + optionx(${FIND_VERSION_VARIABLE} STRING "The version of ${FIND_NAME} to find" DEFAULT "${FIND_VERSION}") + + function(find_command_version variable exe) + set(${variable} OFF PARENT_SCOPE) + + if(${exe} MATCHES "(go|zig)(\.exe)?$") + set(command ${exe} version) + else() + set(command ${exe} --version) + endif() + + execute_process( + COMMAND ${command} + RESULT_VARIABLE result + OUTPUT_VARIABLE output + OUTPUT_STRIP_TRAILING_WHITESPACE + ERROR_QUIET + ) + + if(NOT result EQUAL 0) + set(reason "exited with ${result}") + elseif(NOT output) + set(reason "no output") + else() + string(REGEX MATCH "([0-9]+)\\.([0-9]+)\\.([0-9]+)" match "${output}") + if(match) + set(version ${CMAKE_MATCH_1}.${CMAKE_MATCH_2}.${CMAKE_MATCH_3}) + set(reason "\"${version}\"") + else() + set(reason "no version") + endif() + endif() + + set_property(GLOBAL PROPERTY ${FIND_NAME} "${exe}: ${reason}" APPEND) + + if(version) + satisfies_range(${version} ${${FIND_VERSION_VARIABLE}} ${variable}) + set(${variable} ${${variable}} PARENT_SCOPE) + endif() + endfunction() + + set(FIND_VALIDATOR VALIDATOR find_command_version) + endif() + + find_program( + ${FIND_VARIABLE} + NAMES ${FIND_COMMAND} + PATHS ${FIND_PATHS} + ${FIND_VALIDATOR} + ) + + if(NOT FIND_REQUIRED STREQUAL "OFF" AND ${FIND_VARIABLE} MATCHES "NOTFOUND") + set(error "Command not found: \"${FIND_NAME}\"") + + if(FIND_VERSION) + set(error "${error} that satisfies version \"${${FIND_VERSION_VARIABLE}}\"") + 
endif() + + get_property(FIND_RESULTS GLOBAL PROPERTY ${FIND_NAME}) + if(NOT FIND_RESULTS MATCHES "NOTFOUND") + set(error "${error}\nThe following commands did not satisfy the requirement:") + foreach(result ${FIND_RESULTS}) + set(error "${error}\n ${result}") + endforeach() + endif() + + set(error "${error}\nTo fix this, either: + 1. Install ${FIND_NAME} ${${FIND_VERSION_VARIABLE}} + 2. Set -D${FIND_VERSION_VARIABLE}= to require a different version + 3. Set -D${FIND_VERSION_VARIABLE}=ignore to allow any version +") + message(FATAL_ERROR ${error}) + endif() + + if(${FIND_VARIABLE} MATCHES "NOTFOUND") + unset(${FIND_VARIABLE} PARENT_SCOPE) + else() + setx(${FIND_VARIABLE} ${${FIND_VARIABLE}} PARENT_SCOPE) + endif() +endfunction() + +# register_command() +# Description: +# Registers a command, similar to `add_custom_command()`, but has more validation and features. +# Arguments: +# COMMAND string[] - The command to run +# COMMENT string - The comment to display in the log +# CWD string - The working directory to run the command in +# ENVIRONMENT string[] - The environment variables to set (e.g. "DEBUG=1") +# TARGETS string[] - The targets that this command depends on +# SOURCES string[] - The files that this command depends on +# OUTPUTS string[] - The files that this command produces +# ARTIFACTS string[] - The files that this command produces, and uploads as an artifact in CI +# ALWAYS_RUN bool - If true, the command will always run +# TARGET string - The target to register the command with +# TARGET_PHASE string - The target phase to register the command with (e.g. PRE_BUILD, PRE_LINK, POST_BUILD) +# GROUP string - The group to register the command with (e.g. similar to JOB_POOL) +function(register_command) + set(options ALWAYS_RUN) + set(args COMMENT CWD TARGET TARGET_PHASE GROUP) + set(multiArgs COMMAND ENVIRONMENT TARGETS SOURCES OUTPUTS ARTIFACTS) + cmake_parse_arguments(CMD "${options}" "${args}" "${multiArgs}" ${ARGN}) + + if(NOT CMD_COMMAND) + message(FATAL_ERROR "register_command: COMMAND is required") + endif() + + if(NOT CMD_CWD) + set(CMD_CWD ${CWD}) + endif() + + if(CMD_ENVIRONMENT) + set(CMD_COMMAND ${CMAKE_COMMAND} -E env ${CMD_ENVIRONMENT} ${CMD_COMMAND}) + endif() + + if(NOT CMD_COMMENT) + string(JOIN " " CMD_COMMENT ${CMD_COMMAND}) + endif() + + set(CMD_COMMANDS COMMAND ${CMD_COMMAND}) + set(CMD_EFFECTIVE_DEPENDS) + + list(GET CMD_COMMAND 0 CMD_EXECUTABLE) + if(CMD_EXECUTABLE MATCHES "/|\\\\") + list(APPEND CMD_EFFECTIVE_DEPENDS ${CMD_EXECUTABLE}) + endif() + + foreach(target ${CMD_TARGETS}) + if(target MATCHES "/|\\\\") + message(FATAL_ERROR "register_command: TARGETS contains \"${target}\", if it's a path add it to SOURCES instead") + endif() + if(NOT TARGET ${target}) + message(FATAL_ERROR "register_command: TARGETS contains \"${target}\", but it's not a target") + endif() + list(APPEND CMD_EFFECTIVE_DEPENDS ${target}) + endforeach() + + foreach(source ${CMD_SOURCES}) + if(NOT source MATCHES "^(${CWD}|${BUILD_PATH}|${CACHE_PATH}|${VENDOR_PATH})") + message(FATAL_ERROR "register_command: SOURCES contains \"${source}\", if it's a path, make it absolute, otherwise add it to TARGETS instead") + endif() + list(APPEND CMD_EFFECTIVE_DEPENDS ${source}) + endforeach() + + if(NOT CMD_EFFECTIVE_DEPENDS AND NOT CMD_ALWAYS_RUN) + message(FATAL_ERROR "register_command: TARGETS or SOURCES is required") + endif() + + set(CMD_EFFECTIVE_OUTPUTS) + + foreach(output ${CMD_OUTPUTS}) + if(NOT output MATCHES "^(${CWD}|${BUILD_PATH}|${CACHE_PATH}|${VENDOR_PATH})") + message(FATAL_ERROR 
"register_command: OUTPUTS contains \"${output}\", if it's a path, make it absolute") + endif() + list(APPEND CMD_EFFECTIVE_OUTPUTS ${output}) + endforeach() + + foreach(artifact ${CMD_ARTIFACTS}) + if(NOT artifact MATCHES "^(${CWD}|${BUILD_PATH}|${CACHE_PATH}|${VENDOR_PATH})") + message(FATAL_ERROR "register_command: ARTIFACTS contains \"${artifact}\", if it's a path, make it absolute") + endif() + list(APPEND CMD_EFFECTIVE_OUTPUTS ${artifact}) + if(BUILDKITE) + file(RELATIVE_PATH filename ${BUILD_PATH} ${artifact}) + list(APPEND CMD_COMMANDS COMMAND ${CMAKE_COMMAND} -E chdir ${BUILD_PATH} buildkite-agent artifact upload ${filename}) + endif() + endforeach() + + foreach(output ${CMD_EFFECTIVE_OUTPUTS}) + get_source_file_property(generated ${output} GENERATED) + if(generated) + list(REMOVE_ITEM CMD_EFFECTIVE_OUTPUTS ${output}) + list(APPEND CMD_EFFECTIVE_OUTPUTS ${output}.always_run_${CMD_TARGET}) + endif() + endforeach() + + if(CMD_ALWAYS_RUN) + list(APPEND CMD_EFFECTIVE_OUTPUTS ${CMD_CWD}/.always_run_${CMD_TARGET}) + endif() + + if(CMD_TARGET_PHASE) + if(NOT CMD_TARGET) + message(FATAL_ERROR "register_command: TARGET is required when TARGET_PHASE is set") + endif() + if(NOT TARGET ${CMD_TARGET}) + message(FATAL_ERROR "register_command: TARGET is not a valid target: ${CMD_TARGET}") + endif() + add_custom_command( + TARGET ${CMD_TARGET} ${CMD_TARGET_PHASE} + COMMENT ${CMD_COMMENT} + WORKING_DIRECTORY ${CMD_CWD} + VERBATIM ${CMD_COMMANDS} + ) + set_property(TARGET ${CMD_TARGET} PROPERTY OUTPUT ${CMD_EFFECTIVE_OUTPUTS} APPEND) + set_property(TARGET ${CMD_TARGET} PROPERTY DEPENDS ${CMD_EFFECTIVE_DEPENDS} APPEND) + return() + endif() + + if(NOT CMD_EFFECTIVE_OUTPUTS) + message(FATAL_ERROR "register_command: OUTPUTS or ARTIFACTS is required, or set ALWAYS_RUN") + endif() + + if(CMD_TARGET) + if(TARGET ${CMD_TARGET}) + message(FATAL_ERROR "register_command: TARGET is already registered: ${CMD_TARGET}") + endif() + add_custom_target(${CMD_TARGET} + COMMENT ${CMD_COMMENT} + DEPENDS ${CMD_EFFECTIVE_OUTPUTS} + JOB_POOL ${CMD_GROUP} + ) + if(TARGET clone-${CMD_TARGET}) + add_dependencies(${CMD_TARGET} clone-${CMD_TARGET}) + endif() + endif() + + add_custom_command( + VERBATIM ${CMD_COMMANDS} + WORKING_DIRECTORY ${CMD_CWD} + COMMENT ${CMD_COMMENT} + DEPENDS ${CMD_EFFECTIVE_DEPENDS} + OUTPUT ${CMD_EFFECTIVE_OUTPUTS} + JOB_POOL ${CMD_GROUP} + ) +endfunction() + +# parse_package_json() +# Description: +# Parses a package.json file. 
+# Arguments: +# CWD string - The directory to look for the package.json file +# VERSION_VARIABLE string - The variable to set to the package version +# NODE_MODULES_VARIABLE string - The variable to set to list of node_modules sources +function(parse_package_json) + set(args CWD VERSION_VARIABLE NODE_MODULES_VARIABLE) + cmake_parse_arguments(NPM "" "${args}" "" ${ARGN}) + + if(NOT NPM_CWD) + set(NPM_CWD ${CWD}) + endif() + + set(NPM_PACKAGE_JSON_PATH ${NPM_CWD}/package.json) + + if(NOT EXISTS ${NPM_PACKAGE_JSON_PATH}) + message(FATAL_ERROR "parse_package_json: package.json not found: ${NPM_PACKAGE_JSON_PATH}") + endif() + + file(READ ${NPM_PACKAGE_JSON_PATH} NPM_PACKAGE_JSON) + if(NOT NPM_PACKAGE_JSON) + message(FATAL_ERROR "parse_package_json: failed to read package.json: ${NPM_PACKAGE_JSON_PATH}") + endif() + + if(NPM_VERSION_VARIABLE) + string(JSON NPM_VERSION ERROR_VARIABLE error GET "${NPM_PACKAGE_JSON}" version) + if(error) + message(FATAL_ERROR "parse_package_json: failed to read 'version': ${error}") + endif() + set(${NPM_VERSION_VARIABLE} ${NPM_VERSION} PARENT_SCOPE) + endif() + + if(NPM_NODE_MODULES_VARIABLE) + set(NPM_NODE_MODULES) + set(NPM_NODE_MODULES_PATH ${NPM_CWD}/node_modules) + set(NPM_NODE_MODULES_PROPERTIES "devDependencies" "dependencies") + + foreach(property ${NPM_NODE_MODULES_PROPERTIES}) + string(JSON NPM_${property} ERROR_VARIABLE error GET "${NPM_PACKAGE_JSON}" "${property}") + if(error MATCHES "not found") + continue() + endif() + if(error) + message(FATAL_ERROR "parse_package_json: failed to read '${property}': ${error}") + endif() + + string(JSON NPM_${property}_LENGTH ERROR_VARIABLE error LENGTH "${NPM_${property}}") + if(error) + message(FATAL_ERROR "parse_package_json: failed to read '${property}' length: ${error}") + endif() + + math(EXPR NPM_${property}_MAX_INDEX "${NPM_${property}_LENGTH} - 1") + foreach(i RANGE 0 ${NPM_${property}_MAX_INDEX}) + string(JSON NPM_${property}_${i} ERROR_VARIABLE error MEMBER "${NPM_${property}}" ${i}) + if(error) + message(FATAL_ERROR "parse_package_json: failed to index '${property}' at ${i}: ${error}") + endif() + list(APPEND NPM_NODE_MODULES ${NPM_NODE_MODULES_PATH}/${NPM_${property}_${i}}/package.json) + endforeach() + endforeach() + + set(${NPM_NODE_MODULES_VARIABLE} ${NPM_NODE_MODULES} PARENT_SCOPE) + endif() +endfunction() + +# register_bun_install() +# Description: +# Registers a command to run `bun install` in a directory. +# Arguments: +# CWD string - The directory to run `bun install` +# NODE_MODULES_VARIABLE string - The variable to set to list of node_modules sources +function(register_bun_install) + set(args CWD NODE_MODULES_VARIABLE) + cmake_parse_arguments(NPM "" "${args}" "" ${ARGN}) + + if(NOT NPM_CWD) + set(NPM_CWD ${CWD}) + endif() + + if(NPM_CWD STREQUAL ${CWD}) + set(NPM_COMMENT "bun install") + else() + set(NPM_COMMENT "bun install --cwd ${NPM_CWD}") + endif() + + parse_package_json( + CWD + ${NPM_CWD} + NODE_MODULES_VARIABLE + NPM_NODE_MODULES + ) + + if(NOT NPM_NODE_MODULES) + message(FATAL_ERROR "register_bun_install: ${NPM_CWD}/package.json does not have dependencies?") + endif() + + register_command( + COMMENT + ${NPM_COMMENT} + CWD + ${NPM_CWD} + COMMAND + ${BUN_EXECUTABLE} + install + --frozen-lockfile + SOURCES + ${NPM_CWD}/package.json + OUTPUTS + ${NPM_NODE_MODULES} + ) + + set(${NPM_NODE_MODULES_VARIABLE} ${NPM_NODE_MODULES} PARENT_SCOPE) +endfunction() + +# register_repository() +# Description: +# Registers a git repository. 
+# Arguments: +# NAME string - The name of the repository +# REPOSITORY string - The repository to clone +# BRANCH string - The branch to clone +# TAG string - The tag to clone +# COMMIT string - The commit to clone +# PATH string - The path to clone the repository to +# OUTPUTS string - The outputs of the repository +function(register_repository) + set(args NAME REPOSITORY BRANCH TAG COMMIT PATH) + set(multiArgs OUTPUTS) + cmake_parse_arguments(GIT "" "${args}" "${multiArgs}" ${ARGN}) + + if(NOT GIT_REPOSITORY) + message(FATAL_ERROR "git_clone: REPOSITORY is required") + endif() + + if(NOT GIT_BRANCH AND NOT GIT_TAG AND NOT GIT_COMMIT) + message(FATAL_ERROR "git_clone: COMMIT, TAG, or BRANCH is required") + endif() + + if(NOT GIT_PATH) + set(GIT_PATH ${VENDOR_PATH}/${GIT_NAME}) + endif() + + set(GIT_EFFECTIVE_OUTPUTS) + foreach(output ${GIT_OUTPUTS}) + list(APPEND GIT_EFFECTIVE_OUTPUTS ${GIT_PATH}/${output}) + endforeach() + + register_command( + TARGET + clone-${GIT_NAME} + COMMENT + "Cloning ${GIT_NAME}" + COMMAND + ${CMAKE_COMMAND} + -DGIT_PATH=${GIT_PATH} + -DGIT_REPOSITORY=${GIT_REPOSITORY} + -DGIT_NAME=${GIT_NAME} + -DGIT_COMMIT=${GIT_COMMIT} + -DGIT_TAG=${GIT_TAG} + -DGIT_BRANCH=${GIT_BRANCH} + -P ${CWD}/cmake/scripts/GitClone.cmake + OUTPUTS + ${GIT_PATH} + ${GIT_EFFECTIVE_OUTPUTS} + ) +endfunction() + +# register_cmake_command() +# Description: +# Registers a command that builds an external CMake project. +# Arguments: +# TARGET string - The target to register the command with +# ARGS string[] - The arguments to pass to CMake (e.g. -DKEY=VALUE) +# CWD string - The directory where the CMake files are located +# BUILD_PATH string - The path to build the project to +# LIB_PATH string - The path to the libraries +# TARGETS string[] - The targets to build from CMake +# LIBRARIES string[] - The libraries that are built +# INCLUDES string[] - The include paths +function(register_cmake_command) + set(args TARGET CWD BUILD_PATH LIB_PATH) + set(multiArgs ARGS TARGETS LIBRARIES INCLUDES) + # Use "MAKE" instead of "CMAKE" to prevent conflicts with CMake's own CMAKE_* variables + cmake_parse_arguments(MAKE "" "${args}" "${multiArgs}" ${ARGN}) + + if(NOT MAKE_TARGET) + message(FATAL_ERROR "register_cmake_command: TARGET is required") + endif() + + if(TARGET ${MAKE_TARGET}) + message(FATAL_ERROR "register_cmake_command: TARGET is already a target: ${MAKE_TARGET}") + endif() + + if(NOT MAKE_CWD) + set(MAKE_CWD ${VENDOR_PATH}/${MAKE_TARGET}) + endif() + + if(NOT MAKE_BUILD_PATH) + set(MAKE_BUILD_PATH ${BUILD_PATH}/${MAKE_TARGET}) + endif() + + if(MAKE_LIB_PATH) + set(MAKE_LIB_PATH ${MAKE_BUILD_PATH}/${MAKE_LIB_PATH}) + else() + set(MAKE_LIB_PATH ${MAKE_BUILD_PATH}) + endif() + + set(MAKE_EFFECTIVE_ARGS -B${MAKE_BUILD_PATH} ${CMAKE_ARGS}) + + set(setFlags GENERATOR BUILD_TYPE) + set(appendFlags C_FLAGS CXX_FLAGS LINKER_FLAGS) + set(specialFlags POSITION_INDEPENDENT_CODE) + set(flags ${setFlags} ${appendFlags} ${specialFlags}) + + foreach(arg ${MAKE_ARGS}) + foreach(flag ${flags}) + if(arg MATCHES "-DCMAKE_${flag}=(.*)") + if(DEFINED MAKE_${flag}) + message(FATAL_ERROR "register_cmake_command: CMAKE_${flag} was already set: \"${MAKE_${flag}}\"") + endif() + set(MAKE_${flag} ${CMAKE_MATCH_1}) + set(${arg}_USED ON) + endif() + endforeach() + if(NOT ${arg}_USED) + list(APPEND MAKE_EFFECTIVE_ARGS ${arg}) + endif() + endforeach() + + foreach(flag ${setFlags}) + if(NOT DEFINED MAKE_${flag} AND DEFINED CMAKE_${flag}) + set(MAKE_${flag} ${CMAKE_${flag}}) + endif() + endforeach() + + foreach(flag 
${appendFlags}) + if(MAKE_${flag}) + set(MAKE_${flag} "${CMAKE_${flag}} ${MAKE_${flag}}") + else() + set(MAKE_${flag} ${CMAKE_${flag}}) + endif() + endforeach() + + if(MAKE_POSITION_INDEPENDENT_CODE AND NOT WIN32) + set(MAKE_C_FLAGS "${MAKE_C_FLAGS} -fPIC") + set(MAKE_CXX_FLAGS "${MAKE_CXX_FLAGS} -fPIC") + elseif(APPLE) + set(MAKE_C_FLAGS "${MAKE_C_FLAGS} -fno-pic -fno-pie") + set(MAKE_CXX_FLAGS "${MAKE_CXX_FLAGS} -fno-pic -fno-pie") + endif() + + set(effectiveFlags ${setFlags} ${appendFlags}) + foreach(flag ${effectiveFlags}) + list(APPEND MAKE_EFFECTIVE_ARGS "-DCMAKE_${flag}=${MAKE_${flag}}") + endforeach() + + if(DEFINED FRESH) + list(APPEND MAKE_EFFECTIVE_ARGS --fresh) + endif() + + register_command( + COMMENT "Configuring ${MAKE_TARGET}" + TARGET configure-${MAKE_TARGET} + COMMAND ${CMAKE_COMMAND} ${MAKE_EFFECTIVE_ARGS} + CWD ${MAKE_CWD} + OUTPUTS ${MAKE_BUILD_PATH}/CMakeCache.txt + ) + + if(TARGET clone-${MAKE_TARGET}) + add_dependencies(configure-${MAKE_TARGET} clone-${MAKE_TARGET}) + endif() + + set(MAKE_BUILD_ARGS --build ${MAKE_BUILD_PATH} --config ${MAKE_BUILD_TYPE}) + + set(MAKE_EFFECTIVE_LIBRARIES) + set(MAKE_ARTIFACTS) + foreach(lib ${MAKE_LIBRARIES}) + if(lib MATCHES "^(WIN32|UNIX|APPLE)$") + if(${lib}) + continue() + else() + list(POP_BACK MAKE_ARTIFACTS) + endif() + else() + list(APPEND MAKE_EFFECTIVE_LIBRARIES ${lib}) + if(lib MATCHES "\\.") + list(APPEND MAKE_ARTIFACTS ${MAKE_LIB_PATH}/${lib}) + else() + list(APPEND MAKE_ARTIFACTS ${MAKE_LIB_PATH}/${CMAKE_STATIC_LIBRARY_PREFIX}${lib}${CMAKE_STATIC_LIBRARY_SUFFIX}) + endif() + endif() + endforeach() + + if(NOT MAKE_TARGETS) + set(MAKE_TARGETS ${MAKE_EFFECTIVE_LIBRARIES}) + endif() + + foreach(target ${MAKE_TARGETS}) + list(APPEND MAKE_BUILD_ARGS --target ${target}) + endforeach() + + set(MAKE_EFFECTIVE_INCLUDES) + foreach(include ${MAKE_INCLUDES}) + if(include STREQUAL ".") + list(APPEND MAKE_EFFECTIVE_INCLUDES ${MAKE_CWD}) + else() + list(APPEND MAKE_EFFECTIVE_INCLUDES ${MAKE_CWD}/${include}) + endif() + endforeach() + + register_command( + COMMENT "Building ${MAKE_TARGET}" + TARGET ${MAKE_TARGET} + TARGETS configure-${MAKE_TARGET} + COMMAND ${CMAKE_COMMAND} ${MAKE_BUILD_ARGS} + CWD ${MAKE_CWD} + ARTIFACTS ${MAKE_ARTIFACTS} + ) + + if(MAKE_EFFECTIVE_INCLUDES) + target_include_directories(${bun} PRIVATE ${MAKE_EFFECTIVE_INCLUDES}) + if(TARGET clone-${MAKE_TARGET} AND NOT BUN_LINK_ONLY) + add_dependencies(${bun} clone-${MAKE_TARGET}) + endif() + endif() + + # HACK: Workaround for duplicate symbols when linking mimalloc.o + # >| duplicate symbol '_mi_page_queue_append(mi_heap_s*, mi_page_queue_s*, mi_page_queue_s*)' in: + # >| mimalloc/CMakeFiles/mimalloc-obj.dir/src/static.c.o + # >| ld: 287 duplicate symbols for architecture arm64 + if(NOT BUN_LINK_ONLY OR NOT MAKE_ARTIFACTS MATCHES "static.c.o") + target_link_libraries(${bun} PRIVATE ${MAKE_ARTIFACTS}) + endif() + + if(BUN_LINK_ONLY) + target_sources(${bun} PRIVATE ${MAKE_ARTIFACTS}) + endif() +endfunction() + +# register_compiler_flag() +# Description: +# Registers a compiler flag, similar to `add_compile_options()`, but has more validation and features. 
+# Arguments: +# flags string[] - The flags to register +# DESCRIPTION string - The description of the flag +# LANGUAGES string[] - The languages to register the flag (default: C, CXX) +# TARGETS string[] - The targets to register the flag (default: all) +function(register_compiler_flags) + set(args DESCRIPTION) + set(multiArgs LANGUAGES TARGETS) + cmake_parse_arguments(COMPILER "" "${args}" "${multiArgs}" ${ARGN}) + + if(NOT COMPILER_LANGUAGES) + set(COMPILER_LANGUAGES C CXX) + endif() + + set(COMPILER_FLAGS) + foreach(flag ${COMPILER_UNPARSED_ARGUMENTS}) + if(flag STREQUAL "ON") + continue() + elseif(flag STREQUAL "OFF") + list(POP_BACK COMPILER_FLAGS) + elseif(flag MATCHES "^(-|/)") + list(APPEND COMPILER_FLAGS ${flag}) + else() + message(FATAL_ERROR "register_compiler_flags: Invalid flag: \"${flag}\"") + endif() + endforeach() + + foreach(target ${COMPILER_TARGETS}) + if(NOT TARGET ${target}) + message(FATAL_ERROR "register_compiler_flags: \"${target}\" is not a target") + endif() + endforeach() + + foreach(lang ${COMPILER_LANGUAGES}) + list(JOIN COMPILER_FLAGS " " COMPILER_FLAGS_STRING) + + if(NOT COMPILER_TARGETS) + set(CMAKE_${lang}_FLAGS "${CMAKE_${lang}_FLAGS} ${COMPILER_FLAGS_STRING}" PARENT_SCOPE) + endif() + + foreach(target ${COMPILER_TARGETS}) + set(${target}_CMAKE_${lang}_FLAGS "${${target}_CMAKE_${lang}_FLAGS} ${COMPILER_FLAGS_STRING}" PARENT_SCOPE) + endforeach() + endforeach() + + foreach(lang ${COMPILER_LANGUAGES}) + foreach(flag ${COMPILER_FLAGS}) + if(NOT COMPILER_TARGETS) + add_compile_options($<$:${flag}>) + endif() + + foreach(target ${COMPILER_TARGETS}) + get_target_property(type ${target} TYPE) + if(type MATCHES "EXECUTABLE|LIBRARY") + target_compile_options(${target} PRIVATE $<$:${flag}>) + endif() + endforeach() + endforeach() + endforeach() +endfunction() + +function(register_compiler_definitions) + +endfunction() + +# register_linker_flags() +# Description: +# Registers a linker flag, similar to `add_link_options()`. 
+# Arguments: +# flags string[] - The flags to register +# DESCRIPTION string - The description of the flag +function(register_linker_flags) + set(args DESCRIPTION) + cmake_parse_arguments(LINKER "" "${args}" "" ${ARGN}) + + foreach(flag ${LINKER_UNPARSED_ARGUMENTS}) + if(flag STREQUAL "ON") + continue() + elseif(flag STREQUAL "OFF") + list(POP_FRONT LINKER_FLAGS) + elseif(flag MATCHES "^(-|/)") + list(APPEND LINKER_FLAGS ${flag}) + else() + message(FATAL_ERROR "register_linker_flags: Invalid flag: \"${flag}\"") + endif() + endforeach() + + add_link_options(${LINKER_FLAGS}) +endfunction() + +function(print_compiler_flags) + get_property(targets DIRECTORY PROPERTY BUILDSYSTEM_TARGETS) + set(languages C CXX) + foreach(target ${targets}) + get_target_property(type ${target} TYPE) + message(STATUS "Target: ${target}") + foreach(lang ${languages}) + if(${target}_CMAKE_${lang}_FLAGS) + message(STATUS " ${lang} Flags: ${${target}_CMAKE_${lang}_FLAGS}") + endif() + endforeach() + endforeach() + foreach(lang ${languages}) + message(STATUS "Language: ${lang}") + if(CMAKE_${lang}_FLAGS) + message(STATUS " Flags: ${CMAKE_${lang}_FLAGS}") + endif() + endforeach() +endfunction() diff --git a/cmake/Options.cmake b/cmake/Options.cmake new file mode 100644 index 0000000000..7d15c98fbe --- /dev/null +++ b/cmake/Options.cmake @@ -0,0 +1,158 @@ +if(NOT CMAKE_SYSTEM_NAME OR NOT CMAKE_SYSTEM_PROCESSOR) + message(FATAL_ERROR "CMake included this file before project() was called") +endif() + +optionx(BUN_LINK_ONLY BOOL "If only the linking step should be built" DEFAULT OFF) +optionx(BUN_CPP_ONLY BOOL "If only the C++ part of Bun should be built" DEFAULT OFF) + +optionx(BUILDKITE BOOL "If Buildkite is enabled" DEFAULT OFF) +optionx(GITHUB_ACTIONS BOOL "If GitHub Actions is enabled" DEFAULT OFF) + +if(BUILDKITE) + optionx(BUILDKITE_COMMIT STRING "The commit hash") + optionx(BUILDKITE_MESSAGE STRING "The commit message") +endif() + +optionx(CMAKE_BUILD_TYPE "Debug|Release|RelWithDebInfo|MinSizeRel" "The build type to use" REQUIRED) + +if(CMAKE_BUILD_TYPE MATCHES "Release|RelWithDebInfo|MinSizeRel") + setx(RELEASE ON) +else() + setx(RELEASE OFF) +endif() + +if(CMAKE_BUILD_TYPE MATCHES "Debug|RelWithDebInfo") + setx(DEBUG ON) +else() + setx(DEBUG OFF) +endif() + +if(CMAKE_BUILD_TYPE MATCHES "MinSizeRel") + setx(ENABLE_SMOL ON) +endif() + +if(APPLE) + setx(OS "darwin") +elseif(WIN32) + setx(OS "windows") +elseif(LINUX) + setx(OS "linux") +else() + message(FATAL_ERROR "Unsupported operating system: ${CMAKE_SYSTEM_NAME}") +endif() + +if(CMAKE_SYSTEM_PROCESSOR MATCHES "aarch64|arm64|arm") + setx(ARCH "aarch64") +elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "amd64|x86_64|x64|AMD64") + setx(ARCH "x64") +else() + message(FATAL_ERROR "Unsupported architecture: ${CMAKE_SYSTEM_PROCESSOR}") +endif() + +if(ARCH STREQUAL "x64") + optionx(ENABLE_BASELINE BOOL "If baseline features should be used for older CPUs (e.g. 
disables AVX, AVX2)" DEFAULT OFF) +endif() + +optionx(ENABLE_LOGS BOOL "If debug logs should be enabled" DEFAULT ${DEBUG}) +optionx(ENABLE_ASSERTIONS BOOL "If debug assertions should be enabled" DEFAULT ${DEBUG}) + +if(BUILDKITE_MESSAGE AND BUILDKITE_MESSAGE MATCHES "\\[release build\\]") + message(STATUS "Switched to release build, since commit message contains: \"[release build]\"") + set(DEFAULT_CANARY OFF) +else() + set(DEFAULT_CANARY ON) +endif() + +optionx(ENABLE_CANARY BOOL "If canary features should be enabled" DEFAULT ${DEFAULT_CANARY}) + +if(ENABLE_CANARY AND BUILDKITE) + execute_process( + COMMAND buildkite-agent meta-data get "canary" + OUTPUT_VARIABLE DEFAULT_CANARY_REVISION + OUTPUT_STRIP_TRAILING_WHITESPACE + ) +elseif(ENABLE_CANARY) + set(DEFAULT_CANARY_REVISION "1") +else() + set(DEFAULT_CANARY_REVISION "0") +endif() + +optionx(CANARY_REVISION STRING "The canary revision of the build" DEFAULT ${DEFAULT_CANARY_REVISION}) + +if(RELEASE AND LINUX AND CI) + set(DEFAULT_LTO ON) +else() + set(DEFAULT_LTO OFF) +endif() + +optionx(ENABLE_LTO BOOL "If LTO (link-time optimization) should be used" DEFAULT ${DEFAULT_LTO}) + +if(LINUX) + optionx(ENABLE_VALGRIND BOOL "If Valgrind support should be enabled" DEFAULT OFF) +endif() + +optionx(ENABLE_PRETTIER BOOL "If prettier should be ran" DEFAULT OFF) + +if(USE_VALGRIND AND NOT USE_BASELINE) + message(WARNING "If valgrind is enabled, baseline must also be enabled") + setx(USE_BASELINE ON) +endif() + +if(BUILDKITE_COMMIT) + set(DEFAULT_REVISION ${BUILDKITE_COMMIT}) +else() + execute_process( + COMMAND git rev-parse HEAD + WORKING_DIRECTORY ${CWD} + OUTPUT_VARIABLE DEFAULT_REVISION + OUTPUT_STRIP_TRAILING_WHITESPACE + ERROR_QUIET + ) + if(NOT DEFAULT_REVISION AND NOT DEFINED ENV{GIT_SHA} AND NOT DEFINED ENV{GITHUB_SHA}) + set(DEFAULT_REVISION "unknown") + endif() +endif() + +optionx(REVISION STRING "The git revision of the build" DEFAULT ${DEFAULT_REVISION}) + +# Used in process.version, process.versions.node, napi, and elsewhere +optionx(NODEJS_VERSION STRING "The version of Node.js to report" DEFAULT "22.6.0") + +# Used in process.versions.modules and compared while loading V8 modules +optionx(NODEJS_ABI_VERSION STRING "The ABI version of Node.js to report" DEFAULT "127") + +if(APPLE) + set(DEFAULT_STATIC_SQLITE OFF) +else() + set(DEFAULT_STATIC_SQLITE ON) +endif() + +optionx(USE_STATIC_SQLITE BOOL "If SQLite should be statically linked" DEFAULT ${DEFAULT_STATIC_SQLITE}) + +set(DEFAULT_STATIC_LIBATOMIC ON) + +if(CMAKE_HOST_LINUX AND NOT WIN32 AND NOT APPLE) + execute_process( + COMMAND grep -w "NAME" /etc/os-release + OUTPUT_VARIABLE LINUX_DISTRO + OUTPUT_STRIP_TRAILING_WHITESPACE + ERROR_QUIET + ) + if(LINUX_DISTRO MATCHES "NAME=\"(Arch|Manjaro|Artix) Linux( ARM)?\"|NAME=\"openSUSE Tumbleweed\"") + set(DEFAULT_STATIC_LIBATOMIC OFF) + endif() +endif() + +optionx(USE_STATIC_LIBATOMIC BOOL "If libatomic should be statically linked" DEFAULT ${DEFAULT_STATIC_LIBATOMIC}) + +if(APPLE) + set(DEFAULT_WEBKIT_ICU OFF) +else() + set(DEFAULT_WEBKIT_ICU ON) +endif() + +optionx(USE_WEBKIT_ICU BOOL "Use the ICU libraries from WebKit" DEFAULT ${DEFAULT_WEBKIT_ICU}) + +optionx(ERROR_LIMIT STRING "Maximum number of errors to show when compiling C++ code" DEFAULT "100") + +list(APPEND CMAKE_ARGS -DCMAKE_EXPORT_COMPILE_COMMANDS=ON) diff --git a/cmake/Policies.cmake b/cmake/Policies.cmake new file mode 100644 index 0000000000..d55a4ae021 --- /dev/null +++ b/cmake/Policies.cmake @@ -0,0 +1,11 @@ +# Let the MSVC runtime be set using CMAKE_MSVC_RUNTIME_LIBRARY, 
instead of automatically. +# Since CMake 3.15. +cmake_policy(SET CMP0091 NEW) + +# If INTERPROCEDURAL_OPTIMIZATION is enabled and not supported by the compiler, throw an error. +# Since CMake 3.9. +cmake_policy(SET CMP0069 NEW) + +# Use CMAKE_{C,CXX}_STANDARD when evaluating try_compile(). +# Since CMake 3.8. +cmake_policy(SET CMP0067 NEW) diff --git a/cmake/analysis/RunClangFormat.cmake b/cmake/analysis/RunClangFormat.cmake new file mode 100644 index 0000000000..106ac54ef6 --- /dev/null +++ b/cmake/analysis/RunClangFormat.cmake @@ -0,0 +1,67 @@ +# https://clang.llvm.org/docs/ClangFormat.html + +file(GLOB BUN_H_SOURCES LIST_DIRECTORIES false ${CONFIGURE_DEPENDS} + ${CWD}/src/bun.js/bindings/*.h + ${CWD}/src/bun.js/modules/*.h +) + +set(CLANG_FORMAT_SOURCES ${BUN_C_SOURCES} ${BUN_CXX_SOURCES} ${BUN_H_SOURCES}) + +register_command( + TARGET + clang-format-check + COMMENT + "Running clang-format" + COMMAND + ${CLANG_FORMAT_PROGRAM} + -Werror + --dry-run + --verbose + ${CLANG_FORMAT_SOURCES} + ALWAYS_RUN +) + +register_command( + TARGET + clang-format + COMMENT + "Fixing clang-format" + COMMAND + ${CLANG_FORMAT_PROGRAM} + -i # edits files in-place + --verbose + ${CLANG_FORMAT_SOURCES} + ALWAYS_RUN +) + +if(GIT_CHANGED_SOURCES) + set(CLANG_FORMAT_CHANGED_SOURCES) + foreach(source ${CLANG_FORMAT_SOURCES}) + list(FIND GIT_CHANGED_SOURCES ${source} index) + if(NOT ${index} EQUAL -1) + list(APPEND CLANG_FORMAT_CHANGED_SOURCES ${source}) + endif() + endforeach() +endif() + +if(CLANG_FORMAT_CHANGED_SOURCES) + set(CLANG_FORMAT_DIFF_COMMAND ${CLANG_FORMAT_PROGRAM} + -i # edits files in-place + --verbose + ${CLANG_FORMAT_CHANGED_SOURCES} + ) +else() + set(CLANG_FORMAT_DIFF_COMMAND ${CMAKE_COMMAND} -E echo "No changed files for clang-format") +endif() + +register_command( + TARGET + clang-format-diff + COMMENT + "Running clang-format on changed files" + COMMAND + ${CLANG_FORMAT_DIFF_COMMAND} + CWD + ${BUILD_PATH} + ALWAYS_RUN +) diff --git a/cmake/analysis/RunClangTidy.cmake b/cmake/analysis/RunClangTidy.cmake new file mode 100644 index 0000000000..ee5782ade8 --- /dev/null +++ b/cmake/analysis/RunClangTidy.cmake @@ -0,0 +1,74 @@ +# https://clang.llvm.org/extra/clang-tidy/ + +set(CLANG_TIDY_SOURCES ${BUN_C_SOURCES} ${BUN_CXX_SOURCES}) + +set(CLANG_TIDY_COMMAND ${CLANG_TIDY_PROGRAM} + -p ${BUILD_PATH} + --config-file=${CWD}/.clang-tidy +) + +if(CMAKE_COLOR_DIAGNOSTICS) + list(APPEND CLANG_TIDY_COMMAND --use-color) +endif() + +register_command( + TARGET + clang-tidy + COMMENT + "Running clang-tidy" + COMMAND + ${CLANG_TIDY_COMMAND} + ${CLANG_TIDY_SOURCES} + --fix + --fix-errors + --fix-notes + CWD + ${BUILD_PATH} + ALWAYS_RUN +) + +register_command( + TARGET + clang-tidy-check + COMMENT + "Checking clang-tidy" + COMMAND + ${CLANG_TIDY_COMMAND} + ${CLANG_TIDY_SOURCES} + CWD + ${BUILD_PATH} + ALWAYS_RUN +) + +if(GIT_CHANGED_SOURCES) + set(CLANG_TIDY_CHANGED_SOURCES) + foreach(source ${CLANG_TIDY_SOURCES}) + list(FIND GIT_CHANGED_SOURCES ${source} index) + if(NOT ${index} EQUAL -1) + list(APPEND CLANG_TIDY_CHANGED_SOURCES ${source}) + endif() + endforeach() +endif() + +if(CLANG_TIDY_CHANGED_SOURCES) + set(CLANG_TIDY_DIFF_COMMAND ${CLANG_TIDY_PROGRAM} + ${CLANG_TIDY_CHANGED_SOURCES} + --fix + --fix-errors + --fix-notes + ) +else() + set(CLANG_TIDY_DIFF_COMMAND ${CMAKE_COMMAND} -E echo "No changed files for clang-tidy") +endif() + +register_command( + TARGET + clang-tidy-diff + COMMENT + "Running clang-tidy on changed files" + COMMAND + ${CLANG_TIDY_DIFF_COMMAND} + CWD + ${BUILD_PATH} + ALWAYS_RUN +) diff --git 
a/cmake/analysis/RunCppCheck.cmake b/cmake/analysis/RunCppCheck.cmake new file mode 100644 index 0000000000..a384a44863 --- /dev/null +++ b/cmake/analysis/RunCppCheck.cmake @@ -0,0 +1,33 @@ +# https://cppcheck.sourceforge.io/ + +find_command( + VARIABLE + CPPCHECK_EXECUTABLE + COMMAND + cppcheck + REQUIRED + OFF +) + +set(CPPCHECK_COMMAND ${CPPCHECK_EXECUTABLE} + --cppcheck-build-dir=${BUILD_PATH}/cppcheck + --project=${BUILD_PATH}/compile_commands.json + --clang=${CMAKE_CXX_COMPILER} + --std=c++${CMAKE_CXX_STANDARD} + --report-progress + --showtime=summary +) + +register_command( + TARGET + cppcheck + COMMENT + "Running cppcheck" + COMMAND + ${CMAKE_COMMAND} -E make_directory cppcheck + && ${CPPCHECK_COMMAND} + CWD + ${BUILD_PATH} + TARGETS + ${bun} +) diff --git a/cmake/analysis/RunCppLint.cmake b/cmake/analysis/RunCppLint.cmake new file mode 100644 index 0000000000..5b9264ecf5 --- /dev/null +++ b/cmake/analysis/RunCppLint.cmake @@ -0,0 +1,22 @@ +find_command( + VARIABLE + CPPLINT_PROGRAM + COMMAND + cpplint + REQUIRED + OFF +) + +register_command( + TARGET + cpplint + COMMENT + "Running cpplint" + COMMAND + ${CPPLINT_PROGRAM} + ${BUN_CPP_SOURCES} + CWD + ${BUILD_PATH} + TARGETS + ${bun} +) diff --git a/cmake/analysis/RunIWYU.cmake b/cmake/analysis/RunIWYU.cmake new file mode 100644 index 0000000000..0ea555f2f5 --- /dev/null +++ b/cmake/analysis/RunIWYU.cmake @@ -0,0 +1,67 @@ +# IWYU = "Include What You Use" +# https://include-what-you-use.org/ + +setx(IWYU_SOURCE_PATH ${CACHE_PATH}/iwyu-${LLVM_VERSION}) +setx(IWYU_BUILD_PATH ${IWYU_SOURCE_PATH}/build) +setx(IWYU_PROGRAM ${IWYU_BUILD_PATH}/bin/include-what-you-use) + +register_repository( + NAME + iwyu + REPOSITORY + include-what-you-use/include-what-you-use + BRANCH + clang_${LLVM_VERSION} + PATH + ${IWYU_SOURCE_PATH} +) + +register_command( + TARGET + build-iwyu + COMMENT + "Building iwyu" + COMMAND + ${CMAKE_COMMAND} + -B${IWYU_BUILD_PATH} + -G${CMAKE_GENERATOR} + -DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER} + -DCMAKE_CXX_COMPILER_LAUNCHER=${CMAKE_CXX_COMPILER_LAUNCHER} + -DIWYU_LLVM_ROOT_PATH=${LLVM_PREFIX} + && ${CMAKE_COMMAND} + --build ${IWYU_BUILD_PATH} + CWD + ${IWYU_SOURCE_PATH} + TARGETS + clone-iwyu +) + +find_command( + VARIABLE + PYTHON_EXECUTABLE + COMMAND + python3 + python + VERSION + >=3.0.0 + REQUIRED + OFF +) + +register_command( + TARGET + iwyu + COMMENT + "Running iwyu" + COMMAND + ${CMAKE_COMMAND} + -E env IWYU_BINARY=${IWYU_PROGRAM} + ${PYTHON_EXECUTABLE} + ${IWYU_SOURCE_PATH}/iwyu_tool.py + -p ${BUILD_PATH} + CWD + ${BUILD_PATH} + TARGETS + build-iwyu + ${bun} +) diff --git a/cmake/analysis/RunPrettier.cmake b/cmake/analysis/RunPrettier.cmake new file mode 100644 index 0000000000..8c8ceb1ba1 --- /dev/null +++ b/cmake/analysis/RunPrettier.cmake @@ -0,0 +1,123 @@ +if(CMAKE_HOST_WIN32) + setx(PRETTIER_EXECUTABLE ${CWD}/node_modules/.bin/prettier.exe) +else() + setx(PRETTIER_EXECUTABLE ${CWD}/node_modules/.bin/prettier) +endif() + +set(PRETTIER_PATHS + ${CWD}/src + ${CWD}/packages/bun-error + ${CWD}/packages/bun-types + ${CWD}/packages/bun-inspector-protocol + ${CWD}/packages/bun-inspector-frontend + ${CWD}/packages/bun-debug-adapter-protocol + ${CWD}/packages/bun-vscode + ${CWD}/test + ${CWD}/bench + ${CWD}/.vscode + ${CWD}/.buildkite + ${CWD}/.github +) + +set(PRETTIER_EXTENSIONS + *.jsonc? + *.ya?ml + *.jsx? + *.tsx? 
+ *.mjs + *.cjs + *.mts + *.cts +) + +set(PRETTIER_GLOBS) +foreach(path ${PRETTIER_PATHS}) + foreach(extension ${PRETTIER_EXTENSIONS}) + list(APPEND PRETTIER_GLOBS ${path}/${extension}) + endforeach() +endforeach() + +file(GLOB_RECURSE PRETTIER_SOURCES ${PRETTIER_GLOBS}) + +register_command( + COMMAND + ${BUN_EXECUTABLE} + install + --frozen-lockfile + SOURCES + ${CWD}/package.json + OUTPUTS + ${PRETTIER_EXECUTABLE} +) + +set(PRETTIER_COMMAND ${PRETTIER_EXECUTABLE} + --config=${CWD}/.prettierrc + --cache +) + +register_command( + TARGET + prettier + COMMENT + "Running prettier" + COMMAND + ${PRETTIER_COMMAND} + --write + ${PRETTIER_SOURCES} + ALWAYS_RUN +) + +register_command( + TARGET + prettier-extra + COMMENT + "Running prettier with extra plugins" + COMMAND + ${PRETTIER_COMMAND} + --write + --plugin=prettier-plugin-organize-imports + ${PRETTIER_SOURCES} + ALWAYS_RUN +) + +register_command( + TARGET + prettier-check + COMMENT + "Checking prettier" + COMMAND + ${PRETTIER_COMMAND} + --check + ${PRETTIER_SOURCES} + ALWAYS_RUN +) + +if(GIT_CHANGED_SOURCES) + set(PRETTIER_CHANGED_SOURCES) + foreach(source ${PRETTIER_SOURCES}) + list(FIND GIT_CHANGED_SOURCES ${source} index) + if(NOT ${index} EQUAL -1) + list(APPEND PRETTIER_CHANGED_SOURCES ${source}) + endif() + endforeach() +endif() + +if(PRETTIER_CHANGED_SOURCES) + set(PRETTIER_DIFF_COMMAND ${PRETTIER_COMMAND} + --write + --plugin=prettier-plugin-organize-imports + ${PRETTIER_CHANGED_SOURCES} + ) +else() + set(PRETTIER_DIFF_COMMAND ${CMAKE_COMMAND} -E echo "No changed files for prettier") +endif() + +register_command( + TARGET + prettier-diff + COMMENT + "Running prettier on changed files" + COMMAND + ${PRETTIER_DIFF_COMMAND} + ALWAYS_RUN +) diff --git a/cmake/analysis/RunZigFormat.cmake b/cmake/analysis/RunZigFormat.cmake new file mode 100644 index 0000000000..0ff7c23865 --- /dev/null +++ b/cmake/analysis/RunZigFormat.cmake @@ -0,0 +1,57 @@ +set(ZIG_FORMAT_SOURCES ${BUN_ZIG_SOURCES}) + +register_command( + TARGET + zig-format-check + COMMENT + "Checking zig fmt" + COMMAND + ${ZIG_EXECUTABLE} + fmt + --check + ${ZIG_FORMAT_SOURCES} + ALWAYS_RUN +) + +register_command( + TARGET + zig-format + COMMENT + "Running zig fmt" + COMMAND + ${ZIG_EXECUTABLE} + fmt + ${ZIG_FORMAT_SOURCES} + ALWAYS_RUN +) + +if(GIT_CHANGED_SOURCES) + set(ZIG_FORMAT_CHANGED_SOURCES) + foreach(source ${ZIG_FORMAT_SOURCES}) + list(FIND GIT_CHANGED_SOURCES ${source} index) + if(NOT ${index} EQUAL -1) + list(APPEND ZIG_FORMAT_CHANGED_SOURCES ${source}) + endif() + endforeach() +endif() + +if(ZIG_FORMAT_CHANGED_SOURCES) + set(ZIG_FORMAT_DIFF_COMMAND ${ZIG_EXECUTABLE} + fmt + ${ZIG_FORMAT_CHANGED_SOURCES} + ) +else() + set(ZIG_FORMAT_DIFF_COMMAND ${CMAKE_COMMAND} -E echo "No changed files for zig-format") +endif() + +register_command( + TARGET + zig-format-diff + COMMENT + "Running zig fmt on changed files" + COMMAND + ${ZIG_FORMAT_DIFF_COMMAND} + CWD + ${BUILD_PATH} + ALWAYS_RUN +) diff --git a/cmake/scripts/DownloadUrl.cmake b/cmake/scripts/DownloadUrl.cmake new file mode 100644 index 0000000000..c8801de005 --- /dev/null +++ b/cmake/scripts/DownloadUrl.cmake @@ -0,0 +1,129 @@ +get_filename_component(SCRIPT_NAME ${CMAKE_CURRENT_LIST_FILE} NAME) +message(STATUS "Running script: ${SCRIPT_NAME}") + +if(NOT DOWNLOAD_URL OR NOT DOWNLOAD_PATH) + message(FATAL_ERROR "DOWNLOAD_URL and DOWNLOAD_PATH are required") +endif() + +if(CMAKE_SYSTEM_NAME STREQUAL "Windows") + set(TMP_PATH $ENV{TEMP}) +else() + set(TMP_PATH $ENV{TMPDIR}) +endif() + +if(NOT TMP_PATH) + set(TMP_PATH 
${CMAKE_BINARY_DIR}/tmp) +endif() + +string(REGEX REPLACE "/+$" "" TMP_PATH ${TMP_PATH}) +string(REGEX REPLACE "[^a-zA-Z0-9]" "-" DOWNLOAD_ID ${DOWNLOAD_URL}) +string(RANDOM LENGTH 8 RANDOM_ID) + +set(DOWNLOAD_TMP_PATH ${TMP_PATH}/${DOWNLOAD_ID}-${RANDOM_ID}) +set(DOWNLOAD_TMP_FILE ${DOWNLOAD_TMP_PATH}/tmp) + +file(REMOVE_RECURSE ${DOWNLOAD_TMP_PATH}) + +if(DOWNLOAD_ACCEPT_HEADER) + set(DOWNLOAD_ACCEPT_HEADER "Accept: ${DOWNLOAD_ACCEPT_HEADER}") +else() + set(DOWNLOAD_ACCEPT_HEADER "Accept: */*") +endif() + +foreach(i RANGE 10) + set(DOWNLOAD_TMP_FILE_${i} ${DOWNLOAD_TMP_FILE}.${i}) + + if(i EQUAL 0) + message(STATUS "Downloading ${DOWNLOAD_URL}...") + else() + message(STATUS "Downloading ${DOWNLOAD_URL}... (retry ${i})") + endif() + + file(DOWNLOAD + ${DOWNLOAD_URL} + ${DOWNLOAD_TMP_FILE_${i}} + HTTPHEADER "User-Agent: cmake/${CMAKE_VERSION}" + HTTPHEADER ${DOWNLOAD_ACCEPT_HEADER} + STATUS DOWNLOAD_STATUS + INACTIVITY_TIMEOUT 60 + TIMEOUT 180 + SHOW_PROGRESS + ) + + list(GET DOWNLOAD_STATUS 0 DOWNLOAD_STATUS_CODE) + if(DOWNLOAD_STATUS_CODE EQUAL 0) + if(NOT EXISTS ${DOWNLOAD_TMP_FILE_${i}}) + message(WARNING "Download failed: result is ok, but file does not exist: ${DOWNLOAD_TMP_FILE_${i}}") + continue() + endif() + + file(RENAME ${DOWNLOAD_TMP_FILE_${i}} ${DOWNLOAD_TMP_FILE}) + break() + endif() + + list(GET DOWNLOAD_STATUS 1 DOWNLOAD_STATUS_TEXT) + file(REMOVE ${DOWNLOAD_TMP_FILE_${i}}) + message(WARNING "Download failed: ${DOWNLOAD_STATUS_CODE} ${DOWNLOAD_STATUS_TEXT}") +endforeach() + +if(NOT EXISTS ${DOWNLOAD_TMP_FILE}) + file(REMOVE_RECURSE ${DOWNLOAD_TMP_PATH}) + message(FATAL_ERROR "Download failed after too many attempts: ${DOWNLOAD_URL}") +endif() + +get_filename_component(DOWNLOAD_FILENAME ${DOWNLOAD_URL} NAME) +if(DOWNLOAD_FILENAME MATCHES "\\.(zip|tar|gz|xz)$") + message(STATUS "Extracting ${DOWNLOAD_FILENAME}...") + + set(DOWNLOAD_TMP_EXTRACT ${DOWNLOAD_TMP_PATH}/extract) + file(ARCHIVE_EXTRACT + INPUT ${DOWNLOAD_TMP_FILE} + DESTINATION ${DOWNLOAD_TMP_EXTRACT} + TOUCH + ) + + file(REMOVE ${DOWNLOAD_TMP_FILE}) + + if(DOWNLOAD_FILTERS) + list(TRANSFORM DOWNLOAD_FILTERS PREPEND ${DOWNLOAD_TMP_EXTRACT}/ OUTPUT_VARIABLE DOWNLOAD_GLOBS) + else() + set(DOWNLOAD_GLOBS ${DOWNLOAD_TMP_EXTRACT}/*) + endif() + + file(GLOB DOWNLOAD_TMP_EXTRACT_PATHS LIST_DIRECTORIES ON ${DOWNLOAD_GLOBS}) + list(LENGTH DOWNLOAD_TMP_EXTRACT_PATHS DOWNLOAD_COUNT) + + if(DOWNLOAD_COUNT EQUAL 0) + file(REMOVE_RECURSE ${DOWNLOAD_TMP_PATH}) + + if(DOWNLOAD_FILTERS) + message(FATAL_ERROR "Extract failed: No files found matching ${DOWNLOAD_FILTERS}") + else() + message(FATAL_ERROR "Extract failed: No files found") + endif() + endif() + + if(DOWNLOAD_FILTERS) + set(DOWNLOAD_TMP_FILE ${DOWNLOAD_TMP_EXTRACT_PATHS}) + elseif(DOWNLOAD_COUNT EQUAL 1) + list(GET DOWNLOAD_TMP_EXTRACT_PATHS 0 DOWNLOAD_TMP_FILE) + get_filename_component(DOWNLOAD_FILENAME ${DOWNLOAD_TMP_FILE} NAME) + message(STATUS "Hoisting ${DOWNLOAD_FILENAME}...") + else() + set(DOWNLOAD_TMP_FILE ${DOWNLOAD_TMP_EXTRACT}) + endif() +endif() + +if(DOWNLOAD_FILTERS) + foreach(file ${DOWNLOAD_TMP_FILE}) + file(RENAME ${file} ${DOWNLOAD_PATH}) + endforeach() +else() + file(REMOVE_RECURSE ${DOWNLOAD_PATH}) + get_filename_component(DOWNLOAD_PARENT_PATH ${DOWNLOAD_PATH} DIRECTORY) + file(MAKE_DIRECTORY ${DOWNLOAD_PARENT_PATH}) + file(RENAME ${DOWNLOAD_TMP_FILE} ${DOWNLOAD_PATH}) +endif() + +file(REMOVE_RECURSE ${DOWNLOAD_TMP_PATH}) +message(STATUS "Saved ${DOWNLOAD_PATH}") diff --git a/cmake/scripts/DownloadZig.cmake b/cmake/scripts/DownloadZig.cmake new file 
mode 100644 index 0000000000..f7f9d8789e --- /dev/null +++ b/cmake/scripts/DownloadZig.cmake @@ -0,0 +1,96 @@ +get_filename_component(SCRIPT_NAME ${CMAKE_CURRENT_LIST_FILE} NAME) +message(STATUS "Running script: ${SCRIPT_NAME}") + +if(NOT ZIG_PATH OR NOT ZIG_COMMIT OR NOT ZIG_VERSION) + message(FATAL_ERROR "ZIG_PATH, ZIG_COMMIT, and ZIG_VERSION are required") +endif() + +if(CMAKE_HOST_APPLE) + set(ZIG_OS "macos") +elseif(CMAKE_HOST_WIN32) + set(ZIG_OS "windows") +elseif(CMAKE_HOST_UNIX) + set(ZIG_OS "linux") +else() + message(FATAL_ERROR "Unsupported operating system: ${CMAKE_HOST_SYSTEM_NAME}") +endif() + +# In script mode, using -P, this variable is not set +if(NOT DEFINED CMAKE_HOST_SYSTEM_PROCESSOR) + cmake_host_system_information(RESULT CMAKE_HOST_SYSTEM_PROCESSOR QUERY OS_PLATFORM) +endif() + +if(CMAKE_HOST_SYSTEM_PROCESSOR MATCHES "arm64|ARM64|aarch64|AARCH64") + set(ZIG_ARCH "aarch64") +elseif(CMAKE_HOST_SYSTEM_PROCESSOR MATCHES "amd64|AMD64|x86_64|X86_64|x64|X64") + set(ZIG_ARCH "x86_64") +else() + message(FATAL_ERROR "Unsupported architecture: ${CMAKE_HOST_SYSTEM_PROCESSOR}") +endif() + +set(ZIG_NAME zig-${ZIG_OS}-${ZIG_ARCH}-${ZIG_VERSION}) + +if(CMAKE_HOST_WIN32) + set(ZIG_EXE "zig.exe") + set(ZIG_FILENAME ${ZIG_NAME}.zip) +else() + set(ZIG_EXE "zig") + set(ZIG_FILENAME ${ZIG_NAME}.tar.xz) +endif() + +set(ZIG_DOWNLOAD_URL https://ziglang.org/download/${ZIG_VERSION}/${ZIG_FILENAME}) + +execute_process( + COMMAND + ${CMAKE_COMMAND} + -DDOWNLOAD_URL=${ZIG_DOWNLOAD_URL} + -DDOWNLOAD_PATH=${ZIG_PATH} + -P ${CMAKE_CURRENT_LIST_DIR}/DownloadUrl.cmake + ERROR_STRIP_TRAILING_WHITESPACE + ERROR_VARIABLE + ZIG_DOWNLOAD_ERROR + RESULT_VARIABLE + ZIG_DOWNLOAD_RESULT +) + +if(NOT ZIG_DOWNLOAD_RESULT EQUAL 0) + message(FATAL_ERROR "Download failed: ${ZIG_DOWNLOAD_ERROR}") +endif() + +if(NOT EXISTS ${ZIG_PATH}/${ZIG_EXE}) + message(FATAL_ERROR "Executable not found: \"${ZIG_PATH}/${ZIG_EXE}\"") +endif() + +# Tools like VSCode need a stable path to the zig executable, on both Unix and Windows +# To workaround this, we create a `bun.exe` symlink on Unix. +if(NOT WIN32) + file(CREATE_LINK ${ZIG_PATH}/${ZIG_EXE} ${ZIG_PATH}/zig.exe SYMBOLIC) +endif() + +set(ZIG_REPOSITORY_PATH ${ZIG_PATH}/repository) + +execute_process( + COMMAND + ${CMAKE_COMMAND} + -DGIT_PATH=${ZIG_REPOSITORY_PATH} + -DGIT_REPOSITORY=oven-sh/zig + -DGIT_COMMIT=${ZIG_COMMIT} + -P ${CMAKE_CURRENT_LIST_DIR}/GitClone.cmake + ERROR_STRIP_TRAILING_WHITESPACE + ERROR_VARIABLE + ZIG_REPOSITORY_ERROR + RESULT_VARIABLE + ZIG_REPOSITORY_RESULT +) + +if(NOT ZIG_REPOSITORY_RESULT EQUAL 0) + message(FATAL_ERROR "Download failed: ${ZIG_REPOSITORY_ERROR}") +endif() + +file(REMOVE_RECURSE ${ZIG_PATH}/lib) + +# Use copy_directory instead of file(RENAME) because there were +# race conditions in CI where some files were not copied. 
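+# `cmake -E copy_directory` recursively copies the Zig standard library out of
+# the cloned repository into ${ZIG_PATH}/lib.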
+execute_process(COMMAND ${CMAKE_COMMAND} -E copy_directory ${ZIG_REPOSITORY_PATH}/lib ${ZIG_PATH}/lib) + +file(REMOVE_RECURSE ${ZIG_REPOSITORY_PATH}) diff --git a/cmake/scripts/GitClone.cmake b/cmake/scripts/GitClone.cmake new file mode 100644 index 0000000000..d02f0228b3 --- /dev/null +++ b/cmake/scripts/GitClone.cmake @@ -0,0 +1,85 @@ +get_filename_component(SCRIPT_NAME ${CMAKE_CURRENT_LIST_FILE} NAME) +message(STATUS "Running script: ${SCRIPT_NAME}") + +if(NOT GIT_PATH OR NOT GIT_REPOSITORY) + message(FATAL_ERROR "GIT_PATH and GIT_REPOSITORY are required") +endif() + +if(GIT_COMMIT) + set(GIT_REF ${GIT_COMMIT}) +elseif(GIT_TAG) + set(GIT_REF refs/tags/${GIT_TAG}) +elseif(GIT_BRANCH) + set(GIT_REF refs/heads/${GIT_BRANCH}) +else() + message(FATAL_ERROR "GIT_COMMIT, GIT_TAG, or GIT_BRANCH are required") +endif() + +string(REGEX MATCH "([^/]+)$" GIT_ORIGINAL_NAME ${GIT_REPOSITORY}) +if(NOT GIT_NAME) + set(GIT_NAME ${GIT_ORIGINAL_NAME}) +endif() + +set(GIT_DOWNLOAD_URL https://github.com/${GIT_REPOSITORY}/archive/${GIT_REF}.tar.gz) + +message(STATUS "Cloning ${GIT_REPOSITORY} at ${GIT_REF}...") +execute_process( + COMMAND + ${CMAKE_COMMAND} + -DDOWNLOAD_URL=${GIT_DOWNLOAD_URL} + -DDOWNLOAD_PATH=${GIT_PATH} + -DDOWNLOAD_FILTERS=${GIT_FILTERS} + -P ${CMAKE_CURRENT_LIST_DIR}/DownloadUrl.cmake + ERROR_STRIP_TRAILING_WHITESPACE + ERROR_VARIABLE + GIT_ERROR + RESULT_VARIABLE + GIT_RESULT +) + +if(NOT GIT_RESULT EQUAL 0) + message(FATAL_ERROR "Clone failed: ${GIT_ERROR}") +endif() + +file(GLOB_RECURSE GIT_PATCH_PATHS ${CMAKE_SOURCE_DIR}/patches/${GIT_NAME}/*) +list(LENGTH GIT_PATCH_PATHS GIT_PATCH_COUNT) + +if(GIT_PATCH_COUNT GREATER 0) + find_program(GIT_PROGRAM git REQUIRED) + + foreach(GIT_PATCH ${GIT_PATCH_PATHS}) + get_filename_component(GIT_PATCH_NAME ${GIT_PATCH} NAME) + + if(GIT_PATCH_NAME MATCHES "\\.patch$") + message(STATUS "Applying patch ${GIT_PATCH_NAME}...") + execute_process( + COMMAND + ${GIT_PROGRAM} + apply + --ignore-whitespace + --ignore-space-change + --no-index + --verbose + ${GIT_PATCH} + WORKING_DIRECTORY + ${GIT_PATH} + ERROR_STRIP_TRAILING_WHITESPACE + ERROR_VARIABLE + GIT_PATCH_ERROR + RESULT_VARIABLE + GIT_PATCH_RESULT + ) + + if(NOT GIT_PATCH_RESULT EQUAL 0 AND NOT GIT_PATCH_ERROR MATCHES "cleanly") + file(REMOVE_RECURSE ${GIT_PATH}) + message(FATAL_ERROR "Failed to apply patch: ${GIT_PATCH_ERROR}") + endif() + else() + message(STATUS "Copying file ${GIT_PATCH_NAME}...") + file(COPY ${GIT_PATCH} DESTINATION ${GIT_PATH}) + endif() + endforeach() +endif() + +file(WRITE ${GIT_PATH}/.ref ${GIT_REF}) +message(STATUS "Cloned ${GIT_REPOSITORY}") diff --git a/cmake/targets/BuildBoringSSL.cmake b/cmake/targets/BuildBoringSSL.cmake new file mode 100644 index 0000000000..28575eb35f --- /dev/null +++ b/cmake/targets/BuildBoringSSL.cmake @@ -0,0 +1,21 @@ +register_repository( + NAME + boringssl + REPOSITORY + oven-sh/boringssl + COMMIT + 29a2cd359458c9384694b75456026e4b57e3e567 +) + +register_cmake_command( + TARGET + boringssl + LIBRARIES + crypto + ssl + decrepit + ARGS + -DBUILD_SHARED_LIBS=OFF + INCLUDES + include +) diff --git a/cmake/targets/BuildBrotli.cmake b/cmake/targets/BuildBrotli.cmake new file mode 100644 index 0000000000..f9bc8d9601 --- /dev/null +++ b/cmake/targets/BuildBrotli.cmake @@ -0,0 +1,31 @@ +register_repository( + NAME + brotli + REPOSITORY + google/brotli + TAG + v1.1.0 +) + +# Tests fail with "BrotliDecompressionError" when LTO is enabled +# only on Linux x64 (non-baseline). It's a mystery. 
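+# The workaround below appends -fno-lto to brotli's C flags on that
+# configuration only; other configurations keep the flags inherited from the
+# top-level build.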
+if(LINUX AND CMAKE_SYSTEM_PROCESSOR MATCHES "x86_64|X86_64|x64|X64|amd64|AMD64" AND NOT ENABLE_BASELINE) + set(BROTLI_CMAKE_ARGS "-DCMAKE_C_FLAGS=-fno-lto") +endif() + +register_cmake_command( + TARGET + brotli + LIBRARIES + brotlicommon + brotlidec + brotlienc + ARGS + -DBUILD_SHARED_LIBS=OFF + -DBROTLI_BUILD_TOOLS=OFF + -DBROTLI_EMSCRIPTEN=OFF + -DBROTLI_DISABLE_TESTS=ON + ${BROTLI_CMAKE_ARGS} + INCLUDES + c/include +) diff --git a/cmake/targets/BuildBun.cmake b/cmake/targets/BuildBun.cmake new file mode 100644 index 0000000000..ec513d84aa --- /dev/null +++ b/cmake/targets/BuildBun.cmake @@ -0,0 +1,1235 @@ +if(DEBUG) + set(bun bun-debug) +elseif(ENABLE_SMOL) + set(bun bun-smol-profile) + set(bunStrip bun-smol) +elseif(ENABLE_VALGRIND) + set(bun bun-valgrind) +elseif(ENABLE_ASSERTIONS) + set(bun bun-assertions) +else() + set(bun bun-profile) + set(bunStrip bun) +endif() + +set(bunExe ${bun}${CMAKE_EXECUTABLE_SUFFIX}) + +if(bunStrip) + set(bunStripExe ${bunStrip}${CMAKE_EXECUTABLE_SUFFIX}) + set(buns ${bun} ${bunStrip}) +else() + set(buns ${bun}) +endif() + +optionx(CODEGEN_PATH FILEPATH "Path to the codegen directory" DEFAULT ${BUILD_PATH}/codegen) + +if(RELEASE OR CI) + set(DEFAULT_CODEGEN_EMBED ON) +else() + set(DEFAULT_CODEGEN_EMBED OFF) +endif() + +optionx(CODEGEN_EMBED BOOL "If codegen files should be embedded in the binary" DEFAULT ${DEFAULT_CODEGEN_EMBED}) + +if((NOT DEFINED CONFIGURE_DEPENDS AND NOT CI) OR CONFIGURE_DEPENDS) + set(CONFIGURE_DEPENDS "CONFIGURE_DEPENDS") +else() + set(CONFIGURE_DEPENDS "") +endif() + +# --- Codegen --- + +set(BUN_ERROR_SOURCE ${CWD}/packages/bun-error) + +file(GLOB BUN_ERROR_SOURCES ${CONFIGURE_DEPENDS} + ${BUN_ERROR_SOURCE}/*.json + ${BUN_ERROR_SOURCE}/*.ts + ${BUN_ERROR_SOURCE}/*.tsx + ${BUN_ERROR_SOURCE}/*.css + ${BUN_ERROR_SOURCE}/img/* +) + +set(BUN_ERROR_OUTPUT ${CODEGEN_PATH}/bun-error) +set(BUN_ERROR_OUTPUTS + ${BUN_ERROR_OUTPUT}/index.js + ${BUN_ERROR_OUTPUT}/bun-error.css +) + +register_bun_install( + CWD + ${BUN_ERROR_SOURCE} + NODE_MODULES_VARIABLE + BUN_ERROR_NODE_MODULES +) + +register_command( + TARGET + bun-error + COMMENT + "Building bun-error" + CWD + ${BUN_ERROR_SOURCE} + COMMAND + ${ESBUILD_EXECUTABLE} ${ESBUILD_ARGS} + index.tsx + bun-error.css + --outdir=${BUN_ERROR_OUTPUT} + --define:process.env.NODE_ENV=\"'production'\" + --minify + --bundle + --platform=browser + --format=esm + SOURCES + ${BUN_ERROR_SOURCES} + ${BUN_ERROR_NODE_MODULES} + OUTPUTS + ${BUN_ERROR_OUTPUTS} +) + +set(BUN_FALLBACK_DECODER_SOURCE ${CWD}/src/fallback.ts) +set(BUN_FALLBACK_DECODER_OUTPUT ${CODEGEN_PATH}/fallback-decoder.js) + +register_command( + TARGET + bun-fallback-decoder + COMMENT + "Building fallback-decoder.js" + COMMAND + ${ESBUILD_EXECUTABLE} ${ESBUILD_ARGS} + ${BUN_FALLBACK_DECODER_SOURCE} + --outfile=${BUN_FALLBACK_DECODER_OUTPUT} + --target=esnext + --bundle + --format=iife + --platform=browser + --minify + SOURCES + ${BUN_FALLBACK_DECODER_SOURCE} + OUTPUTS + ${BUN_FALLBACK_DECODER_OUTPUT} +) + +set(BUN_RUNTIME_JS_SOURCE ${CWD}/src/runtime.bun.js) +set(BUN_RUNTIME_JS_OUTPUT ${CODEGEN_PATH}/runtime.out.js) + +register_command( + TARGET + bun-runtime-js + COMMENT + "Building src/runtime.out.js" + COMMAND + ${ESBUILD_EXECUTABLE} ${ESBUILD_ARGS} + ${BUN_RUNTIME_JS_SOURCE} + --outfile=${BUN_RUNTIME_JS_OUTPUT} + --define:process.env.NODE_ENV=\"'production'\" + --target=esnext + --bundle + --format=esm + --platform=node + --minify + --external:/bun:* + SOURCES + ${BUN_RUNTIME_JS_SOURCE} + OUTPUTS + ${BUN_RUNTIME_JS_OUTPUT} +) + 
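+# Each codegen step above follows the same shape: a register_command() whose
+# SOURCES and OUTPUTS let the generator re-run the step only when its inputs
+# change. For reference, a new step would look roughly like the sketch below;
+# the target name, script path, and output header are hypothetical and shown
+# for illustration only.
+#
+# register_command(
+#   TARGET
+#     bun-example-codegen
+#   COMMENT
+#     "Generating Example.h"
+#   COMMAND
+#     ${BUN_EXECUTABLE}
+#     run
+#     ${CWD}/src/codegen/generate-example.ts
+#     ${CODEGEN_PATH}
+#   SOURCES
+#     ${CWD}/src/codegen/generate-example.ts
+#   OUTPUTS
+#     ${CODEGEN_PATH}/Example.h
+# )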
+set(BUN_NODE_FALLBACKS_SOURCE ${CWD}/src/node-fallbacks) + +file(GLOB BUN_NODE_FALLBACKS_SOURCES ${CONFIGURE_DEPENDS} + ${BUN_NODE_FALLBACKS_SOURCE}/*.js +) + +set(BUN_NODE_FALLBACKS_OUTPUT ${CODEGEN_PATH}/node-fallbacks) +set(BUN_NODE_FALLBACKS_OUTPUTS) +foreach(source ${BUN_NODE_FALLBACKS_SOURCES}) + get_filename_component(filename ${source} NAME) + list(APPEND BUN_NODE_FALLBACKS_OUTPUTS ${BUN_NODE_FALLBACKS_OUTPUT}/${filename}) +endforeach() + +register_bun_install( + CWD + ${BUN_NODE_FALLBACKS_SOURCE} + NODE_MODULES_VARIABLE + BUN_NODE_FALLBACKS_NODE_MODULES +) + +# This command relies on an older version of `esbuild`, which is why +# it uses ${BUN_EXECUTABLE} x instead of ${ESBUILD_EXECUTABLE}. +register_command( + TARGET + bun-node-fallbacks + COMMENT + "Building node-fallbacks/*.js" + CWD + ${BUN_NODE_FALLBACKS_SOURCE} + COMMAND + ${BUN_EXECUTABLE} x + esbuild ${ESBUILD_ARGS} + ${BUN_NODE_FALLBACKS_SOURCES} + --outdir=${BUN_NODE_FALLBACKS_OUTPUT} + --format=esm + --minify + --bundle + --platform=browser + SOURCES + ${BUN_NODE_FALLBACKS_SOURCES} + ${BUN_NODE_FALLBACKS_NODE_MODULES} + OUTPUTS + ${BUN_NODE_FALLBACKS_OUTPUTS} +) + +set(BUN_ERROR_CODE_SCRIPT ${CWD}/src/codegen/generate-node-errors.ts) + +set(BUN_ERROR_CODE_SOURCES + ${BUN_ERROR_CODE_SCRIPT} + ${CWD}/src/bun.js/bindings/ErrorCode.ts + ${CWD}/src/bun.js/bindings/ErrorCode.cpp + ${CWD}/src/bun.js/bindings/ErrorCode.h +) + +set(BUN_ERROR_CODE_OUTPUTS + ${CODEGEN_PATH}/ErrorCode+List.h + ${CODEGEN_PATH}/ErrorCode+Data.h + ${CODEGEN_PATH}/ErrorCode.zig +) + +register_command( + TARGET + bun-error-code + COMMENT + "Generating ErrorCode.{zig,h}" + COMMAND + ${BUN_EXECUTABLE} + run + ${BUN_ERROR_CODE_SCRIPT} + ${CODEGEN_PATH} + SOURCES + ${BUN_ERROR_CODE_SOURCES} + OUTPUTS + ${BUN_ERROR_CODE_OUTPUTS} +) + +set(BUN_ZIG_GENERATED_CLASSES_SCRIPT ${CWD}/src/codegen/generate-classes.ts) + +file(GLOB BUN_ZIG_GENERATED_CLASSES_SOURCES ${CONFIGURE_DEPENDS} + ${CWD}/src/bun.js/*.classes.ts + ${CWD}/src/bun.js/api/*.classes.ts + ${CWD}/src/bun.js/node/*.classes.ts + ${CWD}/src/bun.js/test/*.classes.ts + ${CWD}/src/bun.js/webcore/*.classes.ts +) + +set(BUN_ZIG_GENERATED_CLASSES_OUTPUTS + ${CODEGEN_PATH}/ZigGeneratedClasses.h + ${CODEGEN_PATH}/ZigGeneratedClasses.cpp + ${CODEGEN_PATH}/ZigGeneratedClasses+lazyStructureHeader.h + ${CODEGEN_PATH}/ZigGeneratedClasses+DOMClientIsoSubspaces.h + ${CODEGEN_PATH}/ZigGeneratedClasses+DOMIsoSubspaces.h + ${CODEGEN_PATH}/ZigGeneratedClasses+lazyStructureImpl.h + ${CODEGEN_PATH}/ZigGeneratedClasses.zig +) + +register_command( + TARGET + bun-zig-generated-classes + COMMENT + "Generating ZigGeneratedClasses.{zig,cpp,h}" + COMMAND + ${BUN_EXECUTABLE} + run + ${BUN_ZIG_GENERATED_CLASSES_SCRIPT} + ${BUN_ZIG_GENERATED_CLASSES_SOURCES} + ${CODEGEN_PATH} + SOURCES + ${BUN_ZIG_GENERATED_CLASSES_SCRIPT} + ${BUN_ZIG_GENERATED_CLASSES_SOURCES} + OUTPUTS + ${BUN_ZIG_GENERATED_CLASSES_OUTPUTS} +) + +set(BUN_JAVASCRIPT_CODEGEN_SCRIPT ${CWD}/src/codegen/bundle-modules.ts) + +file(GLOB_RECURSE BUN_JAVASCRIPT_SOURCES ${CONFIGURE_DEPENDS} + ${CWD}/src/js/*.js + ${CWD}/src/js/*.ts +) + +file(GLOB BUN_JAVASCRIPT_CODEGEN_SOURCES ${CONFIGURE_DEPENDS} + ${CWD}/src/codegen/*.ts +) + +list(APPEND BUN_JAVASCRIPT_CODEGEN_SOURCES + ${CWD}/src/bun.js/bindings/InternalModuleRegistry.cpp +) + +set(BUN_JAVASCRIPT_OUTPUTS + ${CODEGEN_PATH}/WebCoreJSBuiltins.cpp + ${CODEGEN_PATH}/WebCoreJSBuiltins.h + ${CODEGEN_PATH}/InternalModuleRegistryConstants.h + ${CODEGEN_PATH}/InternalModuleRegistry+createInternalModuleById.h + 
${CODEGEN_PATH}/InternalModuleRegistry+enum.h + ${CODEGEN_PATH}/InternalModuleRegistry+numberOfModules.h + ${CODEGEN_PATH}/NativeModuleImpl.h + ${CODEGEN_PATH}/ResolvedSourceTag.zig + ${CODEGEN_PATH}/SyntheticModuleType.h + ${CODEGEN_PATH}/GeneratedJS2Native.h + # Zig will complain if files are outside of the source directory + ${CWD}/src/bun.js/bindings/GeneratedJS2Native.zig +) + +register_command( + TARGET + bun-js-modules + COMMENT + "Generating JavaScript modules" + COMMAND + ${BUN_EXECUTABLE} + run + ${BUN_JAVASCRIPT_CODEGEN_SCRIPT} + --debug=${DEBUG} + ${BUILD_PATH} + SOURCES + ${BUN_JAVASCRIPT_SOURCES} + ${BUN_JAVASCRIPT_CODEGEN_SOURCES} + ${BUN_JAVASCRIPT_CODEGEN_SCRIPT} + OUTPUTS + ${BUN_JAVASCRIPT_OUTPUTS} +) + +set(BUN_BAKE_RUNTIME_CODEGEN_SCRIPT ${CWD}/src/codegen/bake-codegen.ts) + +file(GLOB_RECURSE BUN_BAKE_RUNTIME_SOURCES ${CONFIGURE_DEPENDS} + ${CWD}/src/bake/*.ts + ${CWD}/src/bake/*/*.ts + ${CWD}/src/bake/*/*.css +) + +list(APPEND BUN_BAKE_RUNTIME_CODEGEN_SOURCES + ${CWD}/src/bun.js/bindings/InternalModuleRegistry.cpp +) + +set(BUN_BAKE_RUNTIME_OUTPUTS + ${CODEGEN_PATH}/bake.client.js + ${CODEGEN_PATH}/bake.server.js +) + +register_command( + TARGET + bun-bake-codegen + COMMENT + "Bundling Kit Runtime" + COMMAND + ${BUN_EXECUTABLE} + run + ${BUN_BAKE_RUNTIME_CODEGEN_SCRIPT} + --debug=${DEBUG} + --codegen_root=${CODEGEN_PATH} + SOURCES + ${BUN_BAKE_RUNTIME_SOURCES} + ${BUN_BAKE_RUNTIME_CODEGEN_SOURCES} + ${BUN_BAKE_RUNTIME_CODEGEN_SCRIPT} + OUTPUTS + ${CODEGEN_PATH}/bake_empty_file + ${BUN_BAKE_RUNTIME_OUTPUTS} +) + +set(BUN_JS_SINK_SCRIPT ${CWD}/src/codegen/generate-jssink.ts) + +set(BUN_JS_SINK_SOURCES + ${BUN_JS_SINK_SCRIPT} + ${CWD}/src/codegen/create-hash-table.ts +) + +set(BUN_JS_SINK_OUTPUTS + ${CODEGEN_PATH}/JSSink.cpp + ${CODEGEN_PATH}/JSSink.h + ${CODEGEN_PATH}/JSSink.lut.h +) + +register_command( + TARGET + bun-js-sink + COMMENT + "Generating JSSink.{cpp,h}" + COMMAND + ${BUN_EXECUTABLE} + run + ${BUN_JS_SINK_SCRIPT} + ${CODEGEN_PATH} + SOURCES + ${BUN_JS_SINK_SOURCES} + OUTPUTS + ${BUN_JS_SINK_OUTPUTS} +) + +set(BUN_OBJECT_LUT_SCRIPT ${CWD}/src/codegen/create-hash-table.ts) + +set(BUN_OBJECT_LUT_SOURCES + ${CWD}/src/bun.js/bindings/BunObject.cpp + ${CWD}/src/bun.js/bindings/ZigGlobalObject.lut.txt + ${CWD}/src/bun.js/bindings/JSBuffer.cpp + ${CWD}/src/bun.js/bindings/BunProcess.cpp + ${CWD}/src/bun.js/bindings/ProcessBindingConstants.cpp + ${CWD}/src/bun.js/bindings/ProcessBindingNatives.cpp + ${CWD}/src/bun.js/modules/NodeModuleModule.cpp +) + +set(BUN_OBJECT_LUT_OUTPUTS + ${CODEGEN_PATH}/BunObject.lut.h + ${CODEGEN_PATH}/ZigGlobalObject.lut.h + ${CODEGEN_PATH}/JSBuffer.lut.h + ${CODEGEN_PATH}/BunProcess.lut.h + ${CODEGEN_PATH}/ProcessBindingConstants.lut.h + ${CODEGEN_PATH}/ProcessBindingNatives.lut.h + ${CODEGEN_PATH}/NodeModuleModule.lut.h +) + + +macro(WEBKIT_ADD_SOURCE_DEPENDENCIES _source _deps) + set(_tmp) + get_source_file_property(_tmp ${_source} OBJECT_DEPENDS) + + if(NOT _tmp) + set(_tmp "") + endif() + + foreach(f ${_deps}) + list(APPEND _tmp "${f}") + endforeach() + + set_source_files_properties(${_source} PROPERTIES OBJECT_DEPENDS "${_tmp}") + unset(_tmp) +endmacro() + +list(LENGTH BUN_OBJECT_LUT_SOURCES BUN_OBJECT_LUT_SOURCES_COUNT) +math(EXPR BUN_OBJECT_LUT_SOURCES_MAX_INDEX "${BUN_OBJECT_LUT_SOURCES_COUNT} - 1") + +foreach(i RANGE 0 ${BUN_OBJECT_LUT_SOURCES_MAX_INDEX}) + list(GET BUN_OBJECT_LUT_SOURCES ${i} BUN_OBJECT_LUT_SOURCE) + list(GET BUN_OBJECT_LUT_OUTPUTS ${i} BUN_OBJECT_LUT_OUTPUT) + + get_filename_component(filename 
${BUN_OBJECT_LUT_SOURCE} NAME_WE) + register_command( + TARGET + bun-codegen-lut-${filename} + COMMENT + "Generating ${filename}.lut.h" + COMMAND + ${BUN_EXECUTABLE} + run + ${BUN_OBJECT_LUT_SCRIPT} + ${BUN_OBJECT_LUT_SOURCE} + ${BUN_OBJECT_LUT_OUTPUT} + SOURCES + ${BUN_OBJECT_LUT_SCRIPT} + ${BUN_OBJECT_LUT_SOURCE} + OUTPUTS + ${BUN_OBJECT_LUT_OUTPUT} + ) + + WEBKIT_ADD_SOURCE_DEPENDENCIES(${BUN_OBJECT_LUT_SOURCE} ${BUN_OBJECT_LUT_OUTPUT}) +endforeach() + +WEBKIT_ADD_SOURCE_DEPENDENCIES( + ${CWD}/src/bun.js/bindings/ErrorCode.cpp + ${CODEGEN_PATH}/ErrorCode+List.h +) + +WEBKIT_ADD_SOURCE_DEPENDENCIES( + ${CWD}/src/bun.js/bindings/ErrorCode.h + ${CODEGEN_PATH}/ErrorCode+Data.h +) + +WEBKIT_ADD_SOURCE_DEPENDENCIES( + ${CWD}/src/bun.js/bindings/ZigGlobalObject.cpp + ${CODEGEN_PATH}/ZigGlobalObject.lut.h +) + +WEBKIT_ADD_SOURCE_DEPENDENCIES( + ${CWD}/src/bun.js/bindings/InternalModuleRegistry.cpp + ${CODEGEN_PATH}/InternalModuleRegistryConstants.h +) + +# --- Zig --- + +file(GLOB_RECURSE BUN_ZIG_SOURCES ${CONFIGURE_DEPENDS} + ${CWD}/src/*.zig +) + +list(APPEND BUN_ZIG_SOURCES + ${CWD}/build.zig + ${CWD}/root.zig + ${CWD}/root_wasm.zig +) + +set(BUN_ZIG_GENERATED_SOURCES + ${BUN_ERROR_OUTPUTS} + ${BUN_FALLBACK_DECODER_OUTPUT} + ${BUN_RUNTIME_JS_OUTPUT} + ${BUN_NODE_FALLBACKS_OUTPUTS} + ${BUN_ERROR_CODE_OUTPUTS} + ${BUN_ZIG_GENERATED_CLASSES_OUTPUTS} + ${BUN_JAVASCRIPT_OUTPUTS} +) + +# In debug builds, these are not embedded, but rather referenced at runtime. +if (DEBUG) + list(APPEND BUN_ZIG_GENERATED_SOURCES ${CODEGEN_PATH}/bake_empty_file) +else() + list(APPEND BUN_ZIG_GENERATED_SOURCES ${BUN_BAKE_RUNTIME_OUTPUTS}) +endif() + +set(BUN_ZIG_OUTPUT ${BUILD_PATH}/bun-zig.o) + + +if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm|ARM|arm64|ARM64|aarch64|AARCH64") + set(IS_ARM64 ON) + if(APPLE) + set(ZIG_CPU "apple_m1") + else() + set(ZIG_CPU "native") + endif() +elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "x86_64|X86_64|x64|X64|amd64|AMD64") + set(IS_X86_64 ON) + if(ENABLE_BASELINE) + set(ZIG_CPU "nehalem") + else() + set(ZIG_CPU "haswell") + endif() +else() + unsupported(CMAKE_SYSTEM_PROCESSOR) +endif() + +set(ZIG_FLAGS_BUN) +if(NOT "${REVISION}" STREQUAL "") + set(ZIG_FLAGS_BUN ${ZIG_FLAGS_BUN} -Dsha=${REVISION}) +endif() + +register_command( + TARGET + bun-zig + GROUP + console + COMMENT + "Building src/*.zig for ${ZIG_TARGET}" + COMMAND + ${ZIG_EXECUTABLE} + build obj + ${CMAKE_ZIG_FLAGS} + --prefix ${BUILD_PATH} + -Dobj_format=${ZIG_OBJECT_FORMAT} + -Dtarget=${ZIG_TARGET} + -Doptimize=${ZIG_OPTIMIZE} + -Dcpu=${ZIG_CPU} + -Denable_logs=$,true,false> + -Dversion=${VERSION} + -Dreported_nodejs_version=${NODEJS_VERSION} + -Dcanary=${CANARY_REVISION} + -Dcodegen_path=${CODEGEN_PATH} + -Dcodegen_embed=$,true,false> + --prominent-compile-errors + ${ZIG_FLAGS_BUN} + ARTIFACTS + ${BUN_ZIG_OUTPUT} + TARGETS + clone-zig + SOURCES + ${BUN_ZIG_SOURCES} + ${BUN_ZIG_GENERATED_SOURCES} +) + +set_property(TARGET bun-zig PROPERTY JOB_POOL compile_pool) +set_property(DIRECTORY APPEND PROPERTY CMAKE_CONFIGURE_DEPENDS "build.zig") + +# --- C/C++ Sources --- + +set(BUN_USOCKETS_SOURCE ${CWD}/packages/bun-usockets) + +file(GLOB BUN_CXX_SOURCES ${CONFIGURE_DEPENDS} + ${CWD}/src/io/*.cpp + ${CWD}/src/bun.js/modules/*.cpp + ${CWD}/src/bun.js/bindings/*.cpp + ${CWD}/src/bun.js/bindings/webcore/*.cpp + ${CWD}/src/bun.js/bindings/sqlite/*.cpp + ${CWD}/src/bun.js/bindings/webcrypto/*.cpp + ${CWD}/src/bun.js/bindings/webcrypto/*/*.cpp + ${CWD}/src/bun.js/bindings/v8/*.cpp + ${CWD}/src/bun.js/bindings/v8/shim/*.cpp + ${CWD}/src/bake/*.cpp + 
${CWD}/src/deps/*.cpp + ${BUN_USOCKETS_SOURCE}/src/crypto/*.cpp +) + +file(GLOB BUN_C_SOURCES ${CONFIGURE_DEPENDS} + ${BUN_USOCKETS_SOURCE}/src/*.c + ${BUN_USOCKETS_SOURCE}/src/eventing/*.c + ${BUN_USOCKETS_SOURCE}/src/internal/*.c + ${BUN_USOCKETS_SOURCE}/src/crypto/*.c +) + +if(WIN32) + list(APPEND BUN_C_SOURCES ${CWD}/src/bun.js/bindings/windows/musl-memmem.c) +endif() + +register_repository( + NAME + picohttpparser + REPOSITORY + h2o/picohttpparser + COMMIT + 066d2b1e9ab820703db0837a7255d92d30f0c9f5 + OUTPUTS + picohttpparser.c +) + +set(NODEJS_HEADERS_PATH ${VENDOR_PATH}/nodejs) + +register_command( + TARGET + bun-node-headers + COMMENT + "Download node ${NODEJS_VERSION} headers" + COMMAND + ${CMAKE_COMMAND} + -DDOWNLOAD_PATH=${NODEJS_HEADERS_PATH} + -DDOWNLOAD_URL=https://nodejs.org/dist/v${NODEJS_VERSION}/node-v${NODEJS_VERSION}-headers.tar.gz + -P ${CWD}/cmake/scripts/DownloadUrl.cmake + OUTPUTS + ${NODEJS_HEADERS_PATH}/include/node/node_version.h +) + +list(APPEND BUN_CPP_SOURCES + ${BUN_C_SOURCES} + ${BUN_CXX_SOURCES} + ${VENDOR_PATH}/picohttpparser/picohttpparser.c + ${NODEJS_HEADERS_PATH}/include/node/node_version.h + ${BUN_ZIG_GENERATED_CLASSES_OUTPUTS} + ${BUN_JS_SINK_OUTPUTS} + ${BUN_JAVASCRIPT_OUTPUTS} + ${BUN_OBJECT_LUT_OUTPUTS} +) + +if(WIN32) + if(ENABLE_CANARY) + set(Bun_VERSION_WITH_TAG ${VERSION}-canary.${CANARY_REVISION}) + else() + set(Bun_VERSION_WITH_TAG ${VERSION}) + endif() + set(BUN_ICO_PATH ${CWD}/src/bun.ico) + configure_file( + ${CWD}/src/windows-app-info.rc + ${CODEGEN_PATH}/windows-app-info.rc + ) + list(APPEND BUN_CPP_SOURCES ${CODEGEN_PATH}/windows-app-info.rc) +endif() + +# --- Executable --- + +set(BUN_CPP_OUTPUT ${BUILD_PATH}/${CMAKE_STATIC_LIBRARY_PREFIX}${bun}${CMAKE_STATIC_LIBRARY_SUFFIX}) + +if(BUN_LINK_ONLY) + add_executable(${bun} ${BUN_CPP_OUTPUT} ${BUN_ZIG_OUTPUT}) + set_target_properties(${bun} PROPERTIES LINKER_LANGUAGE CXX) + target_link_libraries(${bun} PRIVATE ${BUN_CPP_OUTPUT}) +elseif(BUN_CPP_ONLY) + add_library(${bun} STATIC ${BUN_CPP_SOURCES}) + register_command( + TARGET + ${bun} + TARGET_PHASE + POST_BUILD + COMMENT + "Uploading ${bun}" + COMMAND + ${CMAKE_COMMAND} -E true + ARTIFACTS + ${BUN_CPP_OUTPUT} + ) +else() + add_executable(${bun} ${BUN_CPP_SOURCES}) + target_link_libraries(${bun} PRIVATE ${BUN_ZIG_OUTPUT}) +endif() + +if(NOT bun STREQUAL "bun") + add_custom_target(bun DEPENDS ${bun}) +endif() + +# --- C/C++ Properties --- + +set_target_properties(${bun} PROPERTIES + CXX_STANDARD 20 + CXX_STANDARD_REQUIRED YES + CXX_EXTENSIONS YES + CXX_VISIBILITY_PRESET hidden + C_STANDARD 17 + C_STANDARD_REQUIRED YES + VISIBILITY_INLINES_HIDDEN YES +) + +# --- C/C++ Includes --- + +if(WIN32) + target_include_directories(${bun} PRIVATE ${CWD}/src/bun.js/bindings/windows) +endif() + +target_include_directories(${bun} PRIVATE + ${CWD}/packages + ${CWD}/packages/bun-usockets + ${CWD}/packages/bun-usockets/src + ${CWD}/src/bun.js/bindings + ${CWD}/src/bun.js/bindings/webcore + ${CWD}/src/bun.js/bindings/webcrypto + ${CWD}/src/bun.js/bindings/sqlite + ${CWD}/src/bun.js/bindings/v8 + ${CWD}/src/bun.js/modules + ${CWD}/src/js/builtins + ${CWD}/src/napi + ${CWD}/src/deps + ${CODEGEN_PATH} + ${VENDOR_PATH} + ${VENDOR_PATH}/picohttpparser + ${NODEJS_HEADERS_PATH}/include +) + +if(LINUX) + include(CheckIncludeFiles) + check_include_files("sys/queue.h" HAVE_SYS_QUEUE_H) + if(NOT HAVE_SYS_QUEUE_H) + target_include_directories(${bun} PRIVATE vendor/lshpack/compat/queue) + endif() +endif() + +# --- C/C++ Definitions --- + +if(ENABLE_ASSERTIONS) + 
target_compile_definitions(${bun} PRIVATE ASSERT_ENABLED=1) +endif() + +if(DEBUG) + target_compile_definitions(${bun} PRIVATE BUN_DEBUG=1) +endif() + +if(APPLE) + target_compile_definitions(${bun} PRIVATE _DARWIN_NON_CANCELABLE=1) +endif() + +if(WIN32) + target_compile_definitions(${bun} PRIVATE + WIN32 + _WINDOWS + WIN32_LEAN_AND_MEAN=1 + _CRT_SECURE_NO_WARNINGS + BORINGSSL_NO_CXX=1 # lol + ) +endif() + +target_compile_definitions(${bun} PRIVATE + _HAS_EXCEPTIONS=0 + LIBUS_USE_OPENSSL=1 + LIBUS_USE_BORINGSSL=1 + WITH_BORINGSSL=1 + STATICALLY_LINKED_WITH_JavaScriptCore=1 + STATICALLY_LINKED_WITH_BMALLOC=1 + BUILDING_WITH_CMAKE=1 + JSC_OBJC_API_ENABLED=0 + BUN_SINGLE_THREADED_PER_VM_ENTRY_SCOPE=1 + NAPI_EXPERIMENTAL=ON + NOMINMAX + IS_BUILD + BUILDING_JSCONLY__ + REPORTED_NODEJS_VERSION=\"${NODEJS_VERSION}\" + REPORTED_NODEJS_ABI_VERSION=${NODEJS_ABI_VERSION} +) + +if(DEBUG AND NOT CI) + target_compile_definitions(${bun} PRIVATE + BUN_DYNAMIC_JS_LOAD_PATH=\"${BUILD_PATH}/js\" + ) +endif() + + +# --- Compiler options --- + +if(NOT WIN32) + target_compile_options(${bun} PUBLIC + -fconstexpr-steps=2542484 + -fconstexpr-depth=54 + -fno-pic + -fno-pie + -faddrsig + ) + if(DEBUG) + # TODO: this shouldn't be necessary long term + if (NOT IS_MUSL) + set(ABI_PUBLIC_FLAGS + -fsanitize=null + -fsanitize-recover=all + -fsanitize=bounds + -fsanitize=return + -fsanitize=nullability-arg + -fsanitize=nullability-assign + -fsanitize=nullability-return + -fsanitize=returns-nonnull-attribute + -fsanitize=unreachable + ) + set(ABI_PRIVATE_FLAGS + -fsanitize=null + ) + else() + set(ABI_PUBLIC_FLAGS + ) + set(ABI_PRIVATE_FLAGS + ) + endif() + + target_compile_options(${bun} PUBLIC + -Werror=return-type + -Werror=return-stack-address + -Werror=implicit-function-declaration + -Werror=uninitialized + -Werror=conditional-uninitialized + -Werror=suspicious-memaccess + -Werror=int-conversion + -Werror=nonnull + -Werror=move + -Werror=sometimes-uninitialized + -Werror=unused + -Wno-unused-function + -Wno-nullability-completeness + -Werror + ${ABI_PUBLIC_FLAGS} + ) + target_link_libraries(${bun} PRIVATE + ${ABI_PRIVATE_FLAGS} + ) + else() + # Leave -Werror=unused off in release builds so we avoid errors from being used in ASSERT + target_compile_options(${bun} PUBLIC ${LTO_FLAG} + -Werror=return-type + -Werror=return-stack-address + -Werror=implicit-function-declaration + -Werror=uninitialized + -Werror=conditional-uninitialized + -Werror=suspicious-memaccess + -Werror=int-conversion + -Werror=nonnull + -Werror=move + -Werror=sometimes-uninitialized + -Wno-nullability-completeness + -Werror + ) + endif() +endif() + +# --- Linker options --- + +if(WIN32) + target_link_options(${bun} PUBLIC + /STACK:0x1200000,0x100000 + /errorlimit:0 + ) + if(RELEASE) + target_link_options(${bun} PUBLIC + /LTCG + /OPT:REF + /OPT:NOICF + /DEBUG:FULL + /delayload:ole32.dll + /delayload:WINMM.dll + /delayload:dbghelp.dll + /delayload:VCRUNTIME140_1.dll + # libuv loads these two immediately, but for some reason it seems to still be slightly faster to delayload them + /delayload:WS2_32.dll + /delayload:WSOCK32.dll + /delayload:ADVAPI32.dll + /delayload:IPHLPAPI.dll + ) + endif() +elseif(APPLE) + target_link_options(${bun} PUBLIC + -dead_strip + -dead_strip_dylibs + -Wl,-stack_size,0x1200000 + -fno-keep-static-consts + -Wl,-map,${bun}.linker-map + ) +else() + # Try to use lld-16 if available, otherwise fallback to lld + # Cache it so we don't have to re-run CMake to pick it up + if((NOT DEFINED LLD_NAME) AND (NOT CI OR BUN_LINK_ONLY)) + 
find_program(LLD_EXECUTABLE_NAME lld-${LLVM_VERSION_MAJOR}) + + if(NOT LLD_EXECUTABLE_NAME) + if(CI) + # Ensure we don't use a differing version of lld in CI vs clang + message(FATAL_ERROR "lld-${LLVM_VERSION_MAJOR} not found. Please make sure you have LLVM ${LLVM_VERSION_MAJOR}.x installed and set to lld-${LLVM_VERSION_MAJOR}") + endif() + + # To make it easier for contributors, allow differing versions of lld vs clang/cmake + find_program(LLD_EXECUTABLE_NAME lld) + endif() + + if(NOT LLD_EXECUTABLE_NAME) + message(FATAL_ERROR "LLD not found. Please make sure you have LLVM ${LLVM_VERSION_MAJOR}.x installed and lld is available in your PATH as lld-${LLVM_VERSION_MAJOR}") + endif() + + # normalize to basename so it can be used with -fuse-ld + get_filename_component(LLD_NAME ${LLD_EXECUTABLE_NAME} NAME CACHE) + message(STATUS "Using linker: ${LLD_NAME} (${LLD_EXECUTABLE_NAME})") + elseif(NOT DEFINED LLD_NAME) + set(LLD_NAME lld-${LLVM_VERSION_MAJOR}) + endif() + + if (NOT IS_MUSL) + if (IS_ARM64) + set(ARCH_WRAP_FLAGS + -Wl,--wrap=fcntl64 + -Wl,--wrap=statx + ) + elseif(IS_X86_64) + set(ARCH_WRAP_FLAGS + -Wl,--wrap=fcntl + -Wl,--wrap=fcntl64 + -Wl,--wrap=fstat + -Wl,--wrap=fstat64 + -Wl,--wrap=fstatat + -Wl,--wrap=fstatat64 + -Wl,--wrap=lstat + -Wl,--wrap=lstat64 + -Wl,--wrap=mknod + -Wl,--wrap=mknodat + -Wl,--wrap=stat + -Wl,--wrap=stat64 + -Wl,--wrap=statx + ) + endif() + else() + set(ARCH_WRAP_FLAGS + ) + endif() + + if (NOT IS_MUSL) + set(ABI_WRAP_FLAGS + -Wl,--wrap=cosf + -Wl,--wrap=exp + -Wl,--wrap=expf + -Wl,--wrap=fmod + -Wl,--wrap=fmodf + -Wl,--wrap=log + -Wl,--wrap=log10f + -Wl,--wrap=log2 + -Wl,--wrap=log2f + -Wl,--wrap=logf + -Wl,--wrap=pow + -Wl,--wrap=powf + -Wl,--wrap=sincosf + -Wl,--wrap=sinf + -Wl,--wrap=tanf + ) + else() + set(ABI_WRAP_FLAGS + ) + endif() + + target_link_options(${bun} PUBLIC + -fuse-ld=${LLD_NAME} + -fno-pic + -static-libstdc++ + -static-libgcc + -Wl,-no-pie + -Wl,-icf=safe + -Wl,--as-needed + -Wl,--gc-sections + -Wl,-z,stack-size=12800000 + ${ARCH_WRAP_FLAGS} + ${ABI_WRAP_FLAGS} + -Wl,--compress-debug-sections=zlib + -Wl,-z,lazy + -Wl,-z,norelro + -Wl,-Map=${bun}.linker-map + ) +endif() + +# --- Symbols list --- + +if(WIN32) + set(BUN_SYMBOLS_PATH ${CWD}/src/symbols.def) + target_link_options(${bun} PUBLIC /DEF:${BUN_SYMBOLS_PATH}) +elseif(APPLE) + set(BUN_SYMBOLS_PATH ${CWD}/src/symbols.txt) + target_link_options(${bun} PUBLIC -exported_symbols_list ${BUN_SYMBOLS_PATH}) +else() + set(BUN_SYMBOLS_PATH ${CWD}/src/symbols.dyn) + set(BUN_LINKER_LDS_PATH ${CWD}/src/linker.lds) + target_link_options(${bun} PUBLIC + -Bsymbolics-functions + -rdynamic + -Wl,--dynamic-list=${BUN_SYMBOLS_PATH} + -Wl,--version-script=${BUN_LINKER_LDS_PATH} + ) + set_target_properties(${bun} PROPERTIES LINK_DEPENDS ${BUN_LINKER_LDS_PATH}) +endif() + +set_target_properties(${bun} PROPERTIES LINK_DEPENDS ${BUN_SYMBOLS_PATH}) + +# --- WebKit --- + +include(SetupWebKit) + +if(WIN32) + if(DEBUG) + target_link_libraries(${bun} PRIVATE + ${WEBKIT_LIB_PATH}/WTF.lib + ${WEBKIT_LIB_PATH}/JavaScriptCore.lib + ${WEBKIT_LIB_PATH}/sicudtd.lib + ${WEBKIT_LIB_PATH}/sicuind.lib + ${WEBKIT_LIB_PATH}/sicuucd.lib + ) + else() + target_link_libraries(${bun} PRIVATE + ${WEBKIT_LIB_PATH}/WTF.lib + ${WEBKIT_LIB_PATH}/JavaScriptCore.lib + ${WEBKIT_LIB_PATH}/sicudt.lib + ${WEBKIT_LIB_PATH}/sicuin.lib + ${WEBKIT_LIB_PATH}/sicuuc.lib + ) + endif() +else() + target_link_libraries(${bun} PRIVATE + ${WEBKIT_LIB_PATH}/libWTF.a + ${WEBKIT_LIB_PATH}/libJavaScriptCore.a + ) + if(NOT APPLE OR EXISTS 
${WEBKIT_LIB_PATH}/libbmalloc.a) + target_link_libraries(${bun} PRIVATE ${WEBKIT_LIB_PATH}/libbmalloc.a) + endif() +endif() + +include_directories(${WEBKIT_INCLUDE_PATH}) + +if(NOT WEBKIT_LOCAL AND NOT APPLE) + include_directories(${WEBKIT_INCLUDE_PATH}/wtf/unicode) +endif() + +# --- Dependencies --- + +set(BUN_DEPENDENCIES + BoringSSL + Brotli + Cares + LibDeflate + LolHtml + Lshpack + Mimalloc + TinyCC + Zlib + LibArchive # must be loaded after zlib + Zstd +) + +if(WIN32) + list(APPEND BUN_DEPENDENCIES Libuv) +endif() + +if(USE_STATIC_SQLITE) + list(APPEND BUN_DEPENDENCIES SQLite) +endif() + +foreach(dependency ${BUN_DEPENDENCIES}) + include(Build${dependency}) +endforeach() + +list(TRANSFORM BUN_DEPENDENCIES TOLOWER OUTPUT_VARIABLE BUN_TARGETS) +add_custom_target(dependencies DEPENDS ${BUN_TARGETS}) + +if(APPLE) + target_link_libraries(${bun} PRIVATE icucore resolv) +endif() + +if(USE_STATIC_SQLITE) + target_compile_definitions(${bun} PRIVATE LAZY_LOAD_SQLITE=0) +else() + target_compile_definitions(${bun} PRIVATE LAZY_LOAD_SQLITE=1) +endif() + +if(LINUX) + target_link_libraries(${bun} PRIVATE c pthread dl) + + if(USE_STATIC_LIBATOMIC) + target_link_libraries(${bun} PRIVATE libatomic.a) + else() + target_link_libraries(${bun} PUBLIC libatomic.so) + endif() + + if(USE_SYSTEM_ICU) + target_link_libraries(${bun} PRIVATE libicudata.a) + target_link_libraries(${bun} PRIVATE libicui18n.a) + target_link_libraries(${bun} PRIVATE libicuuc.a) + else() + target_link_libraries(${bun} PRIVATE ${WEBKIT_LIB_PATH}/libicudata.a) + target_link_libraries(${bun} PRIVATE ${WEBKIT_LIB_PATH}/libicui18n.a) + target_link_libraries(${bun} PRIVATE ${WEBKIT_LIB_PATH}/libicuuc.a) + endif() +endif() + +if(WIN32) + target_link_libraries(${bun} PRIVATE + winmm + bcrypt + ntdll + userenv + dbghelp + wsock32 # ws2_32 required by TransmitFile aka sendfile on windows + delayimp.lib + ) +endif() + +# --- Packaging --- + +if(NOT BUN_CPP_ONLY) + if(bunStrip) + register_command( + TARGET + ${bun} + TARGET_PHASE + POST_BUILD + COMMENT + "Stripping ${bun}" + COMMAND + ${CMAKE_STRIP} + ${bunExe} + --strip-all + --strip-debug + --discard-all + -o ${bunStripExe} + CWD + ${BUILD_PATH} + OUTPUTS + ${BUILD_PATH}/${bunStripExe} + ) + endif() + + register_command( + TARGET + ${bun} + TARGET_PHASE + POST_BUILD + COMMENT + "Testing ${bun}" + COMMAND + ${CMAKE_COMMAND} + -E env BUN_DEBUG_QUIET_LOGS=1 + ${BUILD_PATH}/${bunExe} + --revision + CWD + ${BUILD_PATH} + ) + + if(CI) + set(BUN_FEATURES_SCRIPT ${CWD}/scripts/features.mjs) + register_command( + TARGET + ${bun} + TARGET_PHASE + POST_BUILD + COMMENT + "Generating features.json" + COMMAND + ${CMAKE_COMMAND} + -E env + BUN_GARBAGE_COLLECTOR_LEVEL=1 + BUN_DEBUG_QUIET_LOGS=1 + BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING=1 + ${BUILD_PATH}/${bunExe} + ${BUN_FEATURES_SCRIPT} + CWD + ${BUILD_PATH} + ARTIFACTS + ${BUILD_PATH}/features.json + ) + endif() + + if(CMAKE_HOST_APPLE AND bunStrip) + register_command( + TARGET + ${bun} + TARGET_PHASE + POST_BUILD + COMMENT + "Generating ${bun}.dSYM" + COMMAND + ${CMAKE_DSYMUTIL} + ${bun} + --flat + --keep-function-for-static + --object-prefix-map .=${CWD} + -o ${bun}.dSYM + -j ${CMAKE_BUILD_PARALLEL_LEVEL} + CWD + ${BUILD_PATH} + OUTPUTS + ${BUILD_PATH}/${bun}.dSYM + ) + endif() + + if(CI) + if(ENABLE_BASELINE) + set(bunTriplet bun-${OS}-${ARCH}-baseline) + else() + set(bunTriplet bun-${OS}-${ARCH}) + endif() + string(REPLACE bun ${bunTriplet} bunPath ${bun}) + set(bunFiles ${bunExe} features.json) + if(WIN32) + list(APPEND bunFiles ${bun}.pdb) + 
elseif(APPLE) + list(APPEND bunFiles ${bun}.dSYM) + endif() + + if(APPLE OR LINUX) + list(APPEND bunFiles ${bun}.linker-map) + endif() + + + register_command( + TARGET + ${bun} + TARGET_PHASE + POST_BUILD + COMMENT + "Generating ${bunPath}.zip" + COMMAND + ${CMAKE_COMMAND} -E rm -rf ${bunPath} ${bunPath}.zip + && ${CMAKE_COMMAND} -E make_directory ${bunPath} + && ${CMAKE_COMMAND} -E copy ${bunFiles} ${bunPath} + && ${CMAKE_COMMAND} -E tar cfv ${bunPath}.zip --format=zip ${bunPath} + && ${CMAKE_COMMAND} -E rm -rf ${bunPath} + CWD + ${BUILD_PATH} + ARTIFACTS + ${BUILD_PATH}/${bunPath}.zip + ) + + if(bunStrip) + string(REPLACE bun ${bunTriplet} bunStripPath ${bunStrip}) + register_command( + TARGET + ${bun} + TARGET_PHASE + POST_BUILD + COMMENT + "Generating ${bunStripPath}.zip" + COMMAND + ${CMAKE_COMMAND} -E rm -rf ${bunStripPath} ${bunStripPath}.zip + && ${CMAKE_COMMAND} -E make_directory ${bunStripPath} + && ${CMAKE_COMMAND} -E copy ${bunStripExe} ${bunStripPath} + && ${CMAKE_COMMAND} -E tar cfv ${bunStripPath}.zip --format=zip ${bunStripPath} + && ${CMAKE_COMMAND} -E rm -rf ${bunStripPath} + CWD + ${BUILD_PATH} + ARTIFACTS + ${BUILD_PATH}/${bunStripPath}.zip + ) + endif() + endif() +endif() diff --git a/cmake/targets/BuildCares.cmake b/cmake/targets/BuildCares.cmake new file mode 100644 index 0000000000..e49d9a7ab9 --- /dev/null +++ b/cmake/targets/BuildCares.cmake @@ -0,0 +1,28 @@ +register_repository( + NAME + cares + REPOSITORY + c-ares/c-ares + COMMIT + d1722e6e8acaf10eb73fa995798a9cd421d9f85e +) + +register_cmake_command( + TARGET + cares + TARGETS + c-ares + ARGS + -DCARES_STATIC=ON + -DCARES_STATIC_PIC=ON # FORCE_PIC was set to 1, but CARES_STATIC_PIC was set to OFF?? + -DCMAKE_POSITION_INDEPENDENT_CODE=ON + -DCARES_SHARED=OFF + -DCARES_BUILD_TOOLS=OFF # this was set to ON? + -DCMAKE_INSTALL_LIBDIR=lib + LIB_PATH + lib + LIBRARIES + cares + INCLUDES + include +) diff --git a/cmake/targets/BuildLibArchive.cmake b/cmake/targets/BuildLibArchive.cmake new file mode 100644 index 0000000000..e0cffd020b --- /dev/null +++ b/cmake/targets/BuildLibArchive.cmake @@ -0,0 +1,53 @@ +register_repository( + NAME + libarchive + REPOSITORY + libarchive/libarchive + COMMIT + 898dc8319355b7e985f68a9819f182aaed61b53a +) + +register_cmake_command( + TARGET + libarchive + TARGETS + archive_static + ARGS + -DCMAKE_POSITION_INDEPENDENT_CODE=ON + -DBUILD_SHARED_LIBS=OFF + -DENABLE_INSTALL=OFF + -DENABLE_TEST=OFF + -DENABLE_WERROR=OFF + -DENABLE_BZIP2=OFF + -DENABLE_CAT=OFF + -DENABLE_EXPAT=OFF + -DENABLE_ICONV=OFF + -DENABLE_LIBB2=OFF + -DENABLE_LibGCC=OFF + -DENABLE_LIBXML2=OFF + -DENABLE_LZ4=OFF + -DENABLE_LZMA=OFF + -DENABLE_LZO=OFF + -DENABLE_MBEDTLS=OFF + -DENABLE_NETTLE=OFF + -DENABLE_OPENSSL=OFF + -DENABLE_PCRE2POSIX=OFF + -DENABLE_PCREPOSIX=OFF + -DENABLE_ZSTD=OFF + # libarchive depends on zlib headers, otherwise it will + # spawn a processes to compress instead of using the library. 
+ -DENABLE_ZLIB=OFF + -DHAVE_ZLIB_H=ON + -DCMAKE_C_FLAGS="-I${VENDOR_PATH}/zlib" + LIB_PATH + libarchive + LIBRARIES + archive + INCLUDES + include +) + +# Must be loaded after zlib is defined +if(TARGET clone-zlib) + add_dependencies(libarchive clone-zlib) +endif() diff --git a/cmake/targets/BuildLibDeflate.cmake b/cmake/targets/BuildLibDeflate.cmake new file mode 100644 index 0000000000..3faf5963a7 --- /dev/null +++ b/cmake/targets/BuildLibDeflate.cmake @@ -0,0 +1,24 @@ +register_repository( + NAME + libdeflate + REPOSITORY + ebiggers/libdeflate + COMMIT + dc76454a39e7e83b68c3704b6e3784654f8d5ac5 +) + +register_cmake_command( + TARGET + libdeflate + TARGETS + libdeflate_static + ARGS + -DLIBDEFLATE_BUILD_STATIC_LIB=ON + -DLIBDEFLATE_BUILD_SHARED_LIB=OFF + -DLIBDEFLATE_BUILD_GZIP=OFF + LIBRARIES + deflatestatic WIN32 + deflate UNIX + INCLUDES + . +) diff --git a/cmake/targets/BuildLibuv.cmake b/cmake/targets/BuildLibuv.cmake new file mode 100644 index 0000000000..feba612c44 --- /dev/null +++ b/cmake/targets/BuildLibuv.cmake @@ -0,0 +1,29 @@ +register_repository( + NAME + libuv + REPOSITORY + libuv/libuv + COMMIT + da527d8d2a908b824def74382761566371439003 +) + +if(WIN32) + set(LIBUV_CMAKE_C_FLAGS "/DWIN32 /D_WINDOWS -Wno-int-conversion") +endif() + +register_cmake_command( + TARGET + libuv + TARGETS + uv_a + ARGS + -DLIBUV_BUILD_SHARED=OFF + -DLIBUV_BUILD_TESTS=OFF + -DLIBUV_BUILD_BENCH=OFF + -DCMAKE_C_FLAGS=${LIBUV_CMAKE_C_FLAGS} + LIBRARIES + libuv WIN32 + uv UNIX + INCLUDES + include +) diff --git a/cmake/targets/BuildLolHtml.cmake b/cmake/targets/BuildLolHtml.cmake new file mode 100644 index 0000000000..9a02362723 --- /dev/null +++ b/cmake/targets/BuildLolHtml.cmake @@ -0,0 +1,45 @@ +register_repository( + NAME + lolhtml + REPOSITORY + cloudflare/lol-html + COMMIT + 8d4c273ded322193d017042d1f48df2766b0f88b +) + +set(LOLHTML_CWD ${VENDOR_PATH}/lolhtml/c-api) +set(LOLHTML_BUILD_PATH ${BUILD_PATH}/lolhtml) + +if(DEBUG) + set(LOLHTML_BUILD_TYPE debug) +else() + set(LOLHTML_BUILD_TYPE release) +endif() + +set(LOLHTML_LIBRARY ${LOLHTML_BUILD_PATH}/${LOLHTML_BUILD_TYPE}/${CMAKE_STATIC_LIBRARY_PREFIX}lolhtml${CMAKE_STATIC_LIBRARY_SUFFIX}) + +set(LOLHTML_BUILD_ARGS + --target-dir ${BUILD_PATH}/lolhtml +) + +if(RELEASE) + list(APPEND LOLHTML_BUILD_ARGS --release) +endif() + +register_command( + TARGET + lolhtml + CWD + ${LOLHTML_CWD} + COMMAND + ${CARGO_EXECUTABLE} + build + ${LOLHTML_BUILD_ARGS} + ARTIFACTS + ${LOLHTML_LIBRARY} +) + +target_link_libraries(${bun} PRIVATE ${LOLHTML_LIBRARY}) +if(BUN_LINK_ONLY) + target_sources(${bun} PRIVATE ${LOLHTML_LIBRARY}) +endif() diff --git a/cmake/targets/BuildLshpack.cmake b/cmake/targets/BuildLshpack.cmake new file mode 100644 index 0000000000..c1cbb12ff5 --- /dev/null +++ b/cmake/targets/BuildLshpack.cmake @@ -0,0 +1,33 @@ +register_repository( + NAME + lshpack + REPOSITORY + litespeedtech/ls-hpack + COMMIT + 3d0f1fc1d6e66a642e7a98c55deb38aa986eb4b0 +) + +if(WIN32) + set(LSHPACK_INCLUDES . compat/queue) +else() + set(LSHPACK_INCLUDES .) 
+endif() + +register_cmake_command( + TARGET + lshpack + LIBRARIES + ls-hpack + ARGS + -DSHARED=OFF + -DLSHPACK_XXH=ON + # There are linking errors when built with non-Release + # Undefined symbols for architecture arm64: + # "___asan_handle_no_return", referenced from: + # _lshpack_enc_get_static_nameval in libls-hpack.a(lshpack.c.o) + # _lshpack_enc_get_static_name in libls-hpack.a(lshpack.c.o) + # _update_hash in libls-hpack.a(lshpack.c.o) + -DCMAKE_BUILD_TYPE=Release + INCLUDES + ${LSHPACK_INCLUDES} +) diff --git a/cmake/targets/BuildMimalloc.cmake b/cmake/targets/BuildMimalloc.cmake new file mode 100644 index 0000000000..1e88a1a5f0 --- /dev/null +++ b/cmake/targets/BuildMimalloc.cmake @@ -0,0 +1,60 @@ +register_repository( + NAME + mimalloc + REPOSITORY + oven-sh/mimalloc + COMMIT + 82b2c2277a4d570187c07b376557dc5bde81d848 +) + +set(MIMALLOC_CMAKE_ARGS + -DMI_BUILD_STATIC=ON + -DMI_BUILD_OBJECT=ON + -DMI_BUILD_SHARED=OFF + -DMI_BUILD_TESTS=OFF + -DMI_USE_CXX=ON + -DMI_OVERRIDE=OFF + -DMI_OSX_ZONE=OFF + -DMI_OSX_INTERPOSE=OFF + -DMI_SKIP_COLLECT_ON_EXIT=ON +) + +if(DEBUG) + list(APPEND MIMALLOC_CMAKE_ARGS -DMI_DEBUG_FULL=ON) +endif() + +if(ENABLE_VALGRIND) + list(APPEND MIMALLOC_CMAKE_ARGS -DMI_VALGRIND=ON) +endif() + +if(WIN32) + if(DEBUG) + set(MIMALLOC_LIBRARY mimalloc-static-debug) + else() + set(MIMALLOC_LIBRARY mimalloc-static) + endif() +elseif(DEBUG) + set(MIMALLOC_LIBRARY mimalloc-debug) +else() + set(MIMALLOC_LIBRARY mimalloc) +endif() + +# Workaround for linker issue on macOS and Linux x64 +# https://github.com/microsoft/mimalloc/issues/512 +if(APPLE OR (LINUX AND NOT DEBUG)) + set(MIMALLOC_LIBRARY CMakeFiles/mimalloc-obj.dir/src/static.c.o) +endif() + +register_cmake_command( + TARGET + mimalloc + TARGETS + mimalloc-static + mimalloc-obj + ARGS + ${MIMALLOC_CMAKE_ARGS} + LIBRARIES + ${MIMALLOC_LIBRARY} + INCLUDES + include +) diff --git a/cmake/targets/BuildSQLite.cmake b/cmake/targets/BuildSQLite.cmake new file mode 100644 index 0000000000..ce4cd8da24 --- /dev/null +++ b/cmake/targets/BuildSQLite.cmake @@ -0,0 +1,10 @@ +register_cmake_command( + TARGET + sqlite + CWD + ${CWD}/src/bun.js/bindings/sqlite + LIBRARIES + sqlite3 + INCLUDES + . +) diff --git a/cmake/targets/BuildTinyCC.cmake b/cmake/targets/BuildTinyCC.cmake new file mode 100644 index 0000000000..050eac4613 --- /dev/null +++ b/cmake/targets/BuildTinyCC.cmake @@ -0,0 +1,15 @@ +register_repository( + NAME + tinycc + REPOSITORY + oven-sh/tinycc + COMMIT + 29985a3b59898861442fa3b43f663fc1af2591d7 +) + +register_cmake_command( + TARGET + tinycc + LIBRARIES + tcc +) diff --git a/cmake/targets/BuildZlib.cmake b/cmake/targets/BuildZlib.cmake new file mode 100644 index 0000000000..1940bb2e33 --- /dev/null +++ b/cmake/targets/BuildZlib.cmake @@ -0,0 +1,40 @@ +register_repository( + NAME + zlib + REPOSITORY + cloudflare/zlib + COMMIT + 886098f3f339617b4243b286f5ed364b9989e245 +) + +# https://gitlab.kitware.com/cmake/cmake/-/issues/25755 +if(APPLE) + set(ZLIB_CMAKE_C_FLAGS "-fno-define-target-os-macros") + set(ZLIB_CMAKE_CXX_FLAGS "-fno-define-target-os-macros") +endif() + +if(WIN32) + if(DEBUG) + set(ZLIB_LIBRARY "zlibd") + else() + set(ZLIB_LIBRARY "zlib") + endif() +else() + set(ZLIB_LIBRARY "z") +endif() + +register_cmake_command( + TARGET + zlib + TARGETS + zlib + ARGS + -DBUILD_SHARED_LIBS=OFF + -DBUILD_EXAMPLES=OFF + "-DCMAKE_C_FLAGS=${ZLIB_CMAKE_C_FLAGS}" + "-DCMAKE_CXX_FLAGS=${ZLIB_CMAKE_CXX_FLAGS}" + LIBRARIES + ${ZLIB_LIBRARY} + INCLUDES + . 
+) diff --git a/cmake/targets/BuildZstd.cmake b/cmake/targets/BuildZstd.cmake new file mode 100644 index 0000000000..f58c3793fa --- /dev/null +++ b/cmake/targets/BuildZstd.cmake @@ -0,0 +1,26 @@ +register_repository( + NAME + zstd + REPOSITORY + facebook/zstd + COMMIT + 794ea1b0afca0f020f4e57b6732332231fb23c70 +) + +register_cmake_command( + TARGET + zstd + TARGETS + libzstd_static + ARGS + -Sbuild/cmake + -DZSTD_BUILD_STATIC=ON + -DZSTD_BUILD_PROGRAMS=OFF + -DZSTD_BUILD_TESTS=OFF + -DZSTD_BUILD_CONTRIB=OFF + LIB_PATH + lib + LIBRARIES + zstd_static WIN32 + zstd UNIX +) diff --git a/cmake/toolchains/darwin-aarch64.cmake b/cmake/toolchains/darwin-aarch64.cmake new file mode 100644 index 0000000000..b5a52c3fb2 --- /dev/null +++ b/cmake/toolchains/darwin-aarch64.cmake @@ -0,0 +1,5 @@ +set(CMAKE_SYSTEM_NAME Darwin) +set(CMAKE_SYSTEM_PROCESSOR aarch64) + +set(CMAKE_C_COMPILER_WORKS ON) +set(CMAKE_CXX_COMPILER_WORKS ON) diff --git a/cmake/toolchains/darwin-x64.cmake b/cmake/toolchains/darwin-x64.cmake new file mode 100644 index 0000000000..aef2c72d12 --- /dev/null +++ b/cmake/toolchains/darwin-x64.cmake @@ -0,0 +1,6 @@ +set(CMAKE_SYSTEM_NAME Darwin) +set(CMAKE_SYSTEM_PROCESSOR x64) +set(CMAKE_OSX_ARCHITECTURES x86_64) + +set(CMAKE_C_COMPILER_WORKS ON) +set(CMAKE_CXX_COMPILER_WORKS ON) \ No newline at end of file diff --git a/cmake/toolchains/linux-aarch64.cmake b/cmake/toolchains/linux-aarch64.cmake new file mode 100644 index 0000000000..bc23a06302 --- /dev/null +++ b/cmake/toolchains/linux-aarch64.cmake @@ -0,0 +1,5 @@ +set(CMAKE_SYSTEM_NAME Linux) +set(CMAKE_SYSTEM_PROCESSOR aarch64) + +set(CMAKE_C_COMPILER_WORKS ON) +set(CMAKE_CXX_COMPILER_WORKS ON) \ No newline at end of file diff --git a/cmake/toolchains/linux-x64-baseline.cmake b/cmake/toolchains/linux-x64-baseline.cmake new file mode 100644 index 0000000000..f521cfcc4a --- /dev/null +++ b/cmake/toolchains/linux-x64-baseline.cmake @@ -0,0 +1,6 @@ +set(CMAKE_SYSTEM_NAME Linux) +set(CMAKE_SYSTEM_PROCESSOR x64) +set(ENABLE_BASELINE ON) + +set(CMAKE_C_COMPILER_WORKS ON) +set(CMAKE_CXX_COMPILER_WORKS ON) \ No newline at end of file diff --git a/cmake/toolchains/linux-x64.cmake b/cmake/toolchains/linux-x64.cmake new file mode 100644 index 0000000000..66bc7a592f --- /dev/null +++ b/cmake/toolchains/linux-x64.cmake @@ -0,0 +1,5 @@ +set(CMAKE_SYSTEM_NAME Linux) +set(CMAKE_SYSTEM_PROCESSOR x64) + +set(CMAKE_C_COMPILER_WORKS ON) +set(CMAKE_CXX_COMPILER_WORKS ON) diff --git a/cmake/toolchains/windows-x64-baseline.cmake b/cmake/toolchains/windows-x64-baseline.cmake new file mode 100644 index 0000000000..fe2df9a930 --- /dev/null +++ b/cmake/toolchains/windows-x64-baseline.cmake @@ -0,0 +1,6 @@ +set(CMAKE_SYSTEM_NAME Windows) +set(CMAKE_SYSTEM_PROCESSOR x64) +set(ENABLE_BASELINE ON) + +set(CMAKE_C_COMPILER_WORKS ON) +set(CMAKE_CXX_COMPILER_WORKS ON) \ No newline at end of file diff --git a/cmake/toolchains/windows-x64.cmake b/cmake/toolchains/windows-x64.cmake new file mode 100644 index 0000000000..bb239656dc --- /dev/null +++ b/cmake/toolchains/windows-x64.cmake @@ -0,0 +1,5 @@ +set(CMAKE_SYSTEM_NAME Windows) +set(CMAKE_SYSTEM_PROCESSOR x64) + +set(CMAKE_C_COMPILER_WORKS ON) +set(CMAKE_CXX_COMPILER_WORKS ON) \ No newline at end of file diff --git a/cmake/tools/SetupBuildkite.cmake b/cmake/tools/SetupBuildkite.cmake new file mode 100644 index 0000000000..946ed25131 --- /dev/null +++ b/cmake/tools/SetupBuildkite.cmake @@ -0,0 +1,175 @@ +optionx(BUILDKITE_CACHE BOOL "If the build can use Buildkite caches, even if not running in Buildkite" DEFAULT 
${BUILDKITE}) + +if(NOT BUILDKITE_CACHE OR NOT BUN_LINK_ONLY) + return() +endif() + +optionx(BUILDKITE_ORGANIZATION_SLUG STRING "The organization slug to use on Buildkite" DEFAULT "bun") +optionx(BUILDKITE_PIPELINE_SLUG STRING "The pipeline slug to use on Buildkite" DEFAULT "bun") +optionx(BUILDKITE_BUILD_ID STRING "The build ID to use on Buildkite") +optionx(BUILDKITE_GROUP_ID STRING "The group ID to use on Buildkite") + +if(ENABLE_BASELINE) + set(DEFAULT_BUILDKITE_GROUP_KEY ${OS}-${ARCH}-baseline) +else() + set(DEFAULT_BUILDKITE_GROUP_KEY ${OS}-${ARCH}) +endif() + +optionx(BUILDKITE_GROUP_KEY STRING "The group key to use on Buildkite" DEFAULT ${DEFAULT_BUILDKITE_GROUP_KEY}) + +if(BUILDKITE) + optionx(BUILDKITE_BUILD_ID_OVERRIDE STRING "The build ID to use on Buildkite") + if(BUILDKITE_BUILD_ID_OVERRIDE) + setx(BUILDKITE_BUILD_ID ${BUILDKITE_BUILD_ID_OVERRIDE}) + endif() +endif() + +set(BUILDKITE_PATH ${BUILD_PATH}/buildkite) +set(BUILDKITE_BUILDS_PATH ${BUILDKITE_PATH}/builds) + +if(NOT BUILDKITE_BUILD_ID) + # TODO: find the latest build on the main branch that passed + return() +endif() + +setx(BUILDKITE_BUILD_URL https://buildkite.com/${BUILDKITE_ORGANIZATION_SLUG}/${BUILDKITE_PIPELINE_SLUG}/builds/${BUILDKITE_BUILD_ID}) +setx(BUILDKITE_BUILD_PATH ${BUILDKITE_BUILDS_PATH}/builds/${BUILDKITE_BUILD_ID}) + +file( + DOWNLOAD ${BUILDKITE_BUILD_URL} + HTTPHEADER "Accept: application/json" + TIMEOUT 15 + STATUS BUILDKITE_BUILD_STATUS + ${BUILDKITE_BUILD_PATH}/build.json +) +if(NOT BUILDKITE_BUILD_STATUS EQUAL 0) + message(FATAL_ERROR "No build found: ${BUILDKITE_BUILD_STATUS} ${BUILDKITE_BUILD_URL}") + return() +endif() + +file(READ ${BUILDKITE_BUILD_PATH}/build.json BUILDKITE_BUILD) +string(JSON BUILDKITE_BUILD_UUID GET ${BUILDKITE_BUILD} id) +string(JSON BUILDKITE_JOBS GET ${BUILDKITE_BUILD} jobs) +string(JSON BUILDKITE_JOBS_COUNT LENGTH ${BUILDKITE_JOBS}) + +if(NOT BUILDKITE_JOBS_COUNT GREATER 0) + message(FATAL_ERROR "No jobs found: ${BUILDKITE_BUILD_URL}") + return() +endif() + +set(BUILDKITE_JOBS_FAILED) +set(BUILDKITE_JOBS_NOT_FOUND) +set(BUILDKITE_JOBS_NO_ARTIFACTS) +set(BUILDKITE_JOBS_NO_MATCH) +set(BUILDKITE_JOBS_MATCH) + +math(EXPR BUILDKITE_JOBS_MAX_INDEX "${BUILDKITE_JOBS_COUNT} - 1") +foreach(i RANGE ${BUILDKITE_JOBS_MAX_INDEX}) + string(JSON BUILDKITE_JOB GET ${BUILDKITE_JOBS} ${i}) + string(JSON BUILDKITE_JOB_ID GET ${BUILDKITE_JOB} id) + string(JSON BUILDKITE_JOB_PASSED GET ${BUILDKITE_JOB} passed) + string(JSON BUILDKITE_JOB_GROUP_ID GET ${BUILDKITE_JOB} group_uuid) + string(JSON BUILDKITE_JOB_GROUP_KEY GET ${BUILDKITE_JOB} group_identifier) + string(JSON BUILDKITE_JOB_NAME GET ${BUILDKITE_JOB} step_key) + if(NOT BUILDKITE_JOB_NAME) + string(JSON BUILDKITE_JOB_NAME GET ${BUILDKITE_JOB} name) + endif() + + if(NOT BUILDKITE_JOB_PASSED) + list(APPEND BUILDKITE_JOBS_FAILED ${BUILDKITE_JOB_NAME}) + continue() + endif() + + if(NOT (BUILDKITE_GROUP_ID AND BUILDKITE_GROUP_ID STREQUAL BUILDKITE_JOB_GROUP_ID) AND + NOT (BUILDKITE_GROUP_KEY AND BUILDKITE_GROUP_KEY STREQUAL BUILDKITE_JOB_GROUP_KEY)) + list(APPEND BUILDKITE_JOBS_NO_MATCH ${BUILDKITE_JOB_NAME}) + continue() + endif() + + set(BUILDKITE_ARTIFACTS_URL https://buildkite.com/organizations/${BUILDKITE_ORGANIZATION_SLUG}/pipelines/${BUILDKITE_PIPELINE_SLUG}/builds/${BUILDKITE_BUILD_UUID}/jobs/${BUILDKITE_JOB_ID}/artifacts) + set(BUILDKITE_ARTIFACTS_PATH ${BUILDKITE_BUILD_PATH}/artifacts/${BUILDKITE_JOB_ID}.json) + + file( + DOWNLOAD ${BUILDKITE_ARTIFACTS_URL} + HTTPHEADER "Accept: application/json" + TIMEOUT 15 + STATUS 
BUILDKITE_ARTIFACTS_STATUS + ${BUILDKITE_ARTIFACTS_PATH} + ) + + if(NOT BUILDKITE_ARTIFACTS_STATUS EQUAL 0) + list(APPEND BUILDKITE_JOBS_NOT_FOUND ${BUILDKITE_JOB_NAME}) + continue() + endif() + + file(READ ${BUILDKITE_ARTIFACTS_PATH} BUILDKITE_ARTIFACTS) + string(JSON BUILDKITE_ARTIFACTS_LENGTH LENGTH ${BUILDKITE_ARTIFACTS}) + if(NOT BUILDKITE_ARTIFACTS_LENGTH GREATER 0) + list(APPEND BUILDKITE_JOBS_NO_ARTIFACTS ${BUILDKITE_JOB_NAME}) + continue() + endif() + + math(EXPR BUILDKITE_ARTIFACTS_MAX_INDEX "${BUILDKITE_ARTIFACTS_LENGTH} - 1") + foreach(i RANGE 0 ${BUILDKITE_ARTIFACTS_MAX_INDEX}) + string(JSON BUILDKITE_ARTIFACT GET ${BUILDKITE_ARTIFACTS} ${i}) + string(JSON BUILDKITE_ARTIFACT_ID GET ${BUILDKITE_ARTIFACT} id) + string(JSON BUILDKITE_ARTIFACT_PATH GET ${BUILDKITE_ARTIFACT} path) + + if(NOT BUILDKITE_ARTIFACT_PATH MATCHES "\\.(o|a|lib|zip|tar|gz)") + continue() + endif() + + if(BUILDKITE) + set(BUILDKITE_DOWNLOAD_COMMAND buildkite-agent artifact download ${BUILDKITE_ARTIFACT_PATH} . --build ${BUILDKITE_BUILD_UUID} --step ${BUILDKITE_JOB_ID}) + else() + set(BUILDKITE_DOWNLOAD_COMMAND curl -L -o ${BUILDKITE_ARTIFACT_PATH} ${BUILDKITE_ARTIFACTS_URL}/${BUILDKITE_ARTIFACT_ID}) + endif() + + add_custom_command( + COMMENT + "Downloading ${BUILDKITE_ARTIFACT_PATH}" + VERBATIM COMMAND + ${BUILDKITE_DOWNLOAD_COMMAND} + WORKING_DIRECTORY + ${BUILD_PATH} + OUTPUT + ${BUILD_PATH}/${BUILDKITE_ARTIFACT_PATH} + ) + endforeach() + + list(APPEND BUILDKITE_JOBS_MATCH ${BUILDKITE_JOB_NAME}) +endforeach() + +if(BUILDKITE_JOBS_FAILED) + list(SORT BUILDKITE_JOBS_FAILED COMPARE STRING) + list(JOIN BUILDKITE_JOBS_FAILED " " BUILDKITE_JOBS_FAILED) + message(WARNING "The following jobs were found, but failed: ${BUILDKITE_JOBS_FAILED}") +endif() + +if(BUILDKITE_JOBS_NOT_FOUND) + list(SORT BUILDKITE_JOBS_NOT_FOUND COMPARE STRING) + list(JOIN BUILDKITE_JOBS_NOT_FOUND " " BUILDKITE_JOBS_NOT_FOUND) + message(WARNING "The following jobs were found, but could not fetch their data: ${BUILDKITE_JOBS_NOT_FOUND}") +endif() + +if(BUILDKITE_JOBS_NO_MATCH) + list(SORT BUILDKITE_JOBS_NO_MATCH COMPARE STRING) + list(JOIN BUILDKITE_JOBS_NO_MATCH " " BUILDKITE_JOBS_NO_MATCH) + message(WARNING "The following jobs were found, but did not match the group ID: ${BUILDKITE_JOBS_NO_MATCH}") +endif() + +if(BUILDKITE_JOBS_NO_ARTIFACTS) + list(SORT BUILDKITE_JOBS_NO_ARTIFACTS COMPARE STRING) + list(JOIN BUILDKITE_JOBS_NO_ARTIFACTS " " BUILDKITE_JOBS_NO_ARTIFACTS) + message(WARNING "The following jobs were found, but had no artifacts: ${BUILDKITE_JOBS_NO_ARTIFACTS}") +endif() + +if(BUILDKITE_JOBS_MATCH) + list(SORT BUILDKITE_JOBS_MATCH COMPARE STRING) + list(JOIN BUILDKITE_JOBS_MATCH " " BUILDKITE_JOBS_MATCH) + message(STATUS "The following jobs were found, and matched the group ID: ${BUILDKITE_JOBS_MATCH}") +endif() + +if(NOT BUILDKITE_JOBS_FAILED AND NOT BUILDKITE_JOBS_NOT_FOUND AND NOT BUILDKITE_JOBS_NO_MATCH AND NOT BUILDKITE_JOBS_NO_ARTIFACTS AND NOT BUILDKITE_JOBS_MATCH) + message(FATAL_ERROR "Something went wrong with Buildkite?") +endif() diff --git a/cmake/tools/SetupBun.cmake b/cmake/tools/SetupBun.cmake new file mode 100644 index 0000000000..5377eb1cff --- /dev/null +++ b/cmake/tools/SetupBun.cmake @@ -0,0 +1,21 @@ +find_command( + VARIABLE + BUN_EXECUTABLE + COMMAND + bun + PATHS + $ENV{HOME}/.bun/bin + VERSION + >=1.1.26 +) + +# If this is not set, some advanced features are not checked. 
+# https://github.com/oven-sh/bun/blob/cd7f6a1589db7f1e39dc4e3f4a17234afbe7826c/src/bun.js/javascript.zig#L1069-L1072 +setenv(BUN_GARBAGE_COLLECTOR_LEVEL 1) +setenv(BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING 1) +setenv(BUN_DEBUG_QUIET_LOGS 1) + +# FIXME: https://github.com/oven-sh/bun/issues/11250 +if(NOT WIN32) + setenv(BUN_INSTALL_CACHE_DIR ${CACHE_PATH}/bun) +endif() diff --git a/cmake/tools/SetupCcache.cmake b/cmake/tools/SetupCcache.cmake new file mode 100644 index 0000000000..d2367205c8 --- /dev/null +++ b/cmake/tools/SetupCcache.cmake @@ -0,0 +1,44 @@ +optionx(ENABLE_CCACHE BOOL "If ccache should be enabled" DEFAULT ON) + +if(NOT ENABLE_CCACHE OR CACHE_STRATEGY STREQUAL "none") + setenv(CCACHE_DISABLE 1) + return() +endif() + +find_command( + VARIABLE + CCACHE_PROGRAM + COMMAND + ccache + REQUIRED + ${CI} +) + +if(NOT CCACHE_PROGRAM) + return() +endif() + +set(CCACHE_ARGS CMAKE_C_COMPILER_LAUNCHER CMAKE_CXX_COMPILER_LAUNCHER) +foreach(arg ${CCACHE_ARGS}) + setx(${arg} ${CCACHE_PROGRAM}) + list(APPEND CMAKE_ARGS -D${arg}=${${arg}}) +endforeach() + +setenv(CCACHE_DIR ${CACHE_PATH}/ccache) +setenv(CCACHE_BASEDIR ${CWD}) +setenv(CCACHE_NOHASHDIR 1) + +if(CACHE_STRATEGY STREQUAL "read-only") + setenv(CCACHE_READONLY 1) +elseif(CACHE_STRATEGY STREQUAL "write-only") + setenv(CCACHE_RECACHE 1) +endif() + +setenv(CCACHE_FILECLONE 1) +setenv(CCACHE_STATSLOG ${BUILD_PATH}/ccache.log) + +if(CI) + setenv(CCACHE_SLOPPINESS "pch_defines,time_macros,locale,clang_index_store,gcno_cwd,include_file_ctime,include_file_mtime") +else() + setenv(CCACHE_SLOPPINESS "pch_defines,time_macros,locale,random_seed,clang_index_store,gcno_cwd") +endif() diff --git a/cmake/tools/SetupEsbuild.cmake b/cmake/tools/SetupEsbuild.cmake new file mode 100644 index 0000000000..90989b711c --- /dev/null +++ b/cmake/tools/SetupEsbuild.cmake @@ -0,0 +1,20 @@ +if(CMAKE_HOST_WIN32) + setx(ESBUILD_EXECUTABLE ${CWD}/node_modules/.bin/esbuild.exe) +else() + setx(ESBUILD_EXECUTABLE ${CWD}/node_modules/.bin/esbuild) +endif() + +if(CMAKE_COLOR_DIAGNOSTICS) + set(ESBUILD_ARGS --color) +endif() + +register_command( + COMMAND + ${BUN_EXECUTABLE} + install + --frozen-lockfile + SOURCES + ${CWD}/package.json + OUTPUTS + ${ESBUILD_EXECUTABLE} +) diff --git a/cmake/tools/SetupGit.cmake b/cmake/tools/SetupGit.cmake new file mode 100644 index 0000000000..8e0f87c312 --- /dev/null +++ b/cmake/tools/SetupGit.cmake @@ -0,0 +1,43 @@ +find_command( + VARIABLE + GIT_PROGRAM + COMMAND + git + REQUIRED + OFF +) + +if(NOT GIT_PROGRAM) + return() +endif() + +set(GIT_DIFF_COMMAND ${GIT_PROGRAM} diff --no-color --name-only --diff-filter=AMCR origin/main HEAD) + +execute_process( + COMMAND + ${GIT_DIFF_COMMAND} + WORKING_DIRECTORY + ${CWD} + OUTPUT_STRIP_TRAILING_WHITESPACE + OUTPUT_VARIABLE + GIT_DIFF + ERROR_STRIP_TRAILING_WHITESPACE + ERROR_VARIABLE + GIT_DIFF_ERROR + RESULT_VARIABLE + GIT_DIFF_RESULT +) + +if(NOT GIT_DIFF_RESULT EQUAL 0) + message(${WARNING} "Command failed: ${GIT_DIFF_COMMAND} ${GIT_DIFF_ERROR}") + return() +endif() + +string(REPLACE "\n" ";" GIT_CHANGED_SOURCES "${GIT_DIFF}") + +if(CI) + setx(GIT_CHANGED_SOURCES ${GIT_CHANGED_SOURCES}) +endif() + +list(TRANSFORM GIT_CHANGED_SOURCES PREPEND ${CWD}/) +list(LENGTH GIT_CHANGED_SOURCES GIT_CHANGED_SOURCES_COUNT) diff --git a/cmake/tools/SetupLLVM.cmake b/cmake/tools/SetupLLVM.cmake new file mode 100644 index 0000000000..a7046d996f --- /dev/null +++ b/cmake/tools/SetupLLVM.cmake @@ -0,0 +1,121 @@ +optionx(ENABLE_LLVM BOOL "If LLVM should be used for compilation" DEFAULT ON) + +if(NOT ENABLE_LLVM) 
+ return() +endif() + +if(CMAKE_HOST_WIN32 OR CMAKE_HOST_APPLE OR IS_MUSL) + set(DEFAULT_LLVM_VERSION "18.1.8") +else() + set(DEFAULT_LLVM_VERSION "16.0.6") +endif() + +optionx(LLVM_VERSION STRING "The version of LLVM to use" DEFAULT ${DEFAULT_LLVM_VERSION}) + +string(REGEX MATCH "([0-9]+)\\.([0-9]+)\\.([0-9]+)" USE_LLVM_VERSION ${LLVM_VERSION}) +if(USE_LLVM_VERSION) + set(LLVM_VERSION_MAJOR ${CMAKE_MATCH_1}) + set(LLVM_VERSION_MINOR ${CMAKE_MATCH_2}) + set(LLVM_VERSION_PATCH ${CMAKE_MATCH_3}) +endif() + +set(LLVM_PATHS) + +if(APPLE) + execute_process( + COMMAND brew --prefix + OUTPUT_VARIABLE HOMEBREW_PREFIX + OUTPUT_STRIP_TRAILING_WHITESPACE + ERROR_QUIET + ) + + if(NOT HOMEBREW_PREFIX) + if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm64|ARM64|aarch64|AARCH64") + set(HOMEBREW_PREFIX /opt/homebrew) + else() + set(HOMEBREW_PREFIX /usr/local) + endif() + endif() + + list(APPEND LLVM_PATHS ${HOMEBREW_PREFIX}/opt/llvm/bin) + + if(USE_LLVM_VERSION) + list(APPEND LLVM_PATHS ${HOMEBREW_PREFIX}/opt/llvm@${LLVM_VERSION_MAJOR}/bin) + endif() +endif() + +if(UNIX) + list(APPEND LLVM_PATHS /usr/lib/llvm/bin) + + if(USE_LLVM_VERSION) + list(APPEND LLVM_PATHS + /usr/lib/llvm-${LLVM_VERSION_MAJOR}.${LLVM_VERSION_MINOR}.${LLVM_VERSION_PATCH}/bin + /usr/lib/llvm-${LLVM_VERSION_MAJOR}.${LLVM_VERSION_MINOR}/bin + /usr/lib/llvm-${LLVM_VERSION_MAJOR}/bin + ) + endif() +endif() + +macro(find_llvm_command variable command) + set(commands ${command}) + + if(USE_LLVM_VERSION) + list(APPEND commands + ${command}-${LLVM_VERSION_MAJOR}.${LLVM_VERSION_MINOR}.${LLVM_VERSION_PATCH} + ${command}-${LLVM_VERSION_MAJOR}.${LLVM_VERSION_MINOR} + ${command}-${LLVM_VERSION_MAJOR} + ) + endif() + + find_command( + VARIABLE ${variable} + VERSION_VARIABLE LLVM_VERSION + COMMAND ${commands} + PATHS ${LLVM_PATHS} + VERSION ${LLVM_VERSION} + ) + list(APPEND CMAKE_ARGS -D${variable}=${${variable}}) +endmacro() + +macro(find_llvm_command_no_version variable command) + set(commands ${command}) + + if(USE_LLVM_VERSION) + list(APPEND commands + ${command}-${LLVM_VERSION_MAJOR}.${LLVM_VERSION_MINOR}.${LLVM_VERSION_PATCH} + ${command}-${LLVM_VERSION_MAJOR}.${LLVM_VERSION_MINOR} + ${command}-${LLVM_VERSION_MAJOR} + ) + endif() + + find_command( + VARIABLE ${variable} + VERSION_VARIABLE LLVM_VERSION + COMMAND ${commands} + PATHS ${LLVM_PATHS} + ) + list(APPEND CMAKE_ARGS -D${variable}=${${variable}}) +endmacro() + +if(WIN32) + find_llvm_command(CMAKE_C_COMPILER clang-cl) + find_llvm_command(CMAKE_CXX_COMPILER clang-cl) + find_llvm_command_no_version(CMAKE_LINKER lld-link) + find_llvm_command_no_version(CMAKE_AR llvm-lib) + find_llvm_command_no_version(CMAKE_STRIP llvm-strip) +else() + find_llvm_command(CMAKE_C_COMPILER clang) + find_llvm_command(CMAKE_CXX_COMPILER clang++) + find_llvm_command(CMAKE_LINKER llvm-link) + find_llvm_command(CMAKE_AR llvm-ar) + find_llvm_command(CMAKE_STRIP llvm-strip) + find_llvm_command(CMAKE_RANLIB llvm-ranlib) + if(APPLE) + find_llvm_command(CMAKE_DSYMUTIL dsymutil) + endif() +endif() + +if(ENABLE_ANALYSIS) + find_llvm_command(CLANG_FORMAT_PROGRAM clang-format) + find_llvm_command(CLANG_TIDY_PROGRAM clang-tidy) +endif() diff --git a/cmake/tools/SetupMacSDK.cmake b/cmake/tools/SetupMacSDK.cmake new file mode 100644 index 0000000000..1eff438b79 --- /dev/null +++ b/cmake/tools/SetupMacSDK.cmake @@ -0,0 +1,59 @@ +set(MIN_OSX_DEPLOYMENT_TARGET "13.0") + +if(DEFINED ENV{CI}) + set(DEFAULT_OSX_DEPLOYMENT_TARGET ${MIN_OSX_DEPLOYMENT_TARGET}) +else() + execute_process( + COMMAND xcrun --sdk macosx --show-sdk-version + 
OUTPUT_VARIABLE CURRENT_OSX_DEPLOYMENT_TARGET + OUTPUT_STRIP_TRAILING_WHITESPACE + ERROR_VARIABLE DEFAULT_OSX_DEPLOYMENT_TARGET_ERROR + ERROR_STRIP_TRAILING_WHITESPACE + ) + if(DEFAULT_OSX_DEPLOYMENT_TARGET_ERROR) + message(WARNING "Failed to find macOS SDK version, did you run `xcode-select --install`?") + message(FATAL_ERROR ${DEFAULT_OSX_DEPLOYMENT_TARGET_ERROR}) + endif() + + string(REGEX MATCH "^[0-9]*" DEFAULT_OSX_DEPLOYMENT_TARGET ${CURRENT_OSX_DEPLOYMENT_TARGET}) +endif() + +optionx(CMAKE_OSX_DEPLOYMENT_TARGET STRING "The macOS SDK version to target" DEFAULT ${DEFAULT_OSX_DEPLOYMENT_TARGET}) + +if(CMAKE_OSX_DEPLOYMENT_TARGET VERSION_LESS ${MIN_OSX_DEPLOYMENT_TARGET}) + message(FATAL_ERROR "The target macOS SDK version, ${CMAKE_OSX_DEPLOYMENT_TARGET}, is older than the minimum supported version, ${MIN_OSX_DEPLOYMENT_TARGET}.") +endif() + +execute_process( + COMMAND sw_vers -productVersion + OUTPUT_VARIABLE MACOS_VERSION + OUTPUT_STRIP_TRAILING_WHITESPACE + ERROR_QUIET +) + +if(MACOS_VERSION VERSION_LESS ${CMAKE_OSX_DEPLOYMENT_TARGET}) + message(FATAL_ERROR "Your computer is running macOS ${MACOS_VERSION}, which is older than the target macOS SDK ${CMAKE_OSX_DEPLOYMENT_TARGET}. To fix this, either:\n" + " - Upgrade your computer to macOS ${CMAKE_OSX_DEPLOYMENT_TARGET} or newer\n" + " - Download a newer version of the macOS SDK from Apple: https://developer.apple.com/download/all/?q=xcode\n" + " - Set -DCMAKE_OSX_DEPLOYMENT_TARGET=${MACOS_VERSION}\n") +endif() + +execute_process( + COMMAND xcrun --sdk macosx --show-sdk-path + OUTPUT_VARIABLE DEFAULT_CMAKE_OSX_SYSROOT + OUTPUT_STRIP_TRAILING_WHITESPACE + ERROR_VARIABLE DEFAULT_CMAKE_OSX_SYSROOT_ERROR + ERROR_STRIP_TRAILING_WHITESPACE +) + +if(CMAKE_OSX_SYSROOT_ERROR) + message(WARNING "Failed to find macOS SDK path, did you run `xcode-select --install`?") + message(FATAL_ERROR ${CMAKE_OSX_SYSROOT_ERROR}) +endif() + +optionx(CMAKE_OSX_SYSROOT STRING "The macOS SDK path to target" DEFAULT ${DEFAULT_CMAKE_OSX_SYSROOT}) + +list(APPEND CMAKE_ARGS + -DCMAKE_OSX_DEPLOYMENT_TARGET=${CMAKE_OSX_DEPLOYMENT_TARGET} + -DCMAKE_OSX_SYSROOT=${CMAKE_OSX_SYSROOT} +) diff --git a/cmake/tools/SetupRust.cmake b/cmake/tools/SetupRust.cmake new file mode 100644 index 0000000000..a83b28bc5f --- /dev/null +++ b/cmake/tools/SetupRust.cmake @@ -0,0 +1,25 @@ +find_command( + VARIABLE + CARGO_EXECUTABLE + COMMAND + cargo + PATHS + $ENV{HOME}/.cargo/bin + REQUIRED + OFF +) + +if(EXISTS ${CARGO_EXECUTABLE}) + return() +endif() + +if(CMAKE_HOST_WIN32) + set(CARGO_INSTALL_COMMAND "choco install rust") +else() + set(CARGO_INSTALL_COMMAND "curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh") +endif() + +message(FATAL_ERROR "Command not found: cargo\n" + "Do you have Rust installed? 
To fix this, try running:\n" + " ${CARGO_INSTALL_COMMAND}\n" +) diff --git a/cmake/tools/SetupWebKit.cmake b/cmake/tools/SetupWebKit.cmake new file mode 100644 index 0000000000..b71eff33e1 --- /dev/null +++ b/cmake/tools/SetupWebKit.cmake @@ -0,0 +1,90 @@ +option(WEBKIT_VERSION "The version of WebKit to use") +option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading") + +if(NOT WEBKIT_VERSION) + set(WEBKIT_VERSION 3bc4abf2d5875baf500b4687ef869987f6d19e00) +endif() + +if(WEBKIT_LOCAL) + set(DEFAULT_WEBKIT_PATH ${VENDOR_PATH}/WebKit/WebKitBuild/${CMAKE_BUILD_TYPE}) +else() + set(DEFAULT_WEBKIT_PATH ${CACHE_PATH}/webkit-${WEBKIT_VERSION}) +endif() + +option(WEBKIT_PATH "The path to the WebKit directory") + +if(NOT WEBKIT_PATH) + set(WEBKIT_PATH ${DEFAULT_WEBKIT_PATH}) +endif() + +set(WEBKIT_INCLUDE_PATH ${WEBKIT_PATH}/include) +set(WEBKIT_LIB_PATH ${WEBKIT_PATH}/lib) + +if(WEBKIT_LOCAL) + if(EXISTS ${WEBKIT_PATH}/cmakeconfig.h) + # You may need to run: + # make jsc-compile-debug jsc-copy-headers + include_directories( + ${WEBKIT_PATH} + ${WEBKIT_PATH}/JavaScriptCore/Headers/JavaScriptCore + ${WEBKIT_PATH}/JavaScriptCore/PrivateHeaders + ${WEBKIT_PATH}/bmalloc/Headers + ${WEBKIT_PATH}/WTF/Headers + ) + endif() + + # After this point, only prebuilt WebKit is supported + return() +endif() + +if(EXISTS ${WEBKIT_PATH}/package.json) + file(READ ${WEBKIT_PATH}/package.json WEBKIT_PACKAGE_JSON) + + if(WEBKIT_PACKAGE_JSON MATCHES ${WEBKIT_VERSION}) + return() + endif() +endif() + +if(WIN32) + set(WEBKIT_OS "windows") +elseif(APPLE) + set(WEBKIT_OS "macos") +elseif(UNIX) + set(WEBKIT_OS "linux") +else() + message(FATAL_ERROR "Unsupported operating system: ${CMAKE_SYSTEM_NAME}") +endif() + +if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm64|aarch64") + set(WEBKIT_ARCH "arm64") +elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "amd64|x86_64|x64|AMD64") + set(WEBKIT_ARCH "amd64") +else() + message(FATAL_ERROR "Unsupported architecture: ${CMAKE_SYSTEM_PROCESSOR}") +endif() + +if(IS_MUSL) + set(WEBKIT_SUFFIX "-musl") +endif() + +if(DEBUG) + set(WEBKIT_SUFFIX "${WEBKIT_SUFFIX}-debug") +elseif(ENABLE_LTO AND NOT WIN32) + set(WEBKIT_SUFFIX "${WEBKIT_SUFFIX}-lto") +else() + set(WEBKIT_SUFFIX "${WEBKIT_SUFFIX}") +endif() + +set(WEBKIT_NAME bun-webkit-${WEBKIT_OS}-${WEBKIT_ARCH}${WEBKIT_SUFFIX}) +set(WEBKIT_FILENAME ${WEBKIT_NAME}.tar.gz) +setx(WEBKIT_DOWNLOAD_URL https://github.com/oven-sh/WebKit/releases/download/autobuild-${WEBKIT_VERSION}/${WEBKIT_FILENAME}) + +file(DOWNLOAD ${WEBKIT_DOWNLOAD_URL} ${CACHE_PATH}/${WEBKIT_FILENAME} SHOW_PROGRESS) +file(ARCHIVE_EXTRACT INPUT ${CACHE_PATH}/${WEBKIT_FILENAME} DESTINATION ${CACHE_PATH} TOUCH) +file(REMOVE ${CACHE_PATH}/${WEBKIT_FILENAME}) +file(REMOVE_RECURSE ${WEBKIT_PATH}) +file(RENAME ${CACHE_PATH}/bun-webkit ${WEBKIT_PATH}) + +if(APPLE) + file(REMOVE_RECURSE ${WEBKIT_INCLUDE_PATH}/unicode) +endif() diff --git a/cmake/tools/SetupZig.cmake b/cmake/tools/SetupZig.cmake new file mode 100644 index 0000000000..d34c4b53ff --- /dev/null +++ b/cmake/tools/SetupZig.cmake @@ -0,0 +1,87 @@ +if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm64|aarch64") + set(DEFAULT_ZIG_ARCH "aarch64") +elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "amd64|x86_64|x64|AMD64") + set(DEFAULT_ZIG_ARCH "x86_64") +else() + unsupported(CMAKE_SYSTEM_PROCESSOR) +endif() + +if(APPLE) + set(DEFAULT_ZIG_TARGET ${DEFAULT_ZIG_ARCH}-macos-none) +elseif(WIN32) + set(DEFAULT_ZIG_TARGET ${DEFAULT_ZIG_ARCH}-windows-msvc) +elseif(LINUX) + if(IS_MUSL) + set(DEFAULT_ZIG_TARGET ${DEFAULT_ZIG_ARCH}-linux-musl) + else() + 
set(DEFAULT_ZIG_TARGET ${DEFAULT_ZIG_ARCH}-linux-gnu) + endif() +else() + unsupported(CMAKE_SYSTEM_NAME) +endif() + +optionx(ZIG_VERSION STRING "The zig version of the compiler to download" DEFAULT "0.13.0") +optionx(ZIG_COMMIT STRING "The zig commit to use in oven-sh/zig" DEFAULT "131a009ba2eb127a3447d05b9e12f710429aa5ee") +optionx(ZIG_TARGET STRING "The zig target to use" DEFAULT ${DEFAULT_ZIG_TARGET}) + +if(CMAKE_BUILD_TYPE STREQUAL "Release") + set(DEFAULT_ZIG_OPTIMIZE "ReleaseFast") +elseif(CMAKE_BUILD_TYPE STREQUAL "RelWithDebInfo") + set(DEFAULT_ZIG_OPTIMIZE "ReleaseSafe") +elseif(CMAKE_BUILD_TYPE STREQUAL "MinSizeRel") + set(DEFAULT_ZIG_OPTIMIZE "ReleaseSmall") +elseif(CMAKE_BUILD_TYPE STREQUAL "Debug") + set(DEFAULT_ZIG_OPTIMIZE "Debug") +else() + unsupported(CMAKE_BUILD_TYPE) +endif() + +# Since Bun 1.1, Windows has been built using ReleaseSafe. +# This is because it caught more crashes, but we can reconsider this in the future +if(WIN32 AND DEFAULT_ZIG_OPTIMIZE STREQUAL "ReleaseFast") + set(DEFAULT_ZIG_OPTIMIZE "ReleaseSafe") +endif() + +optionx(ZIG_OPTIMIZE "ReleaseFast|ReleaseSafe|ReleaseSmall|Debug" "The Zig optimize level to use" DEFAULT ${DEFAULT_ZIG_OPTIMIZE}) + +# To use LLVM bitcode from Zig, more work needs to be done. Currently, an install of +# LLVM 18.1.7 does not compatible with what bitcode Zig 0.13 outputs (has LLVM 18.1.7) +# Change to "bc" to experiment, "Invalid record" means it is not valid output. +optionx(ZIG_OBJECT_FORMAT "obj|bc" "Output file format for Zig object files" DEFAULT obj) + +optionx(ZIG_LOCAL_CACHE_DIR FILEPATH "The path to local the zig cache directory" DEFAULT ${CACHE_PATH}/zig/local) +optionx(ZIG_GLOBAL_CACHE_DIR FILEPATH "The path to the global zig cache directory" DEFAULT ${CACHE_PATH}/zig/global) + +setenv(ZIG_LOCAL_CACHE_DIR ${ZIG_LOCAL_CACHE_DIR}) +setenv(ZIG_GLOBAL_CACHE_DIR ${ZIG_GLOBAL_CACHE_DIR}) + +setx(ZIG_PATH ${VENDOR_PATH}/zig) + +if(WIN32) + setx(ZIG_EXECUTABLE ${ZIG_PATH}/zig.exe) +else() + setx(ZIG_EXECUTABLE ${ZIG_PATH}/zig) +endif() + +set(CMAKE_ZIG_FLAGS + --cache-dir ${ZIG_LOCAL_CACHE_DIR} + --global-cache-dir ${ZIG_GLOBAL_CACHE_DIR} + --zig-lib-dir ${ZIG_PATH}/lib +) + +register_command( + TARGET + clone-zig + COMMENT + "Downloading zig" + COMMAND + ${CMAKE_COMMAND} + -DZIG_PATH=${ZIG_PATH} + -DZIG_VERSION=${ZIG_VERSION} + -DZIG_COMMIT=${ZIG_COMMIT} + -P ${CWD}/cmake/scripts/DownloadZig.cmake + SOURCES + ${CWD}/cmake/scripts/DownloadZig.cmake + OUTPUTS + ${ZIG_EXECUTABLE} +) diff --git a/completions/bun.bash b/completions/bun.bash index b62d8ef247..ccabb1d73b 100644 --- a/completions/bun.bash +++ b/completions/bun.bash @@ -82,7 +82,7 @@ _bun_completions() { declare -A PACKAGE_OPTIONS; declare -A PM_OPTIONS; - local SUBCOMMANDS="dev bun create run install add remove upgrade completions discord help init pm x test repl update link unlink build"; + local SUBCOMMANDS="dev bun create run install add remove upgrade completions discord help init pm x test repl update outdated link unlink build"; GLOBAL_OPTIONS[LONG_OPTIONS]="--use --cwd --bunfile --server-bunfile --config --disable-react-fast-refresh --disable-hmr --env-file --extension-order --jsx-factory --jsx-fragment --extension-order --jsx-factory --jsx-fragment --jsx-import-source --jsx-production --jsx-runtime --main-fields --no-summary --version --platform --public-dir --tsconfig-override --define --external --help --inject --loader --origin --port --dump-environment-variables --dump-limits --disable-bun-js"; GLOBAL_OPTIONS[SHORT_OPTIONS]="-c -v -d -e -h -i -l -u -p"; 
diff --git a/completions/bun.fish b/completions/bun.fish index 3cb9366b7d..a5c51aef05 100644 --- a/completions/bun.fish +++ b/completions/bun.fish @@ -179,6 +179,7 @@ complete -c bun -n "__fish_use_subcommand" -a "remove" -d "Remove a dependency f complete -c bun -n "__fish_use_subcommand" -a "add" -d "Add a dependency to package.json" -f complete -c bun -n "__fish_use_subcommand" -a "init" -d "Initialize a Bun project in this directory" -f complete -c bun -n "__fish_use_subcommand" -a "link" -d "Register or link a local npm package" -f -complete -c bun -n "__fish_use_subcommand" -a "link" -d "Unregister a local npm package" -f +complete -c bun -n "__fish_use_subcommand" -a "unlink" -d "Unregister a local npm package" -f complete -c bun -n "__fish_use_subcommand" -a "pm" -d "Additional package management utilities" -f complete -c bun -n "__fish_use_subcommand" -a "x" -d "Execute a package binary, installing if needed" -f +complete -c bun -n "__fish_use_subcommand" -a "outdated" -d "Display the latest versions of outdated dependencies" -f diff --git a/completions/bun.zsh b/completions/bun.zsh index a19a44bec8..d75f2aa2f0 100644 --- a/completions/bun.zsh +++ b/completions/bun.zsh @@ -563,6 +563,22 @@ _bun_update_completion() { esac } +_bun_outdated_completion() { + _arguments -s -C \ + '--cwd[Set a specific cwd]:cwd' \ + '--verbose[Excessively verbose logging]' \ + '--no-progress[Disable the progress bar]' \ + '--help[Print this help menu]' && + ret=0 + + case $state in + config) + _bun_list_bunfig_toml + + ;; + esac +} + _bun_test_completion() { _arguments -s -C \ '1: :->cmd1' \ @@ -669,6 +685,7 @@ _bun() { 'add\:"Add a dependency to package.json (bun a)" ' 'remove\:"Remove a dependency from package.json (bun rm)" ' 'update\:"Update outdated dependencies & save to package.json" ' + 'outdated\:"Display the latest versions of outdated dependencies" ' 'link\:"Link an npm package globally" ' 'unlink\:"Globally unlink an npm package" ' 'pm\:"More commands for managing packages" ' @@ -740,6 +757,10 @@ _bun() { update) _bun_update_completion + ;; + outdated) + _bun_outdated_completion + ;; 'test') _bun_test_completion @@ -819,6 +840,10 @@ _bun() { update) _bun_update_completion + ;; + outdated) + _bun_outdated_completion + ;; 'test') _bun_test_completion diff --git a/docs/api/binary-data.md b/docs/api/binary-data.md index 7c0be246fc..e0820d44f3 100644 --- a/docs/api/binary-data.md +++ b/docs/api/binary-data.md @@ -219,6 +219,11 @@ The following classes are typed arrays, along with a description of how they int --- +- [`Float16Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Float16Array) +- Every two (2) bytes are interpreted as a 16-bit floating point number. Range -6.104e5 to 6.55e4. + +--- + - [`Float32Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Float32Array) - Every four (4) bytes are interpreted as a 32-bit floating point number. Range -3.4e38 to 3.4e38. @@ -377,6 +382,16 @@ Refer to the [MDN documentation](https://developer.mozilla.org/en-US/docs/Web/Ja It's worth specifically highlighting `Uint8Array`, as it represents a classic "byte array"—a sequence of 8-bit unsigned integers between 0 and 255. This is the most common typed array you'll encounter in JavaScript. +In Bun, and someday in other JavaScript engines, it has methods available for converting between byte arrays and serialized representations of those arrays as base64 or hex strings. 
+ +```ts +new Uint8Array([1, 2, 3, 4, 5]).toBase64(); // "AQIDBA==" +Uint8Array.fromBase64("AQIDBA=="); // Uint8Array(4) [1, 2, 3, 4, 5] + +new Uint8Array([255, 254, 253, 252, 251]).toHex(); // "fffefdfcfb==" +Uint8Array.fromHex("fffefdfcfb"); // Uint8Array(5) [255, 254, 253, 252, 251] +``` + It is the return value of [`TextEncoder#encode`](https://developer.mozilla.org/en-US/docs/Web/API/TextEncoder), and the input type of [`TextDecoder#decode`](https://developer.mozilla.org/en-US/docs/Web/API/TextDecoder), two utility classes designed to translate strings and various binary encodings, most notably `"utf-8"`. ```ts @@ -437,6 +452,7 @@ The contents of a `Blob` can be asynchronously read in various formats. ```ts await blob.text(); // => hello +await blob.bytes(); // => Uint8Array (copies contents) await blob.arrayBuffer(); // => ArrayBuffer (copies contents) await blob.stream(); // => ReadableStream ``` @@ -506,7 +522,7 @@ for await (const chunk of stream) { } ``` -For a more complete discussion of streams in Bun, see [API > Streams](/docs/api/streams). +For a more complete discussion of streams in Bun, see [API > Streams](https://bun.sh/docs/api/streams). ## Conversion @@ -540,6 +556,7 @@ Buffer.from(buf, 0, 10); #### To `string` +As UTF-8: ```ts new TextDecoder().decode(buf); ``` @@ -620,6 +637,7 @@ Buffer.from(arr); #### To `string` +As UTF-8: ```ts new TextDecoder().decode(arr); ``` @@ -633,6 +651,7 @@ Array.from(arr); #### To `Blob` ```ts +// only if arr is a view of its entire backing TypedArray new Blob([arr.buffer], { type: "text/plain" }); ``` @@ -696,6 +715,7 @@ Buffer.from(view.buffer, view.byteOffset, view.byteLength); #### To `string` +As UTF-8: ```ts new TextDecoder().decode(view); ``` @@ -767,9 +787,18 @@ new DataView(buf.buffer, buf.byteOffset, buf.byteLength); #### To `string` +As UTF-8: ```ts buf.toString(); ``` +As base64: +```ts +buf.toString('base64'); +``` +As hex: +```ts +buf.toString('hex'); +``` #### To `number[]` @@ -829,7 +858,7 @@ await blob.arrayBuffer(); #### To `TypedArray` ```ts -new Uint8Array(await blob.arrayBuffer()); +await blob.bytes(); ``` #### To `DataView` @@ -846,6 +875,7 @@ Buffer.from(await blob.arrayBuffer()); #### To `string` +As UTF-8: ```ts await blob.text(); ``` @@ -853,7 +883,7 @@ await blob.text(); #### To `number[]` ```ts -Array.from(new Uint8Array(await blob.arrayBuffer())); +Array.from(await blob.bytes()); ``` #### To `ReadableStream` @@ -931,9 +961,10 @@ Buffer.from(Bun.readableStreamToArrayBuffer(stream)); #### To `string` +As UTF-8: ```ts // with Response -new Response(stream).text(); +await new Response(stream).text(); // with Bun function await Bun.readableStreamToText(stream); @@ -943,8 +974,8 @@ await Bun.readableStreamToText(stream); ```ts // with Response -const buf = await new Response(stream).arrayBuffer(); -Array.from(new Uint8Array(buf)); +const arr = await new Response(stream).bytes(); +Array.from(arr); // with Bun function Array.from(new Uint8Array(Bun.readableStreamToArrayBuffer(stream))); diff --git a/docs/api/cc.md b/docs/api/cc.md new file mode 100644 index 0000000000..212b928df5 --- /dev/null +++ b/docs/api/cc.md @@ -0,0 +1,197 @@ +`bun:ffi` has experimental support for compiling and running C from JavaScript with low overhead. + +## Usage (cc in `bun:ffi`) + +See the [introduction blog post](https://bun.sh/blog/compile-and-run-c-in-js) for more information. 
+ +JavaScript: + +```ts#hello.js +import { cc } from "bun:ffi"; +import source from "./hello.c" with { type: "file" }; + +const { + symbols: { hello }, +} = cc({ + source, + symbols: { + hello: { + args: [], + returns: "int", + }, + }, +}); + +console.log("What is the answer to the universe?", hello()); +``` + +C source: + +```c#hello.c +int hello() { + return 42; +} +``` + +When you run `hello.js`, it will print: + +```sh +$ bun hello.js +What is the answer to the universe? 42 +``` + +Under the hood, `cc` uses [TinyCC](https://bellard.org/tcc/) to compile the C code and then link it with the JavaScript runtime, efficiently converting types in-place. + +### Primitive types + +The same `FFIType` values in [`dlopen`](/docs/api/ffi) are supported in `cc`. + +| `FFIType` | C Type | Aliases | +| ---------- | -------------- | --------------------------- | +| cstring | `char*` | | +| function | `(void*)(*)()` | `fn`, `callback` | +| ptr | `void*` | `pointer`, `void*`, `char*` | +| i8 | `int8_t` | `int8_t` | +| i16 | `int16_t` | `int16_t` | +| i32 | `int32_t` | `int32_t`, `int` | +| i64 | `int64_t` | `int64_t` | +| i64_fast | `int64_t` | | +| u8 | `uint8_t` | `uint8_t` | +| u16 | `uint16_t` | `uint16_t` | +| u32 | `uint32_t` | `uint32_t` | +| u64 | `uint64_t` | `uint64_t` | +| u64_fast | `uint64_t` | | +| f32 | `float` | `float` | +| f64 | `double` | `double` | +| bool | `bool` | | +| char | `char` | | +| napi_env | `napi_env` | | +| napi_value | `napi_value` | | + +### Strings, objects, and non-primitive types + +To make it easier to work with strings, objects, and other non-primitive types that don't map 1:1 to C types, `cc` supports N-API. + +To pass or receive a JavaScript values without any type conversions from a C function, you can use `napi_value`. + +You can also pass a `napi_env` to receive the N-API environment used to call the JavaScript function. + +#### Returning a C string to JavaScript + +For example, if you have a string in C, you can return it to JavaScript like this: + +```ts#hello.js +import { cc } from "bun:ffi"; +import source from "./hello.c" with { type: "file" }; + +const { + symbols: { hello }, +} = cc({ + source, + symbols: { + hello: { + args: ["napi_env"], + returns: "napi_value", + }, + }, +}); + +const result = hello(); +``` + +And in C: + +```c#hello.c +#include + +napi_value hello(napi_env env) { + napi_value result; + napi_create_string_utf8(env, "Hello, Napi!", NAPI_AUTO_LENGTH, &result); + return result; +} +``` + +You can also use this to return other types like objects and arrays: + +```c#hello.c +#include + +napi_value hello(napi_env env) { + napi_value result; + napi_create_object(env, &result); + return result; +} +``` + +### `cc` Reference + +#### `library: string[]` + +The `library` array is used to specify the libraries that should be linked with the C code. + +```ts +type Library = string[]; + +cc({ + source: "hello.c", + library: ["sqlite3"], +}); +``` + +#### `symbols` + +The `symbols` object is used to specify the functions and variables that should be exposed to JavaScript. + +```ts +type Symbols = { + [key: string]: { + args: FFIType[]; + returns: FFIType; + }; +}; +``` + +#### `source` + +The `source` is a file path to the C code that should be compiled and linked with the JavaScript runtime. 
+ +```ts +type Source = string | URL | BunFile; + +cc({ + source: "hello.c", + symbols: { + hello: { + args: [], + returns: "int", + }, + }, +}); +``` + +#### `flags: string | string[]` + +The `flags` is an optional array of strings that should be passed to the TinyCC compiler. + +```ts +type Flags = string | string[]; +``` + +These are flags like `-I` for include directories and `-D` for preprocessor definitions. + +#### `defines: Record` + +The `defines` is an optional object that should be passed to the TinyCC compiler. + +```ts +type Defines = Record; + +cc({ + source: "hello.c", + defines: { + "NDEBUG": "1", + }, +}); +``` + +These are preprocessor definitions passed to the TinyCC compiler. diff --git a/docs/api/color.md b/docs/api/color.md new file mode 100644 index 0000000000..da27adc625 --- /dev/null +++ b/docs/api/color.md @@ -0,0 +1,262 @@ +`Bun.color(input, outputFormat?)` leverages Bun's CSS parser to parse, normalize, and convert colors from user input to a variety of output formats, including: + +| Format | Example | +| ------------ | -------------------------------- | +| `"css"` | `"red"` | +| `"ansi"` | `"\x1b[38;2;255;0;0m"` | +| `"ansi-16"` | `"\x1b[38;5;\tm"` | +| `"ansi-256"` | `"\x1b[38;5;196m"` | +| `"ansi-16m"` | `"\x1b[38;2;255;0;0m"` | +| `"number"` | `0x1a2b3c` | +| `"rgb"` | `"rgb(255, 99, 71)"` | +| `"rgba"` | `"rgba(255, 99, 71, 0.5)"` | +| `"hsl"` | `"hsl(120, 50%, 50%)"` | +| `"hex"` | `"#1a2b3c"` | +| `"HEX"` | `"#1A2B3C"` | +| `"{rgb}"` | `{ r: 255, g: 99, b: 71 }` | +| `"{rgba}"` | `{ r: 255, g: 99, b: 71, a: 1 }` | +| `"[rgb]"` | `[ 255, 99, 71 ]` | +| `"[rgba]"` | `[ 255, 99, 71, 255]` | + +There are many different ways to use this API: + +- Validate and normalize colors to persist in a database (`number` is the most database-friendly) +- Convert colors to different formats +- Colorful logging beyond the 16 colors many use today (use `ansi` if you don't want to figure out what the user's terminal supports, otherwise use `ansi-16`, `ansi-256`, or `ansi-16m` for how many colors the terminal supports) +- Format colors for use in CSS injected into HTML +- Get the `r`, `g`, `b`, and `a` color components as JavaScript objects or numbers from a CSS color string + +You can think of this as an alternative to the popular npm packages [`color`](https://github.com/Qix-/color) and [`tinycolor2`](https://github.com/bgrins/TinyColor) except with full support for parsing CSS color strings and zero dependencies built directly into Bun. + +### Flexible input + +You can pass in any of the following: + +- Standard CSS color names like `"red"` +- Numbers like `0xff0000` +- Hex strings like `"#f00"` +- RGB strings like `"rgb(255, 0, 0)"` +- RGBA strings like `"rgba(255, 0, 0, 1)"` +- HSL strings like `"hsl(0, 100%, 50%)"` +- HSLA strings like `"hsla(0, 100%, 50%, 1)"` +- RGB objects like `{ r: 255, g: 0, b: 0 }` +- RGBA objects like `{ r: 255, g: 0, b: 0, a: 1 }` +- RGB arrays like `[255, 0, 0]` +- RGBA arrays like `[255, 0, 0, 255]` +- LAB strings like `"lab(50% 50% 50%)"` +- ... anything else that CSS can parse as a single color value + +### Format colors as CSS + +The `"css"` format outputs valid CSS for use in stylesheets, inline styles, CSS variables, css-in-js, etc. It returns the most compact representation of the color as a string. 
+ +```ts +Bun.color("red", "css"); // "red" +Bun.color(0xff0000, "css"); // "#f000" +Bun.color("#f00", "css"); // "red" +Bun.color("#ff0000", "css"); // "red" +Bun.color("rgb(255, 0, 0)", "css"); // "red" +Bun.color("rgba(255, 0, 0, 1)", "css"); // "red" +Bun.color("hsl(0, 100%, 50%)", "css"); // "red" +Bun.color("hsla(0, 100%, 50%, 1)", "css"); // "red" +Bun.color({ r: 255, g: 0, b: 0 }, "css"); // "red" +Bun.color({ r: 255, g: 0, b: 0, a: 1 }, "css"); // "red" +Bun.color([255, 0, 0], "css"); // "red" +Bun.color([255, 0, 0, 255], "css"); // "red" +``` + +If the input is unknown or fails to parse, `Bun.color` returns `null`. + +### Format colors as ANSI (for terminals) + +The `"ansi"` format outputs ANSI escape codes for use in terminals to make text colorful. + +```ts +Bun.color("red", "ansi"); // "\u001b[38;2;255;0;0m" +Bun.color(0xff0000, "ansi"); // "\u001b[38;2;255;0;0m" +Bun.color("#f00", "ansi"); // "\u001b[38;2;255;0;0m" +Bun.color("#ff0000", "ansi"); // "\u001b[38;2;255;0;0m" +Bun.color("rgb(255, 0, 0)", "ansi"); // "\u001b[38;2;255;0;0m" +Bun.color("rgba(255, 0, 0, 1)", "ansi"); // "\u001b[38;2;255;0;0m" +Bun.color("hsl(0, 100%, 50%)", "ansi"); // "\u001b[38;2;255;0;0m" +Bun.color("hsla(0, 100%, 50%, 1)", "ansi"); // "\u001b[38;2;255;0;0m" +Bun.color({ r: 255, g: 0, b: 0 }, "ansi"); // "\u001b[38;2;255;0;0m" +Bun.color({ r: 255, g: 0, b: 0, a: 1 }, "ansi"); // "\u001b[38;2;255;0;0m" +Bun.color([255, 0, 0], "ansi"); // "\u001b[38;2;255;0;0m" +Bun.color([255, 0, 0, 255], "ansi"); // "\u001b[38;2;255;0;0m" +``` + +This gets the color depth of stdout and automatically chooses one of `"ansi-16m"`, `"ansi-256"`, `"ansi-16"` based on the environment variables. If stdout doesn't support any form of ANSI color, it returns an empty string. As with the rest of Bun's color API, if the input is unknown or fails to parse, it returns `null`. + +#### 24-bit ANSI colors (`ansi-16m`) + +The `"ansi-16m"` format outputs 24-bit ANSI colors for use in terminals to make text colorful. 24-bit color means you can display 16 million colors on supported terminals, and requires a modern terminal that supports it. + +This converts the input color to RGBA, and then outputs that as an ANSI color. + +```ts +Bun.color("red", "ansi-16m"); // "\x1b[38;2;255;0;0m" +Bun.color(0xff0000, "ansi-16m"); // "\x1b[38;2;255;0;0m" +Bun.color("#f00", "ansi-16m"); // "\x1b[38;2;255;0;0m" +Bun.color("#ff0000", "ansi-16m"); // "\x1b[38;2;255;0;0m" +``` + +#### 256 ANSI colors (`ansi-256`) + +The `"ansi-256"` format approximates the input color to the nearest of the 256 ANSI colors supported by some terminals. + +```ts +Bun.color("red", "ansi-256"); // "\u001b[38;5;196m" +Bun.color(0xff0000, "ansi-256"); // "\u001b[38;5;196m" +Bun.color("#f00", "ansi-256"); // "\u001b[38;5;196m" +Bun.color("#ff0000", "ansi-256"); // "\u001b[38;5;196m" +``` + +To convert from RGBA to one of the 256 ANSI colors, we ported the algorithm that [`tmux` uses](https://github.com/tmux/tmux/blob/dae2868d1227b95fd076fb4a5efa6256c7245943/colour.c#L44-L55). + +#### 16 ANSI colors (`ansi-16`) + +The `"ansi-16"` format approximates the input color to the nearest of the 16 ANSI colors supported by most terminals. 
+ +```ts +Bun.color("red", "ansi-16"); // "\u001b[38;5;\tm" +Bun.color(0xff0000, "ansi-16"); // "\u001b[38;5;\tm" +Bun.color("#f00", "ansi-16"); // "\u001b[38;5;\tm" +Bun.color("#ff0000", "ansi-16"); // "\u001b[38;5;\tm" +``` + +This works by first converting the input to a 24-bit RGB color space, then to `ansi-256`, and then we convert that to the nearest 16 ANSI color. + +### Format colors as numbers + +The `"number"` format outputs a 24-bit number for use in databases, configuration, or any other use case where a compact representation of the color is desired. + +```ts +Bun.color("red", "number"); // 16711680 +Bun.color(0xff0000, "number"); // 16711680 +Bun.color({ r: 255, g: 0, b: 0 }, "number"); // 16711680 +Bun.color([255, 0, 0], "number"); // 16711680 +Bun.color("rgb(255, 0, 0)", "number"); // 16711680 +Bun.color("rgba(255, 0, 0, 1)", "number"); // 16711680 +Bun.color("hsl(0, 100%, 50%)", "number"); // 16711680 +Bun.color("hsla(0, 100%, 50%, 1)", "number"); // 16711680 +``` + +### Get the red, green, blue, and alpha channels + +You can use the `"{rgba}"`, `"{rgb}"`, `"[rgba]"` and `"[rgb]"` formats to get the red, green, blue, and alpha channels as objects or arrays. + +#### `{rgba}` object + +The `"{rgba}"` format outputs an object with the red, green, blue, and alpha channels. + +```ts +type RGBAObject = { + // 0 - 255 + r: number; + // 0 - 255 + g: number; + // 0 - 255 + b: number; + // 0 - 1 + a: number; +}; +``` + +Example: + +```ts +Bun.color("hsl(0, 0%, 50%)", "{rgba}"); // { r: 128, g: 128, b: 128, a: 1 } +Bun.color("red", "{rgba}"); // { r: 255, g: 0, b: 0, a: 1 } +Bun.color(0xff0000, "{rgba}"); // { r: 255, g: 0, b: 0, a: 1 } +Bun.color({ r: 255, g: 0, b: 0 }, "{rgba}"); // { r: 255, g: 0, b: 0, a: 1 } +Bun.color([255, 0, 0], "{rgba}"); // { r: 255, g: 0, b: 0, a: 1 } +``` + +To behave similarly to CSS, the `a` channel is a decimal number between `0` and `1`. + +The `"{rgb}"` format is similar, but it doesn't include the alpha channel. + +```ts +Bun.color("hsl(0, 0%, 50%)", "{rgb}"); // { r: 128, g: 128, b: 128 } +Bun.color("red", "{rgb}"); // { r: 255, g: 0, b: 0 } +Bun.color(0xff0000, "{rgb}"); // { r: 255, g: 0, b: 0 } +Bun.color({ r: 255, g: 0, b: 0 }, "{rgb}"); // { r: 255, g: 0, b: 0 } +Bun.color([255, 0, 0], "{rgb}"); // { r: 255, g: 0, b: 0 } +``` + +#### `[rgba]` array + +The `"[rgba]"` format outputs an array with the red, green, blue, and alpha channels. + +```ts +// All values are 0 - 255 +type RGBAArray = [number, number, number, number]; +``` + +Example: + +```ts +Bun.color("hsl(0, 0%, 50%)", "[rgba]"); // [128, 128, 128, 255] +Bun.color("red", "[rgba]"); // [255, 0, 0, 255] +Bun.color(0xff0000, "[rgba]"); // [255, 0, 0, 255] +Bun.color({ r: 255, g: 0, b: 0 }, "[rgba]"); // [255, 0, 0, 255] +Bun.color([255, 0, 0], "[rgba]"); // [255, 0, 0, 255] +``` + +Unlike the `"{rgba}"` format, the alpha channel is an integer between `0` and `255`. This is useful for typed arrays where each channel must be the same underlying type. + +The `"[rgb]"` format is similar, but it doesn't include the alpha channel. + +```ts +Bun.color("hsl(0, 0%, 50%)", "[rgb]"); // [128, 128, 128] +Bun.color("red", "[rgb]"); // [255, 0, 0] +Bun.color(0xff0000, "[rgb]"); // [255, 0, 0] +Bun.color({ r: 255, g: 0, b: 0 }, "[rgb]"); // [255, 0, 0] +Bun.color([255, 0, 0], "[rgb]"); // [255, 0, 0] +``` + +### Format colors as hex strings + +The `"hex"` format outputs a lowercase hex string for use in CSS or other contexts. 
+ +```ts +Bun.color("hsl(0, 0%, 50%)", "hex"); // "#808080" +Bun.color("red", "hex"); // "#ff0000" +Bun.color(0xff0000, "hex"); // "#ff0000" +Bun.color({ r: 255, g: 0, b: 0 }, "hex"); // "#ff0000" +Bun.color([255, 0, 0], "hex"); // "#ff0000" +``` + +The `"HEX"` format is similar, but it outputs a hex string with uppercase letters instead of lowercase letters. + +```ts +Bun.color("hsl(0, 0%, 50%)", "HEX"); // "#808080" +Bun.color("red", "HEX"); // "#FF0000" +Bun.color(0xff0000, "HEX"); // "#FF0000" +Bun.color({ r: 255, g: 0, b: 0 }, "HEX"); // "#FF0000" +Bun.color([255, 0, 0], "HEX"); // "#FF0000" +``` + +### Bundle-time client-side color formatting + +Like many of Bun's APIs, you can use macros to invoke `Bun.color` at bundle-time for use in client-side JavaScript builds: + +```ts#client-side.ts +import { color } from "bun" with { type: "macro" }; + +console.log(color("#f00", "css")); +``` + +Then, build the client-side code: + +```sh +bun build ./client-side.ts +``` + +This will output the following to `client-side.js`: + +```js +// client-side.ts +console.log("red"); +``` diff --git a/docs/api/dns.md b/docs/api/dns.md index bdc6c83e86..4553263fab 100644 --- a/docs/api/dns.md +++ b/docs/api/dns.md @@ -14,7 +14,7 @@ In Bun v1.1.9, we added support for DNS caching. This cache makes repeated conne At the time of writing, we cache up to 255 entries for a maximum of 30 seconds (each). If any connections to a host fail, we remove the entry from the cache. When multiple connections are made to the same host simultaneously, DNS lookups are deduplicated to avoid making multiple requests for the same host. -This cache is automatically used by; +This cache is automatically used by: - `bun install` - `fetch()` @@ -99,7 +99,7 @@ console.log(stats); ### Configuring DNS cache TTL -Bun defaults to 30 seconds for the TTL of DNS cache entries. To change this, you can set the envionrment variable `$BUN_CONFIG_DNS_TIME_TO_LIVE_SECONDS`. For example, to set the TTL to 5 seconds: +Bun defaults to 30 seconds for the TTL of DNS cache entries. To change this, you can set the environment variable `$BUN_CONFIG_DNS_TIME_TO_LIVE_SECONDS`. For example, to set the TTL to 5 seconds: ```sh BUN_CONFIG_DNS_TIME_TO_LIVE_SECONDS=5 bun run my-script.ts diff --git a/docs/api/fetch.md b/docs/api/fetch.md index e02f064e18..afbf53c5c1 100644 --- a/docs/api/fetch.md +++ b/docs/api/fetch.md @@ -42,7 +42,7 @@ const response = await fetch("http://example.com", { }); ``` -`body` can be a string, a `FormData` object, an `ArrayBuffer`, a `Blob`, and more. See the [MDN documentation](https://developer.mozilla.org/en-US/docs/Web/API/Body/body) for more information. +`body` can be a string, a `FormData` object, an `ArrayBuffer`, a `Blob`, and more. See the [MDN documentation](https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API/Using_Fetch#setting_a_body) for more information. ### Proxying requests @@ -241,7 +241,7 @@ dns.prefetch("bun.sh", 443); By default, Bun caches and deduplicates DNS queries in-memory for up to 30 seconds. You can see the cache stats by calling `dns.getCacheStats()`: -To learn more about DNS caching in Bun, see the [DNS caching](/docs/api/dns) documentation. +To learn more about DNS caching in Bun, see the [DNS caching](https://bun.sh/docs/api/dns) documentation. 
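Putting those pieces together, here is a minimal sketch (assuming the `dns` export from `"bun"` and the `dns.prefetch()` / `dns.getCacheStats()` calls described above):

```ts
import { dns } from "bun";

// Start the DNS lookup before the first request needs it.
dns.prefetch("bun.sh", 443);

// Requests to the same host within the TTL reuse the cached lookup.
await fetch("https://bun.sh/");

// Inspect the in-memory DNS cache statistics.
console.log(dns.getCacheStats());
```

Prefetching only helps when you know the hostname ahead of time; otherwise the first `fetch` simply performs (and caches) the lookup itself.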
### Preconnect to a host diff --git a/docs/api/ffi.md b/docs/api/ffi.md index 1a276ba035..6284689cb2 100644 --- a/docs/api/ffi.md +++ b/docs/api/ffi.md @@ -1,6 +1,6 @@ Use the built-in `bun:ffi` module to efficiently call native libraries from JavaScript. It works with languages that support the C ABI (Zig, Rust, C/C++, C#, Nim, Kotlin, etc). -## Usage (`bun:ffi`) +## dlopen usage (`bun:ffi`) To print the version number of `sqlite3`: @@ -108,25 +108,30 @@ $ zig build-lib add.cpp -dynamic -lc -lc++ The following `FFIType` values are supported. -| `FFIType` | C Type | Aliases | -| --------- | -------------- | --------------------------- | -| cstring | `char*` | | -| function | `(void*)(*)()` | `fn`, `callback` | -| ptr | `void*` | `pointer`, `void*`, `char*` | -| i8 | `int8_t` | `int8_t` | -| i16 | `int16_t` | `int16_t` | -| i32 | `int32_t` | `int32_t`, `int` | -| i64 | `int64_t` | `int64_t` | -| i64_fast | `int64_t` | | -| u8 | `uint8_t` | `uint8_t` | -| u16 | `uint16_t` | `uint16_t` | -| u32 | `uint32_t` | `uint32_t` | -| u64 | `uint64_t` | `uint64_t` | -| u64_fast | `uint64_t` | | -| f32 | `float` | `float` | -| f64 | `double` | `double` | -| bool | `bool` | | -| char | `char` | | +| `FFIType` | C Type | Aliases | +| ---------- | -------------- | --------------------------- | +| buffer | `char*` | | +| cstring | `char*` | | +| function | `(void*)(*)()` | `fn`, `callback` | +| ptr | `void*` | `pointer`, `void*`, `char*` | +| i8 | `int8_t` | `int8_t` | +| i16 | `int16_t` | `int16_t` | +| i32 | `int32_t` | `int32_t`, `int` | +| i64 | `int64_t` | `int64_t` | +| i64_fast | `int64_t` | | +| u8 | `uint8_t` | `uint8_t` | +| u16 | `uint16_t` | `uint16_t` | +| u32 | `uint32_t` | `uint32_t` | +| u64 | `uint64_t` | `uint64_t` | +| u64_fast | `uint64_t` | | +| f32 | `float` | `float` | +| f64 | `double` | `double` | +| bool | `bool` | | +| char | `char` | | +| napi_env | `napi_env` | | +| napi_value | `napi_value` | | + +Note: `buffer` arguments must be a `TypedArray` or `DataView`. ## Strings diff --git a/docs/api/file-io.md b/docs/api/file-io.md index 206abfe475..f9fd2368f3 100644 --- a/docs/api/file-io.md +++ b/docs/api/file-io.md @@ -1,8 +1,8 @@ {% callout %} - + -**Note** — The `Bun.file` and `Bun.write` APIs documented on this page are heavily optimized and represent the recommended way to perform file-system tasks using Bun. For operations that are not yet available with `Bun.file`, such as `mkdir` or `readdir`, you can use Bun's [nearly complete](/docs/runtime/nodejs-apis#node-fs) implementation of the [`node:fs`](https://nodejs.org/api/fs.html) module. +**Note** — The `Bun.file` and `Bun.write` APIs documented on this page are heavily optimized and represent the recommended way to perform file-system tasks using Bun. For operations that are not yet available with `Bun.file`, such as `mkdir` or `readdir`, you can use Bun's [nearly complete](https://bun.sh/docs/runtime/nodejs-apis#node-fs) implementation of the [`node:fs`](https://nodejs.org/api/fs.html) module. {% /callout %} diff --git a/docs/api/globals.md b/docs/api/globals.md index fe7cd60c64..8e5a89651a 100644 --- a/docs/api/globals.md +++ b/docs/api/globals.md @@ -34,7 +34,7 @@ Bun implements the following globals. - [`Buffer`](https://nodejs.org/api/buffer.html#class-buffer) - Node.js -- See [Node.js > `Buffer`](/docs/runtime/nodejs-apis#node-buffer) +- See [Node.js > `Buffer`](https://bun.sh/docs/runtime/nodejs-apis#node-buffer) --- @@ -172,7 +172,7 @@ Bun implements the following globals. 
- [`global`](https://nodejs.org/api/globals.html#global) - Node.js -- See [Node.js > `global`](/docs/runtime/nodejs-apis#global). +- See [Node.js > `global`](https://bun.sh/docs/runtime/nodejs-apis#global). --- @@ -188,7 +188,7 @@ Bun implements the following globals. --- -- [`HTMLRewriter`](/docs/api/html-rewriter) +- [`HTMLRewriter`](https://bun.sh/docs/api/html-rewriter) - Cloudflare -   @@ -220,7 +220,7 @@ Bun implements the following globals. - [`process`](https://nodejs.org/api/process.html) - Node.js -- See [Node.js > `process`](/docs/runtime/nodejs-apis#node-process) +- See [Node.js > `process`](https://bun.sh/docs/runtime/nodejs-apis#node-process) --- diff --git a/docs/api/hashing.md b/docs/api/hashing.md index 1ff4a83f4f..5cc40e2a75 100644 --- a/docs/api/hashing.md +++ b/docs/api/hashing.md @@ -65,6 +65,73 @@ const isMatch = Bun.password.verifySync(password, hash); // => true ``` +### Salt + +When you use `Bun.password.hash`, a salt is automatically generated and included in the hash. + +### bcrypt - Modular Crypt Format + +In the following [Modular Crypt Format](https://passlib.readthedocs.io/en/stable/modular_crypt_format.html) hash (used by `bcrypt`): + +Input: + +```ts +await Bun.password.hash("hello", { + algorithm: "bcrypt", +}); +``` + +Output: + +```sh +$2b$10$Lyj9kHYZtiyfxh2G60TEfeqs7xkkGiEFFDi3iJGc50ZG/XJ1sxIFi; +``` + +The format is composed of: + +- `bcrypt`: `$2b` +- `rounds`: `$10` - rounds (log10 of the actual number of rounds) +- `salt`: `$Lyj9kHYZtiyfxh2G60TEfeqs7xkkGiEFFDi3iJGc50ZG/XJ1sxIFi` +- `hash`: `$GzJ8PuBi+K+BVojzPfS5mjnC8OpLGtv8KJqF99eP6a4` + +By default, the bcrypt library truncates passwords longer than 72 bytes. In Bun, if you pass `Bun.password.hash` a password longer than 72 bytes and use the `bcrypt` algorithm, the password will be hashed via SHA-512 before being passed to bcrypt. + +```ts +await Bun.password.hash("hello".repeat(100), { + algorithm: "bcrypt", +}); +``` + +So instead of sending bcrypt a 500-byte password silently truncated to 72 bytes, Bun will hash the password using SHA-512 and send the hashed password to bcrypt (only if it exceeds 72 bytes). This is a more secure default behavior. + +### argon2 - PHC format + +In the following [PHC format](https://github.com/P-H-C/phc-string-format/blob/master/phc-sf-spec.md) hash (used by `argon2`): + +Input: + +```ts +await Bun.password.hash("hello", { + algorithm: "argon2id", +}); +``` + +Output: + +```sh +$argon2id$v=19$m=65536,t=2,p=1$xXnlSvPh4ym5KYmxKAuuHVlDvy2QGHBNuI6bJJrRDOs$2YY6M48XmHn+s5NoBaL+ficzXajq2Yj8wut3r0vnrwI +``` + +The format is composed of: + +- `algorithm`: `$argon2id` +- `version`: `$v=19` +- `memory cost`: `65536` +- `iterations`: `t=2` +- `parallelism`: `p=1` +- `salt`: `$xXnlSvPh4ym5KYmxKAuuHVlDvy2QGHBNuI6bJJrRDOs` +- `hash`: `$2YY6M48XmHn+s5NoBaL+ficzXajq2Yj8wut3r0vnrwI` + ## `Bun.hash` `Bun.hash` is a collection of utilities for _non-cryptographic_ hashing. Non-cryptographic hashing algorithms are optimized for speed of computation over collision-resistance or security. @@ -206,4 +273,42 @@ console.log(arr); // => Uint8Array(32) [ 185, 77, 39, 185, 147, ... ] ``` - +### HMAC in `Bun.CryptoHasher` + +`Bun.CryptoHasher` can be used to compute HMAC digests. To do so, pass the key to the constructor. 
+ +```ts +const hasher = new Bun.CryptoHasher("sha256", "secret-key"); +hasher.update("hello world"); +console.log(hasher.digest("hex")); +// => "095d5a21fe6d0646db223fdf3de6436bb8dfb2fab0b51677ecf6441fcf5f2a67" +``` + +When using HMAC, a more limited set of algorithms are supported: + +- `"blake2b512"` +- `"md5"` +- `"sha1"` +- `"sha224"` +- `"sha256"` +- `"sha384"` +- `"sha512-224"` +- `"sha512-256"` +- `"sha512"` + +Unlike the non-HMAC `Bun.CryptoHasher`, the HMAC `Bun.CryptoHasher` instance is not reset after `.digest()` is called, and attempting to use the same instance again will throw an error. + +Other methods like `.copy()` and `.update()` are supported (as long as it's before `.digest()`), but methods like `.digest()` that finalize the hasher are not. + +```ts +const hasher = new Bun.CryptoHasher("sha256", "secret-key"); +hasher.update("hello world"); + +const copy = hasher.copy(); +copy.update("!"); +console.log(copy.digest("hex")); +// => "3840176c3d8923f59ac402b7550404b28ab11cb0ef1fa199130a5c37864b5497" + +console.log(hasher.digest("hex")); +// => "095d5a21fe6d0646db223fdf3de6436bb8dfb2fab0b51677ecf6441fcf5f2a67" +``` diff --git a/docs/api/http.md b/docs/api/http.md index 1f873dbb5d..f6e6499dc4 100644 --- a/docs/api/http.md +++ b/docs/api/http.md @@ -1,7 +1,7 @@ The page primarily documents the Bun-native `Bun.serve` API. Bun also implements [`fetch`](https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API) and the Node.js [`http`](https://nodejs.org/api/http.html) and [`https`](https://nodejs.org/api/https.html) modules. {% callout %} -These modules have been re-implemented to use Bun's fast internal HTTP infrastructure. Feel free to use these modules directly; frameworks like [Express](https://expressjs.com/) that depend on these modules should work out of the box. For granular compatibility information, see [Runtime > Node.js APIs](/docs/runtime/nodejs-apis). +These modules have been re-implemented to use Bun's fast internal HTTP infrastructure. Feel free to use these modules directly; frameworks like [Express](https://expressjs.com/) that depend on these modules should work out of the box. For granular compatibility information, see [Runtime > Node.js APIs](https://bun.sh/docs/runtime/nodejs-apis). {% /callout %} To start a high-performance HTTP server with a clean API, the recommended approach is [`Bun.serve`](#start-a-server-bun-serve). @@ -70,6 +70,116 @@ const server = Bun.serve({ }); ``` +### Static routes + +Use the `static` option to serve static `Response` objects by route. + +```ts +// Bun v1.1.27+ required +Bun.serve({ + static: { + // health-check endpoint + "/api/health-check": new Response("All good!"), + + // redirect from /old-link to /new-link + "/old-link": Response.redirect("/new-link", 301), + + // serve static text + "/": new Response("Hello World"), + + // serve a file by buffering it in memory + "/index.html": new Response(await Bun.file("./index.html").bytes(), { + headers: { + "Content-Type": "text/html", + }, + }), + "/favicon.ico": new Response(await Bun.file("./favicon.ico").bytes(), { + headers: { + "Content-Type": "image/x-icon", + }, + }), + + // serve JSON + "/api/version.json": Response.json({ version: "1.0.0" }), + }, + + fetch(req) { + return new Response("404!"); + }, +}); +``` + +Static routes support headers, status code, and other `Response` options. 
+ +```ts +Bun.serve({ + static: { + "/api/time": new Response(new Date().toISOString(), { + headers: { + "X-Custom-Header": "Bun!", + }, + }), + }, + + fetch(req) { + return new Response("404!"); + }, +}); +``` + +Static routes can serve Response bodies faster than `fetch` handlers because they don't create `Request` objects, they don't create `AbortSignal`, they don't create additional `Response` objects. The only per-request memory allocation is the TCP/TLS socket data needed for each request. + +{% note %} +`static` is experimental +{% /note %} + +Static route responses are cached for the lifetime of the server object. To reload static routes, call `server.reload(options)`. + +```ts +const server = Bun.serve({ + static: { + "/api/time": new Response(new Date().toISOString()), + }, + + fetch(req) { + return new Response("404!"); + }, +}); + +// Update the time every second. +setInterval(() => { + server.reload({ + static: { + "/api/time": new Response(new Date().toISOString()), + }, + + fetch(req) { + return new Response("404!"); + }, + }); +}, 1000); +``` + +Reloading static routes only impact the next request. In-flight requests continue to use the old static routes. After in-flight requests to old static routes are finished, the old static routes are freed from memory. + +To simplify error handling, static routes do not support streaming response bodies from `ReadableStream` or an `AsyncIterator`. Fortunately, you can still buffer the response in memory first: + +```ts +const time = await fetch("https://api.example.com/v1/data"); +// Buffer the response in memory first. +const blob = await time.blob(); + +const server = Bun.serve({ + static: { + "/api/data": new Response(blob), + }, + + fetch(req) { + return new Response("404!"); + }, +}); +``` + ### Changing the `port` and `hostname` To configure which port and hostname the server will listen on, set `port` and `hostname` in the options object. @@ -292,7 +402,7 @@ Bun.serve({ }); ``` -### Sever name indication (SNI) +### Server name indication (SNI) To configure the server name indication (SNI) for the server, set the `serverName` field in the `tls` object. @@ -326,7 +436,24 @@ Bun.serve({ }); ``` -## Object syntax +## idleTimeout + +To configure the idle timeout, set the `idleTimeout` field in Bun.serve. + +```ts +Bun.serve({ + // 10 seconds: + idleTimeout: 10, + + fetch(req) { + return new Response("Bun!"); + }, +}); +``` + +This is the maximum amount of time a connection is allowed to be idle before the server closes it. A connection is idling if there is no data sent or received. + +## export default syntax Thus far, the examples on this page have used the explicit `Bun.serve` API. Bun also supports an alternate syntax. @@ -348,7 +475,7 @@ Instead of passing the server options into `Bun.serve`, `export default` it. Thi $ bun --hot server.ts ``` --> - + ## Streaming files diff --git a/docs/api/semver.md b/docs/api/semver.md index f27ec53e66..dacafa95c7 100644 --- a/docs/api/semver.md +++ b/docs/api/semver.md @@ -4,7 +4,7 @@ It's about 20x faster than `node-semver`. ![Benchmark](https://github.com/oven-sh/bun/assets/709451/94746adc-8aba-4baf-a143-3c355f8e0f78) -Currently, this API is two functions. 
+Currently, this API provides two functions : #### `Bun.semver.satisfies(version: string, range: string): boolean` diff --git a/docs/api/spawn.md b/docs/api/spawn.md index e540cc8316..3097af8585 100644 --- a/docs/api/spawn.md +++ b/docs/api/spawn.md @@ -179,7 +179,7 @@ proc.kill(); // specify an exit code The parent `bun` process will not terminate until all child processes have exited. Use `proc.unref()` to detach the child process from the parent. -``` +```ts const proc = Bun.spawn(["bun", "--version"]); proc.unref(); ``` diff --git a/docs/api/sqlite.md b/docs/api/sqlite.md index 553ca03902..d39b3d88a9 100644 --- a/docs/api/sqlite.md +++ b/docs/api/sqlite.md @@ -325,6 +325,28 @@ As a performance optimization, the class constructor is not called, default init The database columns are set as properties on the class instance. +### `.iterate()` (`@@iterator`) + +Use `.iterate()` to run a query and incrementally return results. This is useful for large result sets that you want to process one row at a time without loading all the results into memory. + +```ts +const query = db.query("SELECT * FROM foo"); +for (const row of query.iterate()) { + console.log(row); +} +``` + +You can also use the `@@iterator` protocol: + +```ts +const query = db.query("SELECT * FROM foo"); +for (const row of query) { + console.log(row); +} +``` + +This feature was added in Bun v1.1.31. + ### `.values()` Use `values()` to run a query and get back all results as an array of arrays. @@ -419,7 +441,7 @@ const results = query.all("hello", "goodbye"); sqlite supports signed 64 bit integers, but JavaScript only supports signed 52 bit integers or arbitrary precision integers with `bigint`. -`bigint` input is supported everywhere, but by default `bun:sqlite` returns integers as `number` types. If you need to handle integers larger than 2^53, set `safeInteger` option to `true` when creating a `Database` instance. This also validates that `bigint` passed to `bun:sqlite` do not exceed 64 bits. +`bigint` input is supported everywhere, but by default `bun:sqlite` returns integers as `number` types. If you need to handle integers larger than 2^53, set `safeIntegers` option to `true` when creating a `Database` instance. This also validates that `bigint` passed to `bun:sqlite` do not exceed 64 bits. By default, `bun:sqlite` returns integers as `number` types. If you need to handle integers larger than 2^53, you can use the `bigint` type. diff --git a/docs/api/test.md b/docs/api/test.md index 6704d407d6..d9898ffc0f 100644 --- a/docs/api/test.md +++ b/docs/api/test.md @@ -1 +1 @@ -See the [`bun test`](/docs/cli/test) documentation. +See the [`bun test`](https://bun.sh/docs/cli/test) documentation. diff --git a/docs/api/utils.md b/docs/api/utils.md index fa28674f86..3b87922106 100644 --- a/docs/api/utils.md +++ b/docs/api/utils.md @@ -106,6 +106,57 @@ const ls = Bun.which("ls", { console.log(ls); // null ``` +You can think of this as a builtin alternative to the [`which`](https://www.npmjs.com/package/which) npm package. + +## `Bun.randomUUIDv7()` + +`Bun.randomUUIDv7()` returns a [UUID v7](https://www.ietf.org/archive/id/draft-peabody-dispatch-new-uuid-format-01.html#name-uuidv7-layout-and-bit-order), which is monotonic and suitable for sorting and databases. + +```ts +import { randomUUIDv7 } from "bun"; + +const id = randomUUIDv7(); +// => "0192ce11-26d5-7dc3-9305-1426de888c5a" +``` + +A UUID v7 is a 128-bit value that encodes the current timestamp, a random value, and a counter. 
The timestamp is encoded using the lowest 48 bits, and the random value and counter are encoded using the remaining bits. + +The `timestamp` parameter defaults to the current time in milliseconds. When the timestamp changes, the counter is reset to a psuedo-random integer wrapped to 4096. This counter is atomic and threadsafe, meaning that using `Bun.randomUUIDv7()` in many Workers within the same process running at the same timestamp will not have colliding counter values. + +The final 8 bytes of the UUID are a cryptographically secure random value. It uses the same random number generator used by `crypto.randomUUID()` (which comes from BoringSSL, which in turn comes from the platform-specific system random number generator usually provided by the underlying hardware). + +```ts +namespace Bun { + function randomUUIDv7( + encoding?: "hex" | "base64" | "base64url" = "hex", + timestamp?: number = Date.now(), + ): string; + /** + * If you pass "buffer", you get a 16-byte buffer instead of a string. + */ + function randomUUIDv7( + encoding: "buffer", + timestamp?: number = Date.now(), + ): Buffer; + + // If you only pass a timestamp, you get a hex string + function randomUUIDv7(timestamp?: number = Date.now()): string; +} +``` + +You can optionally set encoding to `"buffer"` to get a 16-byte buffer instead of a string. This can sometimes avoid string conversion overhead. + +```ts#buffer.ts +const buffer = Bun.randomUUIDv7("buffer"); +``` + +`base64` and `base64url` encodings are also supported when you want a slightly shorter string. + +```ts#base64.ts +const base64 = Bun.randomUUIDv7("base64"); +const base64url = Bun.randomUUIDv7("base64url"); +``` + ## `Bun.peek()` `Bun.peek(prom: Promise)` @@ -183,7 +234,7 @@ const currentFile = import.meta.url; Bun.openInEditor(currentFile); ``` -You can override this via the `debug.editor` setting in your [`bunfig.toml`](/docs/runtime/bunfig). +You can override this via the `debug.editor` setting in your [`bunfig.toml`](https://bun.sh/docs/runtime/bunfig). ```toml-diff#bunfig.toml + [debug] @@ -580,6 +631,65 @@ const foo = new Foo(); console.log(foo); // => "foo" ``` +## `Bun.inspect.table(tabularData, properties, options)` + +Format tabular data into a string. Like [`console.table`](https://developer.mozilla.org/en-US/docs/Web/API/console/table_static), except it returns a string rather than printing to the console. + +```ts +console.log( + Bun.inspect.table([ + { a: 1, b: 2, c: 3 }, + { a: 4, b: 5, c: 6 }, + { a: 7, b: 8, c: 9 }, + ]), +); +// +// ┌───┬───┬───┬───┐ +// │ │ a │ b │ c │ +// ├───┼───┼───┼───┤ +// │ 0 │ 1 │ 2 │ 3 │ +// │ 1 │ 4 │ 5 │ 6 │ +// │ 2 │ 7 │ 8 │ 9 │ +// └───┴───┴───┴───┘ +``` + +Additionally, you can pass an array of property names to display only a subset of properties. + +```ts +console.log( + Bun.inspect.table( + [ + { a: 1, b: 2, c: 3 }, + { a: 4, b: 5, c: 6 }, + ], + ["a", "c"], + ), +); +// +// ┌───┬───┬───┐ +// │ │ a │ c │ +// ├───┼───┼───┤ +// │ 0 │ 1 │ 3 │ +// │ 1 │ 4 │ 6 │ +// └───┴───┴───┘ +``` + +You can also conditionally enable ANSI colors by passing `{ colors: true }`. + +```ts +console.log( + Bun.inspect.table( + [ + { a: 1, b: 2, c: 3 }, + { a: 4, b: 5, c: 6 }, + ], + { + colors: true, + }, + ), +); +``` + ## `Bun.nanoseconds()` Returns the number of nanoseconds since the current `bun` process started, as a `number`. Useful for high-precision timing and benchmarking. 
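Because it returns a plain `number`, it is easy to wrap in a small timing helper. A minimal sketch (the `measure` helper below is illustrative, not a Bun API):

```ts
// Time a function using Bun.nanoseconds() and report milliseconds.
function measure<T>(label: string, fn: () => T): T {
  const start = Bun.nanoseconds();
  const result = fn();
  const elapsedMs = (Bun.nanoseconds() - start) / 1e6;
  console.log(`${label}: ${elapsedMs.toFixed(3)} ms`);
  return result;
}

measure("JSON round-trip", () => JSON.parse(JSON.stringify({ hello: "bun" })));
```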
diff --git a/docs/api/workers.md b/docs/api/workers.md index b20fb78085..04e1ff8f8d 100644 --- a/docs/api/workers.md +++ b/docs/api/workers.md @@ -50,6 +50,28 @@ const worker = new Worker("/not-found.js"); The specifier passed to `Worker` is resolved relative to the project root (like typing `bun ./path/to/file.js`). +### `preload` - load modules before the worker starts + +You can pass an array of module specifiers to the `preload` option to load modules before the worker starts. This is useful when you want to ensure some code is always loaded before the application starts, like loading OpenTelemetry, Sentry, DataDog, etc. + +```js +const worker = new Worker("./worker.ts", { + preload: ["./load-sentry.js"], +}); +``` + +Like the `--preload` CLI argument, the `preload` option is processed before the worker starts. + +You can also pass a single string to the `preload` option: + +```js +const worker = new Worker("./worker.ts", { + preload: "./load-sentry.js", +}); +``` + +This feature was added in Bun v1.1.35. + ### `blob:` URLs As of Bun v1.1.13, you can also pass a `blob:` URL to `Worker`. This is useful for creating workers from strings or other sources. diff --git a/docs/bundler/executables.md b/docs/bundler/executables.md index 2e9459279d..6ae39a574c 100644 --- a/docs/bundler/executables.md +++ b/docs/bundler/executables.md @@ -100,12 +100,55 @@ When deploying to production, we recommend the following: bun build --compile --minify --sourcemap ./path/to/my/app.ts --outfile myapp ``` -**What do these flags do?** +### Bytecode compilation + +To improve startup time, enable bytecode compilation: + +```sh +bun build --compile --minify --sourcemap --bytecode ./path/to/my/app.ts --outfile myapp +``` + +Using bytecode compilation, `tsc` starts 2x faster: + +{% image src="https://github.com/user-attachments/assets/dc8913db-01d2-48f8-a8ef-ac4e984f9763" width="689" /%} + +Bytecode compilation moves parsing overhead for large input files from runtime to bundle time. Your app starts faster, in exchange for making the `bun build` command a little slower. It doesn't obscure source code. + +**Experimental:** Bytecode compilation is an experimental feature introduced in Bun v1.1.30. Only `cjs` format is supported (which means no top-level-await). Let us know if you run into any issues! + +### What do these flags do? The `--minify` argument optimizes the size of the transpiled output code. If you have a large application, this can save megabytes of space. For smaller applications, it might still improve start time a little. The `--sourcemap` argument embeds a sourcemap compressed with zstd, so that errors & stacktraces point to their original locations instead of the transpiled location. Bun will automatically decompress & resolve the sourcemap when an error occurs. +The `--bytecode` argument enables bytecode compilation. Every time you run JavaScript code in Bun, JavaScriptCore (the engine) will compile your source code into bytecode. We can move this parsing work from runtime to bundle time, saving you startup time. 
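If you drive builds from JavaScript instead of the CLI, a rough sketch of the same knobs on `Bun.build` might look like the following. This assumes the `bytecode`, `minify`, and `sourcemap` options mirror their CLI flags, and that bytecode still requires `target: "bun"` with CommonJS output:

```ts
// Sketch only: assumes Bun.build exposes the same flags as the CLI.
await Bun.build({
  entrypoints: ["./path/to/my/app.ts"],
  outdir: "./dist",
  target: "bun",
  format: "cjs", // bytecode currently requires CommonJS output
  minify: true,
  sourcemap: "external",
  bytecode: true, // assumption: mirrors the --bytecode flag
});
```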
+ +## Worker + +To use workers in a standalone executable, add the worker's entrypoint to the CLI arguments: + +```sh +$ bun build --compile ./index.ts ./my-worker.ts --outfile myapp +``` + +Then, reference the worker in your code: + +```ts +console.log("Hello from Bun!"); + +// Any of these will work: +new Worker("./my-worker.ts"); +new Worker(new URL("./my-worker.ts", import.meta.url)); +new Worker(new URL("./my-worker.ts", import.meta.url).href); +``` + +As of Bun v1.1.25, when you add multiple entrypoints to a standalone executable, they will be bundled separately into the executable. + +In the future, we may automatically detect usages of statically-known paths in `new Worker(path)` and then bundle those into the executable, but for now, you'll need to add it to the shell command manually like the above example. + +If you use a relative path to a file not included in the standalone executable, it will attempt to load that path from disk relative to the current working directory of the process (and then error if it doesn't exist). + ## SQLite You can use `bun:sqlite` imports with `bun build --compile`. @@ -179,6 +222,59 @@ console.log(addon.hello()); Unfortunately, if you're using `@mapbox/node-pre-gyp` or other similar tools, you'll need to make sure the `.node` file is directly required or it won't bundle correctly. +### Embed directories + +To embed a directory with `bun build --compile`, use a shell glob in your `bun build` command: + +```sh +$ bun build --compile ./index.ts ./public/**/*.png +``` + +Then, you can reference the files in your code: + +```ts +import icon from "./public/assets/icon.png" with { type: "file" }; +import { file } from "bun"; + +export default { + fetch(req) { + // Embedded files can be streamed from Response objects + return new Response(file(icon)); + }, +}; +``` + +This is honestly a workaround, and we expect to improve this in the future with a more direct API. + +### Listing embedded files + +To get a list of all embedded files, use `Bun.embeddedFiles`: + +```js +import "./icon.png" with { type: "file" }; +import { embeddedFiles } from "bun"; + +console.log(embeddedFiles[0].name); // `icon-${hash}.png` +``` + +`Bun.embeddedFiles` returns an array of `Blob` objects which you can use to get the size, contents, and other properties of the files. + +```ts +embeddedFiles: Blob[] +``` + +The list of embedded files excludes bundled source code like `.ts` and `.js` files. + +#### Content hash + +By default, embedded files have a content hash appended to their name. This is useful for situations where you want to serve the file from a URL or CDN and have fewer cache invalidation issues. But sometimes, this is unexpected and you might want the original name instead: + +To disable the content hash, pass `--asset-naming` to `bun build --compile` like this: + +```sh +$ bun build --compile --asset-naming="[name].[ext]" ./index.ts +``` + ## Minification To trim down the size of the executable a little, pass `--minify` to `bun build --compile`. This uses Bun's minifier to reduce the code size. Overall though, Bun's binary is still way too big and we need to make it smaller. diff --git a/docs/bundler/index.md b/docs/bundler/index.md index d5524f8a8a..4680d8cc5a 100644 --- a/docs/bundler/index.md +++ b/docs/bundler/index.md @@ -146,7 +146,7 @@ $ bun build ./index.tsx --outdir ./out --watch ## Content types -Like the Bun runtime, the bundler supports an array of file types out of the box. The following table breaks down the bundler's set of standard "loaders". 
Refer to [Bundler > File types](/docs/runtime/loaders) for full documentation. +Like the Bun runtime, the bundler supports an array of file types out of the box. The following table breaks down the bundler's set of standard "loaders". Refer to [Bundler > File types](https://bun.sh/docs/runtime/loaders) for full documentation. {% table %} @@ -219,11 +219,11 @@ console.log(logo); The exact behavior of the file loader is also impacted by [`naming`](#naming) and [`publicPath`](#publicpath). {% /callout %} -Refer to the [Bundler > Loaders](/docs/bundler/loaders#file) page for more complete documentation on the file loader. +Refer to the [Bundler > Loaders](https://bun.sh/docs/bundler/loaders#file) page for more complete documentation on the file loader. ### Plugins -The behavior described in this table can be overridden or extended with [plugins](/docs/bundler/plugins). Refer to the [Bundler > Loaders](/docs/bundler/plugins) page for complete documentation. +The behavior described in this table can be overridden or extended with [plugins](https://bun.sh/docs/bundler/plugins). Refer to the [Bundler > Loaders](https://bun.sh/docs/bundler/plugins) page for complete documentation. ## API @@ -330,6 +330,8 @@ Depending on the target, Bun will apply different module resolution rules and op If any entrypoints contains a Bun shebang (`#!/usr/bin/env bun`) the bundler will default to `target: "bun"` instead of `"browser"`. + When using `target: "bun"` and `format: "cjs"` together, the `// @bun @bun-cjs` pragma is added and the CommonJS wrapper function is not compatible with Node.js. + --- - `node` @@ -341,7 +343,11 @@ Depending on the target, Bun will apply different module resolution rules and op Specifies the module format to be used in the generated bundles. -Currently the bundler only supports one module format: `"esm"`. Support for `"cjs"` and `"iife"` are planned. +Bun defaults to `"esm"`, and provides experimental support for `"cjs"` and `"iife"`. + +#### `format: "esm"` - ES Module + +This is the default format, which supports ES Module syntax including top-level `await`, import.meta, and more. {% codetabs %} @@ -359,44 +365,31 @@ $ bun build ./index.tsx --outdir ./out --format esm {% /codetabs %} - +TODO: document IIFE once we support globalNames. ### `splitting` @@ -490,7 +483,7 @@ n/a {% /codetabs %} -Bun implements a universal plugin system for both Bun's runtime and bundler. Refer to the [plugin documentation](/docs/bundler/plugins) for complete documentation. +Bun implements a universal plugin system for both Bun's runtime and bundler. Refer to the [plugin documentation](https://bun.sh/docs/bundler/plugins) for complete documentation. 
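As a quick taste of that system, here is a minimal, illustrative loader plugin. The `.txt` loader itself is hypothetical; only the `plugin()` / `onLoad` shape follows the documented API:

```ts
import { plugin } from "bun";

// Illustrative plugin: import .txt files as default-exported strings.
plugin({
  name: "txt-loader",
  setup(build) {
    build.onLoad({ filter: /\.txt$/ }, async args => {
      const text = await Bun.file(args.path).text();
      return {
        contents: `export default ${JSON.stringify(text)};`,
        loader: "js",
      };
    });
  },
});
```

The same plugin object can be registered for the runtime (for example via `preload`) or passed to the bundler, which is what "universal" means here.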
- SQLite -- [`bun:sqlite`](/docs/api/sqlite) +- [`bun:sqlite`](https://bun.sh/docs/api/sqlite) --- - FFI -- [`bun:ffi`](/docs/api/ffi) +- [`bun:ffi`](https://bun.sh/docs/api/ffi) --- - Testing -- [`bun:test`](/docs/cli/test) +- [`bun:test`](https://bun.sh/docs/cli/test) --- - Node-API -- [`Node-API`](/docs/api/node-api) +- [`Node-API`](https://bun.sh/docs/api/node-api) --- - Glob -- [`Bun.Glob`](/docs/api/glob) +- [`Bun.Glob`](https://bun.sh/docs/api/glob) --- - Utilities -- [`Bun.version`](/docs/api/utils#bun-version) - [`Bun.revision`](/docs/api/utils#bun-revision) - [`Bun.env`](/docs/api/utils#bun-env) - [`Bun.main`](/docs/api/utils#bun-main) - [`Bun.sleep()`](/docs/api/utils#bun-sleep) - [`Bun.sleepSync()`](/docs/api/utils#bun-sleepsync) - [`Bun.which()`](/docs/api/utils#bun-which) - [`Bun.peek()`](/docs/api/utils#bun-peek) - [`Bun.openInEditor()`](/docs/api/utils#bun-openineditor) - [`Bun.deepEquals()`](/docs/api/utils#bun-deepequals) - [`Bun.escapeHTML()`](/docs/api/utils#bun-escapehtml) - [`Bun.fileURLToPath()`](/docs/api/utils#bun-fileurltopath) - [`Bun.pathToFileURL()`](/docs/api/utils#bun-pathtofileurl) - [`Bun.gzipSync()`](/docs/api/utils#bun-gzipsync) - [`Bun.gunzipSync()`](/docs/api/utils#bun-gunzipsync) - [`Bun.deflateSync()`](/docs/api/utils#bun-deflatesync) - [`Bun.inflateSync()`](/docs/api/utils#bun-inflatesync) - [`Bun.inspect()`](/docs/api/utils#bun-inspect) - [`Bun.nanoseconds()`](/docs/api/utils#bun-nanoseconds) - [`Bun.readableStreamTo*()`](/docs/api/utils#bun-readablestreamto) - [`Bun.resolveSync()`](/docs/api/utils#bun-resolvesync) +- [`Bun.version`](https://bun.sh/docs/api/utils#bun-version) + [`Bun.revision`](https://bun.sh/docs/api/utils#bun-revision) + [`Bun.env`](https://bun.sh/docs/api/utils#bun-env) + [`Bun.main`](https://bun.sh/docs/api/utils#bun-main) + [`Bun.sleep()`](https://bun.sh/docs/api/utils#bun-sleep) + [`Bun.sleepSync()`](https://bun.sh/docs/api/utils#bun-sleepsync) + [`Bun.which()`](https://bun.sh/docs/api/utils#bun-which) + [`Bun.peek()`](https://bun.sh/docs/api/utils#bun-peek) + [`Bun.openInEditor()`](https://bun.sh/docs/api/utils#bun-openineditor) + [`Bun.deepEquals()`](https://bun.sh/docs/api/utils#bun-deepequals) + [`Bun.escapeHTML()`](https://bun.sh/docs/api/utils#bun-escapehtml) + [`Bun.fileURLToPath()`](https://bun.sh/docs/api/utils#bun-fileurltopath) + [`Bun.pathToFileURL()`](https://bun.sh/docs/api/utils#bun-pathtofileurl) + [`Bun.gzipSync()`](https://bun.sh/docs/api/utils#bun-gzipsync) + [`Bun.gunzipSync()`](https://bun.sh/docs/api/utils#bun-gunzipsync) + [`Bun.deflateSync()`](https://bun.sh/docs/api/utils#bun-deflatesync) + [`Bun.inflateSync()`](https://bun.sh/docs/api/utils#bun-inflatesync) + [`Bun.inspect()`](https://bun.sh/docs/api/utils#bun-inspect) + [`Bun.nanoseconds()`](https://bun.sh/docs/api/utils#bun-nanoseconds) + [`Bun.readableStreamTo*()`](https://bun.sh/docs/api/utils#bun-readablestreamto) + [`Bun.resolveSync()`](https://bun.sh/docs/api/utils#bun-resolvesync) {% /table %} diff --git a/docs/runtime/bunfig.md b/docs/runtime/bunfig.md index 4af5187445..1bfcd540e5 100644 --- a/docs/runtime/bunfig.md +++ b/docs/runtime/bunfig.md @@ -370,6 +370,19 @@ myorg = { username = "myusername", password = "$npm_password", url = "https://re myorg = { token = "$npm_token", url = "https://registry.myorg.com/" } ``` +### `install.ca` and `install.cafile` + +To configure a CA certificate, use `install.ca` or `install.cafile` to specify a path to a CA certificate file. 
+ +```toml +[install] +# The CA certificate as a string +ca = "-----BEGIN CERTIFICATE-----\n...\n-----END CERTIFICATE-----" + +# A path to a CA certificate file. The file can contain multiple certificates. +cafile = "path/to/cafile" +``` + ### `install.cache` To configure the cache behavior: diff --git a/docs/runtime/index.md b/docs/runtime/index.md index 4c557cab49..61971f0262 100644 --- a/docs/runtime/index.md +++ b/docs/runtime/index.md @@ -6,7 +6,7 @@ Bun is designed to start fast and run fast. Its transpiler and runtime are writt {% image src="/images/bun-run-speed.jpeg" caption="Bun vs Node.js vs Deno running Hello World" /%} - + Performance sensitive APIs like `Buffer`, `fetch`, and `Response` are heavily profiled and optimized. Under the hood Bun uses the [JavaScriptCore engine](https://developer.apple.com/documentation/javascriptcore), which is developed by Apple for Safari. It starts and runs faster than V8, the engine used by Node.js and Chromium-based browsers. @@ -21,7 +21,7 @@ $ bun index.ts $ bun index.tsx ``` -Some aspects of Bun's runtime behavior are affected by the contents of your `tsconfig.json` file. Refer to [Runtime > TypeScript](/docs/runtime/typescript) page for details. +Some aspects of Bun's runtime behavior are affected by the contents of your `tsconfig.json` file. Refer to [Runtime > TypeScript](https://bun.sh/docs/runtime/typescript) page for details. @@ -101,7 +101,7 @@ import pkg from "./package.json"; import bunfig from "./bunfig.toml"; ``` -## WASM +## WASI {% callout %} 🚧 **Experimental** @@ -122,17 +122,17 @@ $ bun run ./my-wasm-app.whatever ## Node.js compatibility -Long-term, Bun aims for complete Node.js compatibility. Most Node.js packages already work with Bun out of the box, but certain low-level APIs like `dgram` are still unimplemented. Track the current compatibility status at [Ecosystem > Node.js](/docs/runtime/nodejs-apis). +Long-term, Bun aims for complete Node.js compatibility. Most Node.js packages already work with Bun out of the box, but certain low-level APIs like `dgram` are still unimplemented. Track the current compatibility status at [Ecosystem > Node.js](https://bun.sh/docs/runtime/nodejs-apis). Bun implements the Node.js module resolution algorithm, so dependencies can still be managed with `package.json`, `node_modules`, and CommonJS-style imports. {% callout %} -**Note** — We recommend using Bun's [built-in package manager](/docs/cli/install) for a performance boost over other npm clients. +**Note** — We recommend using Bun's [built-in package manager](https://bun.sh/docs/cli/install) for a performance boost over other npm clients. {% /callout %} ## Web APIs - + Some Web APIs aren't relevant in the context of a server-first runtime like Bun, such as the [DOM API](https://developer.mozilla.org/en-US/docs/Web/API/HTML_DOM_API#html_dom_api_interfaces) or [History API](https://developer.mozilla.org/en-US/docs/Web/API/History_API). Many others, though, are broadly useful outside of the browser context; when possible, Bun implements these Web-standard APIs instead of introducing new APIs. 
@@ -237,67 +237,67 @@ Bun exposes a set of Bun-specific APIs on the `Bun` global object and through a --- -- [HTTP](/docs/api/http) +- [HTTP](https://bun.sh/docs/api/http) - `Bun.serve` --- -- [File I/O](/docs/api/file-io) +- [File I/O](https://bun.sh/docs/api/file-io) - `Bun.file` `Bun.write` --- -- [Processes](/docs/api/spawn) +- [Processes](https://bun.sh/docs/api/spawn) - `Bun.spawn` `Bun.spawnSync` --- -- [TCP](/docs/api/tcp) +- [TCP](https://bun.sh/docs/api/tcp) - `Bun.listen` `Bun.connect` --- -- [Transpiler](/docs/api/transpiler) +- [Transpiler](https://bun.sh/docs/api/transpiler) - `Bun.Transpiler` --- -- [Routing](/docs/api/file-system-router) +- [Routing](https://bun.sh/docs/api/file-system-router) - `Bun.FileSystemRouter` --- -- [HTMLRewriter](/docs/api/html-rewriter) +- [HTMLRewriter](https://bun.sh/docs/api/html-rewriter) - `HTMLRewriter` --- -- [Utils](/docs/api/utils) +- [Utils](https://bun.sh/docs/api/utils) - `Bun.peek` `Bun.which` --- -- [SQLite](/docs/api/sqlite) +- [SQLite](https://bun.sh/docs/api/sqlite) - `bun:sqlite` --- -- [FFI](/docs/api/ffi) +- [FFI](https://bun.sh/docs/api/ffi) - `bun:ffi` --- -- [DNS](/docs/api/dns) +- [DNS](https://bun.sh/docs/api/dns) - `bun:dns` --- -- [Testing](/docs/api/test) +- [Testing](https://bun.sh/docs/api/test) - `bun:test` --- -- [Node-API](/docs/api/node-api) +- [Node-API](https://bun.sh/docs/api/node-api) - `Node-API` --- @@ -306,4 +306,4 @@ Bun exposes a set of Bun-specific APIs on the `Bun` global object and through a ## Plugins -Support for additional file types can be implemented with plugins. Refer to [Runtime > Plugins](/docs/bundler/plugins) for full documentation. +Support for additional file types can be implemented with plugins. Refer to [Runtime > Plugins](https://bun.sh/docs/bundler/plugins) for full documentation. diff --git a/docs/runtime/jsx.md b/docs/runtime/jsx.md index 31a61652bb..ab08255599 100644 --- a/docs/runtime/jsx.md +++ b/docs/runtime/jsx.md @@ -14,7 +14,7 @@ console.log(); ## Configuration -Bun reads your `tsconfig.json` or `jsconfig.json` configuration files to determines how to perform the JSX transform internally. To avoid using either of these, the following options can also be defined in [`bunfig.toml`](/docs/runtime/bunfig). +Bun reads your `tsconfig.json` or `jsconfig.json` configuration files to determines how to perform the JSX transform internally. To avoid using either of these, the following options can also be defined in [`bunfig.toml`](https://bun.sh/docs/runtime/bunfig). The following compiler options are respected. @@ -197,7 +197,7 @@ The module from which the component factory function (`createElement`, `jsx`, `j - ```jsonc { - "jsx": "react" + "jsx": "react", // jsxImportSource is not defined // default to "react" } @@ -213,7 +213,7 @@ The module from which the component factory function (`createElement`, `jsx`, `j - ```jsonc { "jsx": "react-jsx", - "jsxImportSource": "preact" + "jsxImportSource": "preact", } ``` @@ -227,7 +227,7 @@ The module from which the component factory function (`createElement`, `jsx`, `j - ```jsonc { "jsx": "react-jsxdev", - "jsxImportSource": "preact" + "jsxImportSource": "preact", } ``` @@ -263,7 +263,7 @@ All of these values can be set on a per-file basis using _pragmas_. A pragma is - ```jsonc { - "jsxFactory": "h" + "jsxFactory": "h", } ``` @@ -274,7 +274,7 @@ All of these values can be set on a per-file basis using _pragmas_. 
A pragma is ``` - ```jsonc { - "jsxFragmentFactory": "MyFragment" + "jsxFragmentFactory": "MyFragment", } ``` @@ -285,7 +285,7 @@ All of these values can be set on a per-file basis using _pragmas_. A pragma is ``` - ```jsonc { - "jsxImportSource": "preact" + "jsxImportSource": "preact", } ``` diff --git a/docs/runtime/loaders.md b/docs/runtime/loaders.md index 6b226823d0..3909a1de90 100644 --- a/docs/runtime/loaders.md +++ b/docs/runtime/loaders.md @@ -9,7 +9,7 @@ $ bun index.ts $ bun index.tsx ``` -Some aspects of Bun's runtime behavior are affected by the contents of your `tsconfig.json` file. Refer to [Runtime > TypeScript](/docs/runtime/typescript) page for details. +Some aspects of Bun's runtime behavior are affected by the contents of your `tsconfig.json` file. Refer to [Runtime > TypeScript](https://bun.sh/docs/runtime/typescript) page for details. ## JSX @@ -61,7 +61,7 @@ import pkg from "./package.json"; import data from "./data.toml"; ``` -## WASM +## WASI {% callout %} 🚧 **Experimental** @@ -85,15 +85,15 @@ $ bun run ./my-wasm-app.whatever You can import sqlite databases directly into your code. Bun will automatically load the database and return a `Database` object. ```ts -import db from "./my.db" with {type: "sqlite"}; +import db from "./my.db" with { type: "sqlite" }; console.log(db.query("select * from users LIMIT 1").get()); ``` -This uses [`bun:sqlite`](/docs/api/sqlite). +This uses [`bun:sqlite`](https://bun.sh/docs/api/sqlite). ## Custom loaders -Support for additional file types can be implemented with plugins. Refer to [Runtime > Plugins](/docs/bundler/plugins) for full documentation. +Support for additional file types can be implemented with plugins. Refer to [Runtime > Plugins](https://bun.sh/docs/bundler/plugins) for full documentation. ", id + 1); +if (!(id > -1 && endIdLine > -1)) { + throw new Error("Missing sentry_id"); +} +const sentryId = body.slice(id + " %s\n" "${bun_is_at}" - printf "\n" - printf "You should remove this binary and switch it to ./build:\n" - printf ' export PATH="$PATH:%s"\n' $(realpath "$PWD/build") - fi -else - printf "\n" - printf "You should add ./build to your path:\n" - printf ' export PATH="$PATH:%s"\n' $(realpath "$PWD/build") -fi -printf "\n" -printf "To rebuild bun, run '${C_GREEN}bun run build${C_RESET}'\n\n" diff --git a/scripts/update-submodules.ps1 b/scripts/update-submodules.ps1 deleted file mode 100755 index c822d31d8d..0000000000 --- a/scripts/update-submodules.ps1 +++ /dev/null @@ -1,25 +0,0 @@ -param( - [switch]$WebKit = $false -) - -$ErrorActionPreference = 'Stop' -$ScriptDir = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent -Push-Location (Join-Path $ScriptDir '..') -try { - $Names = Get-Content .gitmodules | Select-String 'path = (.*)' | ForEach-Object { $_.Matches.Groups[1].Value } - - # we will exclude webkit unless you explicitly clone it yourself (a huge download) - if (!($WebKit) -and (-not (Test-Path "src/bun.js/WebKit/.git"))) { - $Names = $Names | Where-Object { $_ -ne 'src/bun.js/WebKit' } - } - if ($env:FORCE_UPDATE_SUBMODULES -eq "1") { - # Set --force in CI. 
- git submodule update --init --recursive --progress --depth 1 --checkout --force @NAMES - } else { - git submodule update --init --recursive --progress --depth 1 --checkout @NAMES - } - - if ($LASTEXITCODE -ne 0) { - throw "git submodule update failed" - } -} finally { Pop-Location } \ No newline at end of file diff --git a/scripts/update-submodules.sh b/scripts/update-submodules.sh deleted file mode 100755 index 1040f4b59b..0000000000 --- a/scripts/update-submodules.sh +++ /dev/null @@ -1,20 +0,0 @@ -#!/usr/bin/env bash -cd "$(dirname "${BASH_SOURCE[0]}")" -cd .. -NAMES=$(cat .gitmodules | grep 'path = ' | awk '{print $3}') - -if ! [ "$1" == '--webkit' ]; then - # we will exclude webkit unless you explicitly clone it yourself (a huge download) - if [ ! -e "src/bun.js/WebKit/.git" ]; then - NAMES=$(echo "$NAMES" | grep -v 'WebKit') - fi -fi - -set -exo pipefail -git submodule update --init --recursive --progress --depth=1 --checkout $NAMES -if [ "$FORCE_UPDATE_SUBMODULES" == "1" ]; then - # Set --force in CI. - git submodule update --init --recursive --progress --depth=1 --checkout --force $NAMES -else - git submodule update --init --recursive --progress --depth=1 --checkout $NAMES -fi diff --git a/scripts/utils.mjs b/scripts/utils.mjs new file mode 100644 index 0000000000..de7ec0b686 --- /dev/null +++ b/scripts/utils.mjs @@ -0,0 +1,1623 @@ +// Contains utility functions for various scripts, including: +// CI, running tests, and code generation. + +import { spawn as nodeSpawn, spawnSync as nodeSpawnSync } from "node:child_process"; +import { createHash } from "node:crypto"; +import { appendFileSync, existsSync, mkdtempSync, readdirSync, readFileSync, writeFileSync } from "node:fs"; +import { writeFile } from "node:fs/promises"; +import { hostname, tmpdir as nodeTmpdir, userInfo } from "node:os"; +import { dirname, join, relative, resolve } from "node:path"; +import { normalize as normalizeWindows } from "node:path/win32"; + +export const isWindows = process.platform === "win32"; +export const isMacOS = process.platform === "darwin"; +export const isLinux = process.platform === "linux"; +export const isPosix = isMacOS || isLinux; + +/** + * @param {string} name + * @param {boolean} [required] + * @returns {string} + */ +export function getEnv(name, required = true) { + const value = process.env[name]; + + if (required && !value) { + throw new Error(`Environment variable is missing: ${name}`); + } + + return value; +} + +export const isBuildkite = getEnv("BUILDKITE", false) === "true"; +export const isGithubAction = getEnv("GITHUB_ACTIONS", false) === "true"; +export const isCI = getEnv("CI", false) === "true" || isBuildkite || isGithubAction; +export const isDebug = getEnv("DEBUG", false) === "1"; + +/** + * @param {string} name + * @param {object} [options] + * @param {boolean} [options.required] + * @param {boolean} [options.redact] + * @returns {string} + */ +export function getSecret(name, options = { required: true, redact: true }) { + const value = getEnv(name, false); + if (value) { + return value; + } + + if (isBuildkite) { + const command = ["buildkite-agent", "secret", "get", name]; + if (options["redact"] === false) { + command.push("--skip-redaction"); + } + + const { error, stdout: secret } = spawnSync(command); + if (error || !secret.trim()) { + const orgId = getEnv("BUILDKITE_ORGANIZATION_SLUG", false); + const clusterId = getEnv("BUILDKITE_CLUSTER_ID", false); + + let hint; + if (orgId && clusterId) { + hint = 
`https://buildkite.com/organizations/${orgId}/clusters/${clusterId}/secrets`; + } else { + hint = "https://buildkite.com/docs/pipelines/buildkite-secrets"; + } + + throw new Error(`Secret not found: ${name} (hint: go to ${hint} and create a secret)`, { cause: error }); + } + + setEnv(name, secret); + return secret; + } + + return getEnv(name, options["required"]); +} + +/** + * @param {...unknown} args + */ +export function debugLog(...args) { + if (isDebug) { + console.log(...args); + } +} + +/** + * @param {string} name + * @param {string | undefined} value + */ +export function setEnv(name, value) { + process.env[name] = value; + + if (isGithubAction && !/^GITHUB_/i.test(name)) { + const envFilePath = process.env["GITHUB_ENV"]; + if (envFilePath) { + const delimeter = Math.random().toString(36).substring(2, 15); + const content = `${name}<<${delimeter}\n${value}\n${delimeter}\n`; + appendFileSync(outputPath, content); + } + } +} + +/** + * @typedef {object} SpawnOptions + * @property {string} [cwd] + * @property {number} [timeout] + * @property {Record} [env] + * @property {string} [stdout] + * @property {string} [stderr] + */ + +/** + * @typedef {object} SpawnResult + * @property {number} exitCode + * @property {number} [signalCode] + * @property {string} stdout + * @property {string} stderr + * @property {Error} [error] + */ + +/** + * @param {string[]} command + * @param {SpawnOptions} options + * @returns {Promise} + */ +export async function spawn(command, options = {}) { + debugLog("$", ...command); + + const [cmd, ...args] = command; + const spawnOptions = { + cwd: options["cwd"] ?? process.cwd(), + timeout: options["timeout"] ?? undefined, + env: options["env"] ?? undefined, + stdio: ["ignore", "pipe", "pipe"], + ...options, + }; + + let exitCode = 1; + let signalCode; + let stdout = ""; + let stderr = ""; + let error; + + const result = new Promise((resolve, reject) => { + const subprocess = nodeSpawn(cmd, args, spawnOptions); + + subprocess.stdout?.on("data", chunk => { + stdout += chunk; + }); + subprocess.stderr?.on("data", chunk => { + stderr += chunk; + }); + + subprocess.on("error", error => reject(error)); + subprocess.on("exit", (code, signal) => { + exitCode = code; + signalCode = signal; + resolve(); + }); + }); + + try { + await result; + } catch (cause) { + error = cause; + } + + if (exitCode !== 0 && isWindows) { + const exitReason = getWindowsExitReason(exitCode); + if (exitReason) { + exitCode = exitReason; + } + } + + if (error || signalCode || exitCode !== 0) { + const description = command.map(arg => (arg.includes(" ") ? 
`"${arg.replace(/"/g, '\\"')}"` : arg)).join(" "); + const cause = error || stderr.trim() || stdout.trim() || undefined; + + if (signalCode) { + error = new Error(`Command killed with ${signalCode}: ${description}`, { cause }); + } else { + error = new Error(`Command exited with code ${exitCode}: ${description}`, { cause }); + } + } + + return { + exitCode, + signalCode, + stdout, + stderr, + error, + }; +} + +/** + * @param {string[]} command + * @param {SpawnOptions} options + * @returns {Promise} + */ +export async function spawnSafe(command, options) { + const result = await spawn(command, options); + + const { error } = result; + if (error) { + throw error; + } + + return result; +} + +/** + * @param {string[]} command + * @param {SpawnOptions} options + * @returns {SpawnResult} + */ +export function spawnSync(command, options = {}) { + debugLog("$", ...command); + + const [cmd, ...args] = command; + const spawnOptions = { + cwd: options["cwd"] ?? process.cwd(), + timeout: options["timeout"] ?? undefined, + env: options["env"] ?? undefined, + stdio: ["ignore", "pipe", "pipe"], + ...options, + }; + + let exitCode = 1; + let signalCode; + let stdout = ""; + let stderr = ""; + let error; + + let result; + try { + result = nodeSpawnSync(cmd, args, spawnOptions); + } catch (error) { + result = { error }; + } + + const { error: spawnError, status, signal, stdout: stdoutBuffer, stderr: stderrBuffer } = result; + if (spawnError) { + error = spawnError; + } else { + exitCode = status ?? 1; + signalCode = signal || undefined; + stdout = stdoutBuffer.toString(); + stderr = stderrBuffer.toString(); + } + + if (exitCode !== 0 && isWindows) { + const exitReason = getWindowsExitReason(exitCode); + if (exitReason) { + exitCode = exitReason; + } + } + + if (error || signalCode || exitCode !== 0) { + const description = command.map(arg => (arg.includes(" ") ? `"${arg.replace(/"/g, '\\"')}"` : arg)).join(" "); + const cause = error || stderr.trim() || stdout.trim() || undefined; + + if (signalCode) { + error = new Error(`Command killed with ${signalCode}: ${description}`, { cause }); + } else { + error = new Error(`Command exited with code ${exitCode}: ${description}`, { cause }); + } + } + + return { + exitCode, + signalCode, + stdout, + stderr, + error, + }; +} + +/** + * @param {string[]} command + * @param {SpawnOptions} options + * @returns {SpawnResult} + */ +export function spawnSyncSafe(command, options) { + const result = spawnSync(command, options); + + const { error } = result; + if (error) { + throw error; + } + + return result; +} + +/** + * @param {number} exitCode + * @returns {string | undefined} + */ +export function getWindowsExitReason(exitCode) { + const ntStatusPath = "C:\\Program Files (x86)\\Windows Kits\\10\\Include\\10.0.22621.0\\shared\\ntstatus.h"; + const nthStatus = readFile(ntStatusPath, { cache: true }); + + const match = nthStatus.match(new RegExp(`(STATUS_\\w+).*0x${exitCode?.toString(16)}`, "i")); + if (match) { + const [, exitReason] = match; + return exitReason; + } +} + +/** + * @param {string} url + * @returns {URL} + */ +export function parseGitUrl(url) { + const string = typeof url === "string" ? 
url : url.toString(); + + const githubUrl = getEnv("GITHUB_SERVER_URL", false) || "https://github.com"; + if (/^git@github\.com:/.test(string)) { + return new URL(string.slice(15).replace(/\.git$/, ""), githubUrl); + } + if (/^https:\/\/github\.com\//.test(string)) { + return new URL(string.slice(19).replace(/\.git$/, ""), githubUrl); + } + + throw new Error(`Unsupported git url: ${string}`); +} + +/** + * @param {string} [cwd] + * @returns {URL | undefined} + */ +export function getRepositoryUrl(cwd) { + if (!cwd) { + if (isBuildkite) { + const repository = getEnv("BUILDKITE_PULL_REQUEST_REPO", false) || getEnv("BUILDKITE_REPO", false); + if (repository) { + return parseGitUrl(repository); + } + } + + if (isGithubAction) { + const serverUrl = getEnv("GITHUB_SERVER_URL", false) || "https://github.com"; + const repository = getEnv("GITHUB_REPOSITORY", false); + if (serverUrl && repository) { + return parseGitUrl(new URL(repository, serverUrl)); + } + } + } + + const { error, stdout } = spawnSync(["git", "remote", "get-url", "origin"], { cwd }); + if (!error) { + return parseGitUrl(stdout.trim()); + } +} + +/** + * @param {string} [cwd] + * @returns {string | undefined} + */ +export function getRepository(cwd) { + if (!cwd) { + if (isGithubAction) { + const repository = getEnv("GITHUB_REPOSITORY", false); + if (repository) { + return repository; + } + } + } + + const url = getRepositoryUrl(cwd); + if (url) { + const { hostname, pathname } = new URL(url); + if (hostname == "github.com") { + return pathname.slice(1); + } + } +} + +/** + * @param {string} [cwd] + * @returns {string | undefined} + */ +export function getRepositoryOwner(cwd) { + const repository = getRepository(cwd); + if (repository) { + const [owner] = repository.split("/"); + if (owner) { + return owner; + } + } +} + +/** + * @param {string} [cwd] + * @returns {string | undefined} + */ +export function getCommit(cwd) { + if (!cwd) { + if (isBuildkite) { + const commit = getEnv("BUILDKITE_COMMIT", false); + if (commit) { + return commit; + } + } + + if (isGithubAction) { + const commit = getEnv("GITHUB_SHA", false); + if (commit) { + return commit; + } + } + } + + const { error, stdout } = spawnSync(["git", "rev-parse", "HEAD"], { cwd }); + if (!error) { + return stdout.trim(); + } +} + +/** + * @param {string} [cwd] + * @returns {string | undefined} + */ +export function getCommitMessage(cwd) { + if (!cwd) { + if (isBuildkite) { + const message = getEnv("BUILDKITE_MESSAGE", false); + if (message) { + return message; + } + } + } + + const { error, stdout } = spawnSync(["git", "log", "-1", "--pretty=%B"], { cwd }); + if (!error) { + return stdout.trim(); + } +} + +/** + * @param {string} [cwd] + * @returns {string | undefined} + */ +export function getBranch(cwd) { + if (!cwd) { + if (isBuildkite) { + const branch = getEnv("BUILDKITE_BRANCH", false); + if (branch) { + return branch; + } + } + + if (isGithubAction) { + const ref = getEnv("GITHUB_REF_NAME", false); + if (ref) { + return ref; + } + } + } + + const { error, stdout } = spawnSync(["git", "rev-parse", "--abbrev-ref", "HEAD"], { cwd }); + if (!error) { + return stdout.trim(); + } +} + +/** + * @param {string} [cwd] + * @returns {string} + */ +export function getMainBranch(cwd) { + if (!cwd) { + if (isBuildkite) { + const branch = getEnv("BUILDKITE_PIPELINE_DEFAULT_BRANCH", false); + if (branch) { + return branch; + } + } + + if (isGithubAction) { + const headRef = getEnv("GITHUB_HEAD_REF", false); + if (headRef) { + return headRef; + } + } + } + + const { error, stdout 
} = spawnSync(["git", "symbolic-ref", "refs/remotes/origin/HEAD"], { cwd }); + if (!error) { + return stdout.trim().replace("refs/remotes/origin/", ""); + } +} + +/** + * @param {string} [cwd] + * @returns {boolean} + */ +export function isMainBranch(cwd) { + return !isFork(cwd) && getBranch(cwd) === getMainBranch(cwd); +} + +/** + * @returns {boolean} + */ +export function isPullRequest() { + if (isBuildkite) { + return !isNaN(parseInt(getEnv("BUILDKITE_PULL_REQUEST", false))); + } + + if (isGithubAction) { + return /pull_request|merge_group/.test(getEnv("GITHUB_EVENT_NAME", false)); + } + + return false; +} + +/** + * @returns {number | undefined} + */ +export function getPullRequest() { + if (isBuildkite) { + const pullRequest = getEnv("BUILDKITE_PULL_REQUEST", false); + if (pullRequest) { + return parseInt(pullRequest); + } + } + + if (isGithubAction) { + const eventPath = getEnv("GITHUB_EVENT_PATH", false); + if (eventPath && existsSync(eventPath)) { + const event = JSON.parse(readFile(eventPath, { cache: true })); + const pullRequest = event["pull_request"]; + if (pullRequest) { + return parseInt(pullRequest["number"]); + } + } + } +} + +/** + * @returns {string | undefined} + */ +export function getTargetBranch() { + if (isPullRequest()) { + if (isBuildkite) { + return getEnv("BUILDKITE_PULL_REQUEST_BASE_BRANCH", false); + } + + if (isGithubAction) { + return getEnv("GITHUB_BASE_REF", false); + } + } +} + +/** + * @returns {boolean} + */ +export function isFork() { + if (isBuildkite) { + const repository = getEnv("BUILDKITE_PULL_REQUEST_REPO", false); + return !!repository && repository !== getEnv("BUILDKITE_REPO", false); + } + + if (isGithubAction) { + const eventPath = getEnv("GITHUB_EVENT_PATH", false); + if (eventPath && existsSync(eventPath)) { + const event = JSON.parse(readFile(eventPath, { cache: true })); + const pullRequest = event["pull_request"]; + if (pullRequest) { + return !!pullRequest["head"]["repo"]["fork"]; + } + } + } + + return false; +} + +/** + * @param {string} [cwd] + * @returns {boolean} + */ +export function isMergeQueue(cwd) { + return /^gh-readonly-queue/.test(getBranch(cwd)); +} + +/** + * @returns {string | undefined} + */ +export function getGithubToken() { + const cachedToken = getSecret("GITHUB_TOKEN", { required: false }); + + if (typeof cachedToken === "string") { + return cachedToken || undefined; + } + + const { error, stdout } = spawnSync(["gh", "auth", "token"]); + const token = error ? 
"" : stdout.trim(); + + setEnv("GITHUB_TOKEN", token); + return token || undefined; +} + +/** + * @typedef {object} CurlOptions + * @property {string} [method] + * @property {string} [body] + * @property {Record} [headers] + * @property {number} [timeout] + * @property {number} [retries] + * @property {boolean} [json] + * @property {boolean} [arrayBuffer] + * @property {string} [filename] + */ + +/** + * @typedef {object} CurlResult + * @property {number} status + * @property {string} statusText + * @property {Error | undefined} error + * @property {any} body + */ + +/** + * @param {string} url + * @param {CurlOptions} [options] + * @returns {Promise} + */ +export async function curl(url, options = {}) { + let { hostname, href } = new URL(url); + let method = options["method"] || "GET"; + let input = options["body"]; + let headers = options["headers"] || {}; + let retries = options["retries"] || 3; + let json = options["json"]; + let arrayBuffer = options["arrayBuffer"]; + let filename = options["filename"]; + + if (typeof headers["Authorization"] === "undefined") { + if (hostname === "api.github.com" || hostname === "uploads.github.com") { + const githubToken = getGithubToken(); + if (githubToken) { + headers["Authorization"] = `Bearer ${githubToken}`; + } + } + } + + let status; + let statusText; + let body; + let error; + for (let i = 0; i < retries; i++) { + if (i > 0) { + await new Promise(resolve => setTimeout(resolve, 1000 * (i + 1))); + } + + let response; + try { + response = await fetch(href, { method, headers, body: input }); + } catch (cause) { + debugLog("$", "curl", href, "-> error"); + error = new Error(`Fetch failed: ${method} ${url}`, { cause }); + continue; + } + + status = response["status"]; + statusText = response["statusText"]; + debugLog("$", "curl", href, "->", status, statusText); + + const ok = response["ok"]; + try { + if (filename && ok) { + const buffer = await response.arrayBuffer(); + await writeFile(filename, new Uint8Array(buffer)); + } else if (arrayBuffer && ok) { + body = await response.arrayBuffer(); + } else if (json && ok) { + body = await response.json(); + } else { + body = await response.text(); + } + } catch (cause) { + error = new Error(`Fetch failed: ${method} ${url}`, { cause }); + continue; + } + + if (response["ok"]) { + break; + } + + error = new Error(`Fetch failed: ${method} ${url}: ${status} ${statusText}`, { cause: body }); + + if (status === 400 || status === 404 || status === 422) { + break; + } + } + + return { + status, + statusText, + error, + body, + }; +} + +/** + * @param {string} url + * @param {CurlOptions} options + * @returns {Promise} + */ +export async function curlSafe(url, options) { + const result = await curl(url, options); + + const { error, body } = result; + if (error) { + throw error; + } + + return body; +} + +let cachedFiles; + +/** + * @param {string} filename + * @param {object} [options] + * @param {boolean} [options.cache] + * @returns {string} + */ +export function readFile(filename, options = {}) { + const absolutePath = resolve(filename); + if (options["cache"]) { + if (cachedFiles?.[absolutePath]) { + return cachedFiles[absolutePath]; + } + } + + const relativePath = relative(process.cwd(), absolutePath); + debugLog("cat", relativePath); + + let content; + try { + content = readFileSync(absolutePath, "utf-8"); + } catch (cause) { + throw new Error(`Read failed: ${relativePath}`, { cause }); + } + + if (options["cache"]) { + cachedFiles ||= {}; + cachedFiles[absolutePath] = content; + } + + return content; 
+} + +/** + * @param {string} [cwd] + * @param {string} [base] + * @param {string} [head] + * @returns {Promise} + */ +export async function getChangedFiles(cwd, base, head) { + const repository = getRepository(cwd); + base ||= getCommit(cwd); + head ||= `${base}^1`; + + const url = `https://api.github.com/repos/${repository}/compare/${head}...${base}`; + const { error, body } = await curl(url, { json: true }); + + if (error) { + console.warn("Failed to list changed files:", error); + return; + } + + const { files } = body; + return files.filter(({ status }) => !/removed|unchanged/i.test(status)).map(({ filename }) => filename); +} + +/** + * @param {string} filename + * @returns {boolean} + */ +export function isDocumentation(filename) { + if (/^(docs|bench|examples|misctools|\.vscode)/.test(filename)) { + return true; + } + + if (!/^(src|test|vendor)/.test(filename) && /\.(md|txt)$/.test(filename)) { + return true; + } + + return false; +} + +/** + * @returns {string | undefined} + */ +export function getBuildId() { + if (isBuildkite) { + return getEnv("BUILDKITE_BUILD_ID"); + } + + if (isGithubAction) { + return getEnv("GITHUB_RUN_ID"); + } +} + +/** + * @returns {number | undefined} + */ +export function getBuildNumber() { + if (isBuildkite) { + return parseInt(getEnv("BUILDKITE_BUILD_NUMBER")); + } + + if (isGithubAction) { + return parseInt(getEnv("GITHUB_RUN_ID")); + } +} + +/** + * @returns {URL | undefined} + */ +export function getBuildUrl() { + if (isBuildkite) { + const buildUrl = getEnv("BUILDKITE_BUILD_URL"); + const jobId = getEnv("BUILDKITE_JOB_ID"); + return new URL(`#${jobId}`, buildUrl); + } + + if (isGithubAction) { + const baseUrl = getEnv("GITHUB_SERVER_URL", false) || "https://github.com"; + const repository = getEnv("GITHUB_REPOSITORY"); + const runId = getEnv("GITHUB_RUN_ID"); + return new URL(`${repository}/actions/runs/${runId}`, baseUrl); + } +} + +/** + * @returns {string | undefined} + */ +export function getBuildLabel() { + if (isBuildkite) { + const label = getEnv("BUILDKITE_GROUP_LABEL", false) || getEnv("BUILDKITE_LABEL", false); + if (label) { + return label; + } + } + + if (isGithubAction) { + const label = getEnv("GITHUB_WORKFLOW", false); + if (label) { + return label; + } + } +} + +/** + * @typedef {object} BuildArtifact + * @property {string} [job] + * @property {string} filename + * @property {string} url + */ + +/** + * @returns {Promise} + */ +export async function getBuildArtifacts() { + const buildId = await getBuildkiteBuildNumber(); + if (buildId) { + return getBuildkiteArtifacts(buildId); + } +} + +/** + * @returns {Promise} + */ +export async function getBuildkiteBuildNumber() { + if (isBuildkite) { + const number = parseInt(getEnv("BUILDKITE_BUILD_NUMBER", false)); + if (!isNaN(number)) { + return number; + } + } + + const repository = getRepository(); + const commit = getCommit(); + if (!repository || !commit) { + return; + } + + const { status, error, body } = await curl(`https://api.github.com/repos/${repository}/commits/${commit}/statuses`, { + json: true, + }); + if (status === 404) { + return; + } + if (error) { + throw error; + } + + for (const { target_url: url } of body) { + const { hostname, pathname } = new URL(url); + if (hostname === "buildkite.com") { + const buildId = parseInt(pathname.split("/").pop()); + if (!isNaN(buildId)) { + return buildId; + } + } + } +} + +/** + * @param {string} buildId + * @returns {Promise} + */ +export async function getBuildkiteArtifacts(buildId) { + const orgId = 
getEnv("BUILDKITE_ORGANIZATION_SLUG", false) || "bun"; + const pipelineId = getEnv("BUILDKITE_PIPELINE_SLUG", false) || "bun"; + const { jobs } = await curlSafe(`https://buildkite.com/${orgId}/${pipelineId}/builds/${buildId}.json`, { + json: true, + }); + + const artifacts = await Promise.all( + jobs.map(async ({ id: jobId, step_key: jobKey }) => { + const artifacts = await curlSafe( + `https://buildkite.com/organizations/${orgId}/pipelines/${pipelineId}/builds/${buildId}/jobs/${jobId}/artifacts`, + { json: true }, + ); + + return artifacts.map(({ path, url }) => { + return { + job: jobKey, + filename: path, + url: new URL(url, "https://buildkite.com/").toString(), + }; + }); + }), + ); + + return artifacts.flat(); +} + +/** + * @param {string} [filename] + * @param {number} [line] + * @returns {URL | undefined} + */ +export function getFileUrl(filename, line) { + let cwd; + if (filename?.startsWith("vendor")) { + const parentPath = resolve(dirname(filename)); + const { error, stdout } = spawnSync(["git", "rev-parse", "--show-toplevel"], { cwd: parentPath }); + if (error) { + return; + } + cwd = stdout.trim(); + } + + const baseUrl = getRepositoryUrl(cwd); + if (!filename) { + return baseUrl; + } + + const filePath = (cwd ? relative(cwd, filename) : filename).replace(/\\/g, "/"); + const pullRequest = getPullRequest(); + + if (pullRequest) { + const fileMd5 = createHash("sha256").update(filePath).digest("hex"); + const url = new URL(`pull/${pullRequest}/files#diff-${fileMd5}`, `${baseUrl}/`); + if (typeof line !== "undefined") { + return new URL(`R${line}`, url); + } + return url; + } + + const commit = getCommit(cwd); + const url = new URL(`blob/${commit}/${filePath}`, `${baseUrl}/`).toString(); + if (typeof line !== "undefined") { + return new URL(`#L${line}`, url); + } + return url; +} + +/** + * @typedef {object} BuildkiteBuild + * @property {string} id + * @property {string} commit_id + * @property {string} branch_name + */ + +/** + * @returns {Promise} + */ +export async function getLastSuccessfulBuild() { + if (isBuildkite) { + let depth = 0; + let url = getBuildUrl(); + if (url) { + url.hash = ""; + } + + while (url) { + const { error, body } = await curl(`${url}.json`, { json: true }); + if (error) { + return; + } + + const { state, prev_branch_build: previousBuild, steps } = body; + if (depth++) { + if (state === "failed" || state === "passed" || state === "canceled") { + const buildSteps = steps.filter(({ label }) => label.endsWith("build-bun")); + if (buildSteps.length) { + if (buildSteps.every(({ outcome }) => outcome === "passed")) { + return body; + } + return; + } + } + } + + if (!previousBuild) { + return; + } + + url = new URL(previousBuild["url"], url); + } + } +} + +/** + * @param {string} string + * @returns {string} + */ +export function stripAnsi(string) { + return string.replace(/\u001b\[\d+m/g, ""); +} + +/** + * @param {string} string + * @returns {string} + */ +export function escapeGitHubAction(string) { + return string.replace(/%/g, "%25").replace(/\r/g, "%0D").replace(/\n/g, "%0A"); +} + +/** + * @param {string} string + * @returns {string} + */ +export function unescapeGitHubAction(string) { + return string.replace(/%25/g, "%").replace(/%0D/g, "\r").replace(/%0A/g, "\n"); +} + +/** + * @param {string} string + * @returns {string} + */ +export function escapeHtml(string) { + return string + .replace(/&/g, "&") + .replace(//g, ">") + .replace(/"/g, """) + .replace(/'/g, "'") + .replace(/`/g, "`"); +} + +/** + * @param {string} string + * @returns {string} + */ 
+export function escapeCodeBlock(string) { + return string.replace(/`/g, "\\`"); +} + +/** + * @returns {string} + */ +export function tmpdir() { + if (isWindows) { + for (const key of ["TMPDIR", "TEMP", "TEMPDIR", "TMP", "RUNNER_TEMP"]) { + const tmpdir = getEnv(key, false); + if (!tmpdir || /cygwin|cygdrive/i.test(tmpdir) || !/^[a-z]/i.test(tmpdir)) { + continue; + } + return normalizeWindows(tmpdir); + } + + const appData = process.env["LOCALAPPDATA"]; + if (appData) { + const appDataTemp = join(appData, "Temp"); + if (existsSync(appDataTemp)) { + return appDataTemp; + } + } + } + + if (isMacOS || isLinux) { + if (existsSync("/tmp")) { + return "/tmp"; + } + } + + return nodeTmpdir(); +} + +/** + * @param {string} string + * @returns {string} + */ +function escapePowershell(string) { + return string.replace(/'/g, "''").replace(/`/g, "``"); +} + +/** + * @param {string} filename + * @param {string} [output] + * @returns {Promise} + */ +export async function unzip(filename, output) { + const destination = output || mkdtempSync(join(tmpdir(), "unzip-")); + if (isWindows) { + const command = `Expand-Archive -Force -LiteralPath "${escapePowershell(filename)}" -DestinationPath "${escapePowershell(destination)}"`; + await spawnSafe(["powershell", "-Command", command]); + } else { + await spawnSafe(["unzip", "-o", filename, "-d", destination]); + } + return destination; +} + +/** + * @param {string} string + * @returns {"darwin" | "linux" | "windows"} + */ +export function parseOs(string) { + if (/darwin|apple|mac/i.test(string)) { + return "darwin"; + } + if (/linux/i.test(string)) { + return "linux"; + } + if (/win/i.test(string)) { + return "windows"; + } + throw new Error(`Unsupported operating system: ${string}`); +} + +/** + * @returns {"darwin" | "linux" | "windows"} + */ +export function getOs() { + return parseOs(process.platform); +} + +/** + * @param {string} string + * @returns {"x64" | "aarch64"} + */ +export function parseArch(string) { + if (/x64|amd64|x86_64/i.test(string)) { + return "x64"; + } + if (/arm64|aarch64/i.test(string)) { + return "aarch64"; + } + throw new Error(`Unsupported architecture: ${string}`); +} + +/** + * @returns {"x64" | "aarch64"} + */ +export function getArch() { + return parseArch(process.arch); +} + +/** + * @returns {"musl" | "gnu" | undefined} + */ +export function getAbi() { + if (isLinux) { + const arch = getArch() === "x64" ? "x86_64" : "aarch64"; + const muslLibPath = `/lib/ld-musl-${arch}.so.1`; + if (existsSync(muslLibPath)) { + return "musl"; + } + + const gnuLibPath = `/lib/ld-linux-${arch}.so.2`; + if (existsSync(gnuLibPath)) { + return "gnu"; + } + } +} + +/** + * @typedef {object} Target + * @property {"darwin" | "linux" | "windows"} os + * @property {"x64" | "aarch64"} arch + * @property {"musl"} [abi] + * @property {boolean} [baseline] + * @property {boolean} profile + * @property {string} label + */ + +/** + * @param {string} string + * @returns {Target} + */ +export function parseTarget(string) { + const os = parseOs(string); + const arch = parseArch(string); + const abi = os === "linux" && string.includes("-musl") ? "musl" : undefined; + const baseline = arch === "x64" ? 
string.includes("-baseline") : undefined; + const profile = string.includes("-profile"); + + let label = `${os}-${arch}`; + if (abi) { + label += `-${abi}`; + } + if (baseline) { + label += "-baseline"; + } + if (profile) { + label += "-profile"; + } + + return { label, os, arch, abi, baseline, profile }; +} + +/** + * @param {string} target + * @param {string} [release] + * @returns {Promise} + */ +export async function getTargetDownloadUrl(target, release) { + const { label, os, arch, abi, baseline } = parseTarget(target); + const baseUrl = "https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/"; + const filename = `bun-${label}.zip`; + + const exists = async url => { + const { status } = await curl(url, { method: "HEAD" }); + return status !== 404; + }; + + if (!release || /^(stable|latest|canary)$/i.test(release)) { + const tag = release === "canary" ? "canary" : "latest"; + const url = new URL(`${tag}/${filename}`, baseUrl); + if (await exists(url)) { + return url; + } + } + + if (/^(bun-v|v)?(\d+\.\d+\.\d+)$/i.test(release)) { + const [, major, minor, patch] = /(\d+)\.(\d+)\.(\d+)/i.exec(release); + const url = new URL(`bun-v${major}.${minor}.${patch}/${filename}`, baseUrl); + if (await exists(url)) { + return url; + } + } + + if (/^https?:\/\//i.test(release) && (await exists(release))) { + return new URL(release); + } + + if (release.length === 40 && /^[0-9a-f]{40}$/i.test(release)) { + const releaseUrl = new URL(`${release}/${filename}`, baseUrl); + if (await exists(releaseUrl)) { + return releaseUrl; + } + + const canaryUrl = new URL(`${release}-canary/${filename}`, baseUrl); + if (await exists(canaryUrl)) { + return canaryUrl; + } + + const statusUrl = new URL(`https://api.github.com/repos/oven-sh/bun/commits/${release}/status`).toString(); + const { error, body } = await curl(statusUrl, { json: true }); + if (error) { + throw new Error(`Failed to fetch commit status: ${release}`, { cause: error }); + } + + const { statuses } = body; + const buildUrls = new Set(); + for (const { target_url: url } of statuses) { + const { hostname, origin, pathname } = new URL(url); + if (hostname === "buildkite.com") { + buildUrls.add(`${origin}${pathname}.json`); + } + } + + const buildkiteUrl = new URL("https://buildkite.com/"); + for (const url of buildUrls) { + const { status, error, body } = await curl(url, { json: true }); + if (status === 404) { + continue; + } + if (error) { + throw new Error(`Failed to fetch build: ${url}`, { cause: error }); + } + + const { jobs } = body; + const job = jobs.find( + ({ step_key: key }) => + key && + key.includes("build-bun") && + key.includes(os) && + key.includes(arch) && + (!baseline || key.includes("baseline")) && + (!abi || key.includes(abi)), + ); + if (!job) { + continue; + } + + const { base_path: jobPath } = job; + const artifactsUrl = new URL(`${jobPath}/artifacts`, buildkiteUrl); + { + const { error, body } = await curl(artifactsUrl, { json: true }); + if (error) { + continue; + } + + for (const { url, file_name: name } of body) { + if (name === filename) { + return new URL(url, artifactsUrl); + } + } + } + } + } + + throw new Error(`Failed to find release: ${release}`); +} + +/** + * @param {string} target + * @param {string} [release] + * @returns {Promise} + */ +export async function downloadTarget(target, release) { + const url = await getTargetDownloadUrl(target, release); + const { error, body } = await curl(url, { arrayBuffer: true }); + if (error) { + throw new Error(`Failed to download target: ${target} at ${release}`, { 
cause: error }); + } + + const tmpPath = mkdtempSync(join(tmpdir(), "bun-download-")); + const zipPath = join(tmpPath, "bun.zip"); + + writeFileSync(zipPath, new Uint8Array(body)); + const unzipPath = await unzip(zipPath, tmpPath); + + for (const entry of readdirSync(unzipPath, { recursive: true, encoding: "utf-8" })) { + const exePath = join(unzipPath, entry); + if (/bun(?:\.exe)?$/i.test(entry)) { + return exePath; + } + } + + throw new Error(`Failed to find bun executable: ${unzipPath}`); +} + +/** + * @returns {string | undefined} + */ +export function getTailscaleIp() { + let tailscale = "tailscale"; + if (isMacOS) { + const tailscaleApp = "/Applications/Tailscale.app/Contents/MacOS/tailscale"; + if (existsSync(tailscaleApp)) { + tailscale = tailscaleApp; + } + } + + const { error, stdout } = spawnSync([tailscale, "ip", "--1"]); + if (!error) { + return stdout.trim(); + } +} + +/** + * @returns {string | undefined} + */ +export function getPublicIp() { + for (const url of ["https://checkip.amazonaws.com", "https://ipinfo.io/ip"]) { + const { error, stdout } = spawnSync(["curl", url]); + if (!error) { + return stdout.trim(); + } + } +} + +/** + * @returns {string} + */ +export function getHostname() { + if (isBuildkite) { + const agent = getEnv("BUILDKITE_AGENT_NAME", false); + if (agent) { + return agent; + } + } + + if (isGithubAction) { + const runner = getEnv("RUNNER_NAME", false); + if (runner) { + return runner; + } + } + + return hostname(); +} + +/** + * @returns {string} + */ +export function getUsername() { + const { username } = userInfo(); + return username; +} + +/** + * @returns {string} + */ +export function getDistro() { + if (isMacOS) { + return "macOS"; + } + + if (isLinux) { + const releasePath = "/etc/os-release"; + if (existsSync(releasePath)) { + const releaseFile = readFile(releasePath, { cache: true }); + const match = releaseFile.match(/ID=\"(.*)\"/); + if (match) { + return match[1]; + } + } + + const { error, stdout } = spawnSync(["lsb_release", "-is"]); + if (!error) { + return stdout.trim(); + } + + return "Linux"; + } + + if (isWindows) { + const { error, stdout } = spawnSync(["cmd", "/c", "ver"]); + if (!error) { + return stdout.trim(); + } + + return "Windows"; + } + + return `${process.platform} ${process.arch}`; +} + +/** + * @returns {string | undefined} + */ +export function getDistroRelease() { + if (isMacOS) { + const { error, stdout } = spawnSync(["sw_vers", "-productVersion"]); + if (!error) { + return stdout.trim(); + } + } + + if (isLinux) { + const releasePath = "/etc/os-release"; + if (existsSync(releasePath)) { + const releaseFile = readFile(releasePath, { cache: true }); + const match = releaseFile.match(/VERSION_ID=\"(.*)\"/); + if (match) { + return match[1]; + } + } + + const { error, stdout } = spawnSync(["lsb_release", "-rs"]); + if (!error) { + return stdout.trim(); + } + } + + if (isWindows) { + const { error, stdout } = spawnSync(["cmd", "/c", "ver"]); + if (!error) { + return stdout.trim(); + } + } +} + +/** + * @returns {Promise} + */ +export async function getCanaryRevision() { + const repository = getRepository() || "oven-sh/bun"; + const { error: releaseError, body: release } = await curl( + new URL(`repos/${repository}/releases/latest`, getGithubApiUrl()), + { json: true }, + ); + if (releaseError) { + return 1; + } + + const commit = getCommit(); + const { tag_name: latest } = release; + const { error: compareError, body: compare } = await curl( + new URL(`repos/${repository}/compare/${latest}...${commit}`, 
getGithubApiUrl()), + { json: true }, + ); + if (compareError) { + return 1; + } + + const { ahead_by: revision } = compare; + if (typeof revision === "number") { + return revision; + } + + return 1; +} + +/** + * @returns {URL} + */ +export function getGithubApiUrl() { + return new URL(getEnv("GITHUB_API_URL", false) || "https://api.github.com"); +} + +/** + * @returns {URL} + */ +export function getGithubUrl() { + return new URL(getEnv("GITHUB_SERVER_URL", false) || "https://github.com"); +} + +/** + * @param {string} title + * @param {function} [fn] + */ +export function startGroup(title, fn) { + if (isGithubAction) { + console.log(`::group::${stripAnsi(title)}`); + } else if (isBuildkite) { + console.log(`--- ${title}`); + } else { + console.group(title); + } + + if (typeof fn === "function") { + let result; + try { + result = fn(); + } finally { + if (result instanceof Promise) { + return result.finally(() => endGroup()); + } else { + endGroup(); + } + } + } +} + +export function endGroup() { + if (isGithubAction) { + console.log("::endgroup::"); + } else { + console.groupEnd(); + } +} + +export function printEnvironment() { + startGroup("Machine", () => { + console.log("Operating System:", getOs()); + console.log("Architecture:", getArch()); + if (isLinux) { + console.log("ABI:", getAbi()); + } + console.log("Distro:", getDistro()); + console.log("Release:", getDistroRelease()); + console.log("Hostname:", getHostname()); + if (isCI) { + console.log("Tailscale IP:", getTailscaleIp()); + console.log("Public IP:", getPublicIp()); + } + console.log("Username:", getUsername()); + console.log("Working Directory:", process.cwd()); + console.log("Temporary Directory:", tmpdir()); + }); + + if (isCI) { + startGroup("Environment", () => { + for (const [key, value] of Object.entries(process.env)) { + console.log(`${key}:`, value); + } + }); + } + + startGroup("Repository", () => { + console.log("Commit:", getCommit()); + console.log("Message:", getCommitMessage()); + console.log("Branch:", getBranch()); + console.log("Main Branch:", getMainBranch()); + console.log("Is Fork:", isFork()); + console.log("Is Merge Queue:", isMergeQueue()); + console.log("Is Main Branch:", isMainBranch()); + console.log("Is Pull Request:", isPullRequest()); + if (isPullRequest()) { + console.log("Pull Request:", getPullRequest()); + console.log("Target Branch:", getTargetBranch()); + } + }); + + if (isCI) { + startGroup("CI", () => { + console.log("Build ID:", getBuildId()); + console.log("Build Label:", getBuildLabel()); + console.log("Build URL:", `${getBuildUrl()}`); + }); + } +} diff --git a/scripts/vs-shell.ps1 b/scripts/vs-shell.ps1 new file mode 100755 index 0000000000..35694cd1f6 --- /dev/null +++ b/scripts/vs-shell.ps1 @@ -0,0 +1,46 @@ +# Ensures that commands run in a Visual Studio environment. +# This is required to run commands like cmake and ninja on Windows. + +$ErrorActionPreference = "Stop" + +if($env:VSINSTALLDIR -eq $null) { + Write-Host "Loading Visual Studio environment, this may take a second..." + + $vswhere = "C:\Program Files (x86)\Microsoft Visual Studio\Installer\vswhere.exe" + if (!(Test-Path $vswhere)) { + throw "Command not found: vswhere (did you install Visual Studio?)" + } + + $vsDir = (& $vswhere -prerelease -latest -property installationPath) + if ($vsDir -eq $null) { + $vsDir = Get-ChildItem -Path "C:\Program Files\Microsoft Visual Studio\2022" -Directory + if ($vsDir -eq $null) { + throw "Visual Studio directory not found." 
+ } + $vsDir = $vsDir.FullName + } + + Push-Location $vsDir + try { + $vsShell = (Join-Path -Path $vsDir -ChildPath "Common7\Tools\Launch-VsDevShell.ps1") + . $vsShell -Arch amd64 -HostArch amd64 + } finally { + Pop-Location + } +} + +if($env:VSCMD_ARG_TGT_ARCH -eq "x86") { + throw "Visual Studio environment is targeting 32 bit, but only 64 bit is supported." +} + +if ($args.Count -gt 0) { + $command = $args[0] + $commandArgs = @() + if ($args.Count -gt 1) { + $commandArgs = @($args[1..($args.Count - 1)] | % {$_}) + } + + Write-Host "$ $command $commandArgs" + & $command $commandArgs + exit $LASTEXITCODE +} diff --git a/src/bun.js/bindings/.clang-format b/src/.clang-format similarity index 97% rename from src/bun.js/bindings/.clang-format rename to src/.clang-format index d8fb9d0b97..d3d5f2ecb3 100644 --- a/src/bun.js/bindings/.clang-format +++ b/src/.clang-format @@ -11,7 +11,7 @@ AllowAllParametersOfDeclarationOnNextLine: true AllowShortBlocksOnASingleLine: false AllowShortCaseLabelsOnASingleLine: false AllowShortFunctionsOnASingleLine: All -AllowShortIfStatementsOnASingleLine: false +AllowShortIfStatementsOnASingleLine: true AllowShortLoopsOnASingleLine: false AlwaysBreakAfterDefinitionReturnType: None AlwaysBreakAfterReturnType: None @@ -73,6 +73,7 @@ IncludeIsMainRegex: "(Test)?$" IndentCaseLabels: false IndentWidth: 4 IndentWrappedFunctionNames: false +InsertNewlineAtEOF: true JavaScriptQuotes: Leave JavaScriptWrapImports: true KeepEmptyLinesAtTheStartOfBlocks: true diff --git a/src/ArenaAllocator.zig b/src/ArenaAllocator.zig deleted file mode 100644 index 4c62038cab..0000000000 --- a/src/ArenaAllocator.zig +++ /dev/null @@ -1,248 +0,0 @@ -const std = @import("std"); -const bun = @import("root").bun; -const assert = bun.assert; -const mem = std.mem; -const Allocator = std.mem.Allocator; - -/// This allocator takes an existing allocator, wraps it, and provides an interface -/// where you can allocate without freeing, and then free it all together. -pub const ArenaAllocator = struct { - child_allocator: Allocator, - state: State, - - /// Inner state of ArenaAllocator. Can be stored rather than the entire ArenaAllocator - /// as a memory-saving optimization. - pub const State = struct { - buffer_list: std.SinglyLinkedList(usize) = .{}, - end_index: usize = 0, - - pub fn promote(self: State, child_allocator: Allocator) ArenaAllocator { - return .{ - .child_allocator = child_allocator, - .state = self, - }; - } - }; - - pub fn allocator(self: *ArenaAllocator) Allocator { - return .{ - .ptr = self, - .vtable = &.{ - .alloc = alloc, - .resize = resize, - .free = free, - }, - }; - } - - const BufNode = std.SinglyLinkedList(usize).Node; - - pub fn init(child_allocator: Allocator) ArenaAllocator { - return (State{}).promote(child_allocator); - } - - pub fn deinit(self: ArenaAllocator) void { - // NOTE: When changing this, make sure `reset()` is adjusted accordingly! - - var it = self.state.buffer_list.first; - while (it) |node| { - // this has to occur before the free because the free frees node - const next_it = node.next; - const align_bits = std.math.log2_int(usize, @alignOf(BufNode)); - const alloc_buf = @as([*]u8, @ptrCast(node))[0..node.data]; - self.child_allocator.rawFree(alloc_buf, align_bits, @returnAddress()); - it = next_it; - } - } - - pub const ResetMode = union(enum) { - /// Releases all allocated memory in the arena. - free_all, - /// This will pre-heat the arena for future allocations by allocating a - /// large enough buffer for all previously done allocations. 
- /// Preheating will speed up the allocation process by invoking the backing allocator - /// less often than before. If `reset()` is used in a loop, this means that after the - /// biggest operation, no memory allocations are performed anymore. - retain_capacity, - /// This is the same as `retain_capacity`, but the memory will be shrunk to - /// this value if it exceeds the limit. - retain_with_limit: usize, - }; - /// Queries the current memory use of this arena. - /// This will **not** include the storage required for internal keeping. - pub fn queryCapacity(self: ArenaAllocator) usize { - var size: usize = 0; - var it = self.state.buffer_list.first; - while (it) |node| : (it = node.next) { - // Compute the actually allocated size excluding the - // linked list node. - size += node.data - @sizeOf(BufNode); - } - return size; - } - /// Resets the arena allocator and frees all allocated memory. - /// - /// `mode` defines how the currently allocated memory is handled. - /// See the variant documentation for `ResetMode` for the effects of each mode. - /// - /// The function will return whether the reset operation was successful or not. - /// If the reallocation failed `false` is returned. The arena will still be fully - /// functional in that case, all memory is released. Future allocations just might - /// be slower. - /// - /// NOTE: If `mode` is `free_mode`, the function will always return `true`. - pub fn reset(self: *ArenaAllocator, mode: ResetMode) bool { - // Some words on the implementation: - // The reset function can be implemented with two basic approaches: - // - Counting how much bytes were allocated since the last reset, and storing that - // information in State. This will make reset fast and alloc only a teeny tiny bit - // slower. - // - Counting how much bytes were allocated by iterating the chunk linked list. This - // will make reset slower, but alloc() keeps the same speed when reset() as if reset() - // would not exist. - // - // The second variant was chosen for implementation, as with more and more calls to reset(), - // the function will get faster and faster. At one point, the complexity of the function - // will drop to amortized O(1), as we're only ever having a single chunk that will not be - // reallocated, and we're not even touching the backing allocator anymore. - // - // Thus, only the first hand full of calls to reset() will actually need to iterate the linked - // list, all future calls are just taking the first node, and only resetting the `end_index` - // value. 
- const requested_capacity = switch (mode) { - .retain_capacity => self.queryCapacity(), - .retain_with_limit => |limit| @min(limit, self.queryCapacity()), - .free_all => 0, - }; - if (requested_capacity == 0) { - // just reset when we don't have anything to reallocate - self.deinit(); - self.state = State{}; - return true; - } - const total_size = requested_capacity + @sizeOf(BufNode); - const align_bits = std.math.log2_int(usize, @alignOf(BufNode)); - // Free all nodes except for the last one - var it = self.state.buffer_list.first; - const maybe_first_node = while (it) |node| { - // this has to occur before the free because the free frees node - const next_it = node.next; - if (next_it == null) - break node; - const alloc_buf = @as([*]u8, @ptrCast(node))[0..node.data]; - self.child_allocator.rawFree(alloc_buf, align_bits, @returnAddress()); - it = next_it; - } else null; - assert(maybe_first_node == null or maybe_first_node.?.next == null); - // reset the state before we try resizing the buffers, so we definitely have reset the arena to 0. - self.state.end_index = 0; - if (maybe_first_node) |first_node| { - self.state.buffer_list.first = first_node; - // perfect, no need to invoke the child_allocator - if (first_node.data == total_size) - return true; - const first_alloc_buf = @as([*]u8, @ptrCast(first_node))[0..first_node.data]; - if (self.child_allocator.rawResize(first_alloc_buf, align_bits, total_size, @returnAddress())) { - // successful resize - first_node.data = total_size; - } else { - // manual realloc - const new_ptr = self.child_allocator.rawAlloc(total_size, align_bits, @returnAddress()) orelse { - // we failed to preheat the arena properly, signal this to the user. - return false; - }; - self.child_allocator.rawFree(first_alloc_buf, align_bits, @returnAddress()); - const node: *BufNode = @ptrCast(@alignCast(new_ptr)); - node.* = .{ .data = total_size }; - self.state.buffer_list.first = node; - } - } - return true; - } - - fn createNode(self: *ArenaAllocator, prev_len: usize, minimum_size: usize) ?*BufNode { - const actual_min_size = minimum_size + (@sizeOf(BufNode) + 16); - const big_enough_len = prev_len + actual_min_size; - const len = big_enough_len + big_enough_len / 2; - const log2_align = comptime std.math.log2_int(usize, @alignOf(BufNode)); - const ptr = self.child_allocator.rawAlloc(len, log2_align, @returnAddress()) orelse - return null; - const buf_node: *BufNode = @ptrCast(@alignCast(ptr)); - buf_node.* = .{ .data = len }; - self.state.buffer_list.prepend(buf_node); - self.state.end_index = 0; - return buf_node; - } - - fn alloc(ctx: *anyopaque, n: usize, log2_ptr_align: u8, ra: usize) ?[*]u8 { - const self: *ArenaAllocator = @ptrCast(@alignCast(ctx)); - _ = ra; - - const ptr_align = @as(usize, 1) << @as(Allocator.Log2Align, @intCast(log2_ptr_align)); - var cur_node = if (self.state.buffer_list.first) |first_node| - first_node - else - (self.createNode(0, n + ptr_align) orelse return null); - while (true) { - const cur_alloc_buf = @as([*]u8, @ptrCast(cur_node))[0..cur_node.data]; - const cur_buf = cur_alloc_buf[@sizeOf(BufNode)..]; - const addr = @intFromPtr(cur_buf.ptr) + self.state.end_index; - const adjusted_addr = mem.alignForward(usize, addr, ptr_align); - const adjusted_index = self.state.end_index + (adjusted_addr - addr); - const new_end_index = adjusted_index + n; - - if (new_end_index <= cur_buf.len) { - const result = cur_buf[adjusted_index..new_end_index]; - self.state.end_index = new_end_index; - return result.ptr; - } - - const bigger_buf_size = 
@sizeOf(BufNode) + new_end_index; - const log2_align = comptime std.math.log2_int(usize, @alignOf(BufNode)); - if (self.child_allocator.rawResize(cur_alloc_buf, log2_align, bigger_buf_size, @returnAddress())) { - cur_node.data = bigger_buf_size; - } else { - // Allocate a new node if that's not possible - cur_node = self.createNode(cur_buf.len, n + ptr_align) orelse return null; - } - } - } - - fn resize(ctx: *anyopaque, buf: []u8, log2_buf_align: u8, new_len: usize, ret_addr: usize) bool { - const self: *ArenaAllocator = @ptrCast(@alignCast(ctx)); - _ = log2_buf_align; - _ = ret_addr; - - const cur_node = self.state.buffer_list.first orelse return false; - const cur_buf = @as([*]u8, @ptrCast(cur_node))[@sizeOf(BufNode)..cur_node.data]; - if (@intFromPtr(cur_buf.ptr) + self.state.end_index != @intFromPtr(buf.ptr) + buf.len) { - // It's not the most recent allocation, so it cannot be expanded, - // but it's fine if they want to make it smaller. - return new_len <= buf.len; - } - - if (buf.len >= new_len) { - self.state.end_index -= buf.len - new_len; - return true; - } else if (cur_buf.len - self.state.end_index >= new_len - buf.len) { - self.state.end_index += new_len - buf.len; - return true; - } else { - return false; - } - } - - fn free(ctx: *anyopaque, buf: []u8, log2_buf_align: u8, ret_addr: usize) void { - _ = log2_buf_align; - _ = ret_addr; - - const self: *ArenaAllocator = @ptrCast(@alignCast(ctx)); - - const cur_node = self.state.buffer_list.first orelse return; - const cur_buf = @as([*]u8, @ptrCast(cur_node))[@sizeOf(BufNode)..cur_node.data]; - - if (@intFromPtr(cur_buf.ptr) + self.state.end_index == @intFromPtr(buf.ptr) + buf.len) { - self.state.end_index -= buf.len; - } - } -}; diff --git a/src/Global.zig b/src/Global.zig index d3becfed78..94b0bc70c3 100644 --- a/src/Global.zig +++ b/src/Global.zig @@ -172,23 +172,16 @@ const string = bun.string; pub const BunInfo = struct { bun_version: string, platform: Analytics.GenerateHeader.GeneratePlatform.Platform, - framework: string = "", - framework_version: string = "", const Analytics = @import("./analytics/analytics_thread.zig"); const JSON = bun.JSON; const JSAst = bun.JSAst; - pub fn generate(comptime Bundler: type, bundler: Bundler, allocator: std.mem.Allocator) !JSAst.Expr { - var info = BunInfo{ + pub fn generate(comptime Bundler: type, _: Bundler, allocator: std.mem.Allocator) !JSAst.Expr { + const info = BunInfo{ .bun_version = Global.package_json_version, .platform = Analytics.GenerateHeader.GeneratePlatform.forOS(), }; - if (bundler.options.framework) |framework| { - info.framework = framework.package; - info.framework_version = framework.version; - } - return try JSON.toAST(allocator, BunInfo, info); } }; diff --git a/src/StandaloneModuleGraph.zig b/src/StandaloneModuleGraph.zig index 85d283d9e8..e3daa4da17 100644 --- a/src/StandaloneModuleGraph.zig +++ b/src/StandaloneModuleGraph.zig @@ -9,6 +9,8 @@ const Output = bun.Output; const Global = bun.Global; const Environment = bun.Environment; const Syscall = bun.sys; +const SourceMap = bun.sourcemap; +const StringPointer = bun.StringPointer; const w = std.os.windows; @@ -33,6 +35,8 @@ pub const StandaloneModuleGraph = struct { pub const base_public_path = targetBasePublicPath(Environment.os, ""); + pub const base_public_path_with_default_suffix = targetBasePublicPath(Environment.os, "root/"); + pub fn targetBasePublicPath(target: Environment.OperatingSystem, comptime suffix: [:0]const u8) [:0]const u8 { return switch (target) { .windows => "B:/~BUN/" ++ suffix, @@ -54,6 
+58,11 @@ pub const StandaloneModuleGraph = struct { if (!isBunStandaloneFilePath(base_path)) { return null; } + + return this.findAssumeStandalonePath(name); + } + + pub fn findAssumeStandalonePath(this: *const StandaloneModuleGraph, name: []const u8) ?*File { if (Environment.isWindows) { var normalized_buf: bun.PathBuffer = undefined; const normalized = bun.path.platformToPosixBuf(u8, name, &normalized_buf); @@ -66,8 +75,10 @@ pub const StandaloneModuleGraph = struct { name: Schema.StringPointer = .{}, contents: Schema.StringPointer = .{}, sourcemap: Schema.StringPointer = .{}, + bytecode: Schema.StringPointer = .{}, encoding: Encoding = .latin1, loader: bun.options.Loader = .file, + module_format: ModuleFormat = .none, }; pub const Encoding = enum(u8) { @@ -79,6 +90,12 @@ pub const StandaloneModuleGraph = struct { utf8 = 2, }; + pub const ModuleFormat = enum(u8) { + none = 0, + esm = 1, + cjs = 2, + }; + pub const File = struct { name: []const u8 = "", loader: bun.options.Loader, @@ -87,6 +104,14 @@ pub const StandaloneModuleGraph = struct { cached_blob: ?*bun.JSC.WebCore.Blob = null, encoding: Encoding = .binary, wtf_string: bun.String = bun.String.empty, + bytecode: []u8 = "", + module_format: ModuleFormat = .none, + + pub fn lessThanByIndex(ctx: []const File, lhs_i: u32, rhs_i: u32) bool { + const lhs = ctx[lhs_i]; + const rhs = ctx[rhs_i]; + return bun.strings.cmpStringsAsc({}, lhs.name, rhs.name); + } pub fn toWTFString(this: *File) bun.String { if (this.wtf_string.isEmpty()) { @@ -106,7 +131,7 @@ pub const StandaloneModuleGraph = struct { pub fn blob(this: *File, globalObject: *bun.JSC.JSGlobalObject) *bun.JSC.WebCore.Blob { if (this.cached_blob == null) { - var store = bun.JSC.WebCore.Blob.Store.init(@constCast(this.contents), bun.default_allocator); + const store = bun.JSC.WebCore.Blob.Store.init(@constCast(this.contents), bun.default_allocator); // make it never free store.ref(); @@ -120,8 +145,16 @@ pub const StandaloneModuleGraph = struct { b.content_type_allocated = false; } + // The real name goes here: store.data.bytes.stored_name = bun.PathString.init(this.name); + // The pretty name goes here: + if (strings.hasPrefixComptime(this.name, base_public_path_with_default_suffix)) { + b.name = bun.String.createUTF8(this.name[base_public_path_with_default_suffix.len..]); + } else if (this.name.len > 0) { + b.name = bun.String.createUTF8(this.name); + } + this.cached_blob = b; } @@ -130,24 +163,61 @@ pub const StandaloneModuleGraph = struct { }; pub const LazySourceMap = union(enum) { - compressed: []const u8, - decompressed: bun.sourcemap, + serialized: SerializedSourceMap, + parsed: *SourceMap.ParsedSourceMap, + none, - pub fn load(this: *LazySourceMap, log: *bun.logger.Log, allocator: std.mem.Allocator) !*bun.sourcemap { - if (this.* == .decompressed) return &this.decompressed; + /// It probably is not possible to run two decoding jobs on the same file + var init_lock: bun.Lock = .{}; - var decompressed = try allocator.alloc(u8, bun.zstd.getDecompressedSize(this.compressed)); - const result = bun.zstd.decompress(decompressed, this.compressed); - if (result == .err) { - allocator.free(decompressed); - log.addError(null, bun.logger.Loc.Empty, bun.span(result.err)) catch unreachable; - return error.@"Failed to decompress sourcemap"; - } - errdefer allocator.free(decompressed); - const bytes = decompressed[0..result.success]; + pub fn load(this: *LazySourceMap) ?*SourceMap.ParsedSourceMap { + init_lock.lock(); + defer init_lock.unlock(); - this.* = .{ .decompressed = try 
bun.sourcemap.parse(allocator, &bun.logger.Source.initPathString("sourcemap.json", bytes), log) }; - return &this.decompressed; + return switch (this.*) { + .none => null, + .parsed => |map| map, + .serialized => |serialized| { + var stored = switch (SourceMap.Mapping.parse( + bun.default_allocator, + serialized.mappingVLQ(), + null, + std.math.maxInt(i32), + std.math.maxInt(i32), + )) { + .success => |x| x, + .fail => { + this.* = .none; + return null; + }, + }; + + const source_files = serialized.sourceFileNames(); + const slices = bun.default_allocator.alloc(?[]u8, source_files.len * 2) catch bun.outOfMemory(); + + const file_names: [][]const u8 = @ptrCast(slices[0..source_files.len]); + const decompressed_contents_slice = slices[source_files.len..][0..source_files.len]; + for (file_names, source_files) |*dest, src| { + dest.* = src.slice(serialized.bytes); + } + + @memset(decompressed_contents_slice, null); + + const data = bun.new(SerializedSourceMap.Loaded, .{ + .map = serialized, + .decompressed_files = decompressed_contents_slice, + }); + + stored.external_source_names = file_names; + stored.underlying_provider = .{ .data = @truncate(@intFromPtr(data)) }; + stored.is_standalone_module_graph = true; + + const parsed = stored.new(); // allocate this on the heap + parsed.ref(); // never free + this.* = .{ .parsed = parsed }; + return parsed; + }, + }; } }; @@ -159,13 +229,13 @@ pub const StandaloneModuleGraph = struct { const trailer = "\n---- Bun! ----\n"; - pub fn fromBytes(allocator: std.mem.Allocator, raw_bytes: []const u8, offsets: Offsets) !StandaloneModuleGraph { + pub fn fromBytes(allocator: std.mem.Allocator, raw_bytes: []u8, offsets: Offsets) !StandaloneModuleGraph { if (raw_bytes.len == 0) return StandaloneModuleGraph{ .files = bun.StringArrayHashMap(File).init(allocator), }; const modules_list_bytes = sliceTo(raw_bytes, offsets.modules_ptr); - const modules_list = std.mem.bytesAsSlice(CompiledModuleGraphFile, modules_list_bytes); + const modules_list: []align(1) const CompiledModuleGraphFile = std.mem.bytesAsSlice(CompiledModuleGraphFile, modules_list_bytes); if (offsets.entry_point_id > modules_list.len) { return error.@"Corrupted module graph: entry point ID is greater than module list count"; @@ -180,13 +250,20 @@ pub const StandaloneModuleGraph = struct { .name = sliceToZ(raw_bytes, module.name), .loader = module.loader, .contents = sliceToZ(raw_bytes, module.contents), - .sourcemap = LazySourceMap{ - .compressed = sliceTo(raw_bytes, module.sourcemap), - }, + .sourcemap = if (module.sourcemap.length > 0) + .{ .serialized = .{ + .bytes = @alignCast(sliceTo(raw_bytes, module.sourcemap)), + } } + else + .none, + .bytecode = if (module.bytecode.length > 0) @constCast(sliceTo(raw_bytes, module.bytecode)) else &.{}, + .module_format = module.module_format, }, ); } + modules.lockPointers(); // make the pointers stable forever + return StandaloneModuleGraph{ .bytes = raw_bytes[0..offsets.byte_count], .files = modules, @@ -206,22 +283,30 @@ pub const StandaloneModuleGraph = struct { return bytes[ptr.offset..][0..ptr.length :0]; } - pub fn toBytes(allocator: std.mem.Allocator, prefix: []const u8, output_files: []const bun.options.OutputFile) ![]u8 { + pub fn toBytes(allocator: std.mem.Allocator, prefix: []const u8, output_files: []const bun.options.OutputFile, output_format: bun.options.Format) ![]u8 { var serialize_trace = bun.tracy.traceNamed(@src(), "StandaloneModuleGraph.serialize"); defer serialize_trace.end(); + var entry_point_id: ?usize = null; var string_builder = 
bun.StringBuilder{}; var module_count: usize = 0; - for (output_files, 0..) |output_file, i| { + for (output_files) |output_file| { string_builder.countZ(output_file.dest_path); string_builder.countZ(prefix); if (output_file.value == .buffer) { if (output_file.output_kind == .sourcemap) { - string_builder.cap += bun.zstd.compressBound(output_file.value.buffer.bytes.len); + // This is an over-estimation to ensure that we allocate + // enough memory for the source-map contents. Calculating + // the exact amount is not possible without allocating as it + // involves a JSON parser. + string_builder.cap += output_file.value.buffer.bytes.len * 2; + } else if (output_file.output_kind == .bytecode) { + // Allocate up to 256 byte alignment for bytecode + string_builder.cap += (output_file.value.buffer.bytes.len + 255) / 256 * 256 + 256; } else { if (entry_point_id == null) { if (output_file.output_kind == .@"entry-point") { - entry_point_id = i; + entry_point_id = module_count; } } @@ -236,18 +321,21 @@ pub const StandaloneModuleGraph = struct { string_builder.cap += @sizeOf(CompiledModuleGraphFile) * output_files.len; string_builder.cap += trailer.len; string_builder.cap += 16; - - { - var offsets_ = Offsets{}; - string_builder.cap += std.mem.asBytes(&offsets_).len; - } + string_builder.cap += @sizeOf(Offsets); try string_builder.allocate(allocator); var modules = try std.ArrayList(CompiledModuleGraphFile).initCapacity(allocator, module_count); + var source_map_header_list = std.ArrayList(u8).init(allocator); + defer source_map_header_list.deinit(); + var source_map_string_list = std.ArrayList(u8).init(allocator); + defer source_map_string_list.deinit(); + var source_map_arena = bun.ArenaAllocator.init(allocator); + defer source_map_arena.deinit(); + for (output_files) |output_file| { - if (output_file.output_kind == .sourcemap) { + if (!output_file.output_kind.isFileInStandaloneMode()) { continue; } @@ -257,6 +345,23 @@ pub const StandaloneModuleGraph = struct { const dest_path = bun.strings.removeLeadingDotSlash(output_file.dest_path); + const bytecode: StringPointer = brk: { + if (output_file.bytecode_index != std.math.maxInt(u32)) { + // Use up to 256 byte alignment for bytecode + // Not aligning it correctly will cause a runtime assertion error, or a segfault. 
+ const bytecode = output_files[output_file.bytecode_index].value.buffer.bytes; + const aligned = std.mem.alignInSlice(string_builder.writable(), 128).?; + @memcpy(aligned[0..bytecode.len], bytecode[0..bytecode.len]); + const unaligned_space = aligned[bytecode.len..]; + const offset = @intFromPtr(aligned.ptr) - @intFromPtr(string_builder.ptr.?); + const len = bytecode.len + @min(unaligned_space.len, 128); + string_builder.len += len; + break :brk StringPointer{ .offset = @truncate(offset), .length = @truncate(len) }; + } else { + break :brk .{}; + } + }; + var module = CompiledModuleGraphFile{ .name = string_builder.fmtAppendCountZ("{s}{s}", .{ prefix, @@ -268,14 +373,28 @@ pub const StandaloneModuleGraph = struct { .js, .jsx, .ts, .tsx => .latin1, else => .binary, }, + .module_format = if (output_file.loader.isJavaScriptLike()) switch (output_format) { + .cjs => .cjs, + .esm => .esm, + else => .none, + } else .none, + .bytecode = bytecode, }; + if (output_file.source_map_index != std.math.maxInt(u32)) { - const remaining_slice = string_builder.allocatedSlice()[string_builder.len..]; - const compressed_result = bun.zstd.compress(remaining_slice, output_files[output_file.source_map_index].value.buffer.bytes, 1); - if (compressed_result == .err) { - bun.Output.panic("Unexpected error compressing sourcemap: {s}", .{bun.span(compressed_result.err)}); - } - module.sourcemap = string_builder.add(compressed_result.success); + defer source_map_header_list.clearRetainingCapacity(); + defer source_map_string_list.clearRetainingCapacity(); + _ = source_map_arena.reset(.retain_capacity); + try serializeJsonSourceMapForStandalone( + &source_map_header_list, + &source_map_string_list, + source_map_arena.allocator(), + output_files[output_file.source_map_index].value.buffer.bytes, + ); + module.sourcemap = string_builder.addConcat(&.{ + source_map_header_list.items, + source_map_string_list.items, + }); } modules.appendAssumeCapacity(module); } @@ -293,8 +412,12 @@ pub const StandaloneModuleGraph = struct { if (comptime Environment.isDebug) { // An expensive sanity check: - var graph = try fromBytes(allocator, output_bytes, offsets); - defer graph.files.deinit(); + var graph = try fromBytes(allocator, @alignCast(output_bytes), offsets); + defer { + graph.files.unlockPointers(); + graph.files.deinit(); + } + bun.assert_eql(graph.files.count(), modules.items.len); } @@ -535,8 +658,9 @@ pub const StandaloneModuleGraph = struct { module_prefix: []const u8, outfile: []const u8, env: *bun.DotEnv.Loader, + output_format: bun.options.Format, ) !void { - const bytes = try toBytes(allocator, module_prefix, output_files); + const bytes = try toBytes(allocator, module_prefix, output_files, output_format); if (bytes.len == 0) return; const fd = inject( @@ -824,4 +948,172 @@ pub const StandaloneModuleGraph = struct { else => @compileError("TODO"), } } + + /// Source map serialization in the bundler is specially designed to be + /// loaded in memory as is. Source contents are compressed with ZSTD to + /// reduce the file size, and mappings are stored as uncompressed VLQ. 
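The `SerializedSourceMap` struct that follows pins this down as one flat blob: an 8-byte header (two `u32` counters), a table of `source_files_count` `StringPointer`s for file names, a second table of the same size for the zstd-compressed contents, then the VLQ mapping bytes, and finally the string payload the pointers index into. As a quick sanity check of the offset arithmetic that layout implies (the counts below are illustrative, not taken from the diff):

```zig
const std = @import("std");

test "serialized source map offsets" {
    const source_files_count: usize = 3; // illustrative
    const map_bytes_length: usize = 120; // illustrative

    const header_size = 2 * @sizeOf(u32); // source_files_count + map_bytes_length
    const pointer_tables_size = source_files_count * 2 * (2 * @sizeOf(u32)); // two StringPointer tables
    const mappings_start = header_size + pointer_tables_size;
    const string_payload_start = mappings_start + map_bytes_length;

    try std.testing.expectEqual(@as(usize, 56), mappings_start);
    try std.testing.expectEqual(@as(usize, 176), string_payload_start);
}
```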
+ pub const SerializedSourceMap = struct { + bytes: []const u8, + + /// Following the header bytes: + /// - source_files_count number of StringPointer, file names + /// - source_files_count number of StringPointer, zstd compressed contents + /// - the mapping data, `map_vlq_length` bytes + /// - all the StringPointer contents + pub const Header = extern struct { + source_files_count: u32, + map_bytes_length: u32, + }; + + pub fn header(map: SerializedSourceMap) *align(1) const Header { + return @ptrCast(map.bytes.ptr); + } + + pub fn mappingVLQ(map: SerializedSourceMap) []const u8 { + const head = map.header(); + const start = @sizeOf(Header) + head.source_files_count * @sizeOf(StringPointer) * 2; + return map.bytes[start..][0..head.map_bytes_length]; + } + + pub fn sourceFileNames(map: SerializedSourceMap) []align(1) const StringPointer { + const head = map.header(); + return @as([*]align(1) const StringPointer, @ptrCast(map.bytes[@sizeOf(Header)..]))[0..head.source_files_count]; + } + + fn compressedSourceFiles(map: SerializedSourceMap) []align(1) const StringPointer { + const head = map.header(); + return @as([*]align(1) const StringPointer, @ptrCast(map.bytes[@sizeOf(Header)..]))[head.source_files_count..][0..head.source_files_count]; + } + + /// Once loaded, this map stores additional data for keeping track of source code. + pub const Loaded = struct { + map: SerializedSourceMap, + + /// Only decompress source code once! Once a file is decompressed, + /// it is stored here. Decompression failures are stored as an empty + /// string, which will be treated as "no contents". + decompressed_files: []?[]u8, + + pub fn sourceFileContents(this: Loaded, index: usize) ?[]const u8 { + if (this.decompressed_files[index]) |decompressed| { + return if (decompressed.len == 0) null else decompressed; + } + + const compressed_codes = this.map.compressedSourceFiles(); + const compressed_file = compressed_codes[@intCast(index)].slice(this.map.bytes); + const size = bun.zstd.getDecompressedSize(compressed_file); + + const bytes = bun.default_allocator.alloc(u8, size) catch bun.outOfMemory(); + const result = bun.zstd.decompress(bytes, compressed_file); + + if (result == .err) { + bun.Output.warn("Source map decompression error: {s}", .{result.err}); + bun.default_allocator.free(bytes); + this.decompressed_files[index] = ""; + return null; + } + + const data = bytes[0..result.success]; + this.decompressed_files[index] = data; + return data; + } + }; + }; + + pub fn serializeJsonSourceMapForStandalone( + header_list: *std.ArrayList(u8), + string_payload: *std.ArrayList(u8), + arena: std.mem.Allocator, + json_source: []const u8, + ) !void { + const out = header_list.writer(); + const json_src = bun.logger.Source.initPathString("sourcemap.json", json_source); + var log = bun.logger.Log.init(arena); + defer log.deinit(); + + // the allocator given to the JS parser is not respected for all parts + // of the parse, so we need to remember to reset the ast store + bun.JSAst.Expr.Data.Store.reset(); + bun.JSAst.Stmt.Data.Store.reset(); + defer { + bun.JSAst.Expr.Data.Store.reset(); + bun.JSAst.Stmt.Data.Store.reset(); + } + var json = bun.JSON.parse(&json_src, &log, arena, false) catch + return error.InvalidSourceMap; + + const mappings_str = json.get("mappings") orelse + return error.InvalidSourceMap; + if (mappings_str.data != .e_string) + return error.InvalidSourceMap; + const sources_content = switch ((json.get("sourcesContent") orelse return error.InvalidSourceMap).data) { + .e_array => |arr| arr, + else => 
return error.InvalidSourceMap, + }; + const sources_paths = switch ((json.get("sources") orelse return error.InvalidSourceMap).data) { + .e_array => |arr| arr, + else => return error.InvalidSourceMap, + }; + if (sources_content.items.len != sources_paths.items.len) { + return error.InvalidSourceMap; + } + + const map_vlq: []const u8 = mappings_str.data.e_string.slice(arena); + + try out.writeInt(u32, sources_paths.items.len, .little); + try out.writeInt(u32, @intCast(map_vlq.len), .little); + + const string_payload_start_location = @sizeOf(u32) + + @sizeOf(u32) + + @sizeOf(bun.StringPointer) * sources_content.items.len * 2 + // path + source + map_vlq.len; + + for (sources_paths.items.slice()) |item| { + if (item.data != .e_string) + return error.InvalidSourceMap; + + const decoded = try item.data.e_string.stringCloned(arena); + + const offset = string_payload.items.len; + try string_payload.appendSlice(decoded); + + const slice = bun.StringPointer{ + .offset = @intCast(offset + string_payload_start_location), + .length = @intCast(string_payload.items.len - offset), + }; + try out.writeInt(u32, slice.offset, .little); + try out.writeInt(u32, slice.length, .little); + } + + for (sources_content.items.slice()) |item| { + if (item.data != .e_string) + return error.InvalidSourceMap; + + const utf8 = try item.data.e_string.stringCloned(arena); + defer arena.free(utf8); + + const offset = string_payload.items.len; + + const bound = bun.zstd.compressBound(utf8.len); + try string_payload.ensureUnusedCapacity(bound); + + const unused = string_payload.unusedCapacitySlice(); + const compressed_result = bun.zstd.compress(unused, utf8, 1); + if (compressed_result == .err) { + bun.Output.panic("Unexpected error compressing sourcemap: {s}", .{bun.span(compressed_result.err)}); + } + string_payload.items.len += compressed_result.success; + + const slice = bun.StringPointer{ + .offset = @intCast(offset + string_payload_start_location), + .length = @intCast(string_payload.items.len - offset), + }; + try out.writeInt(u32, slice.offset, .little); + try out.writeInt(u32, slice.length, .little); + } + + try out.writeAll(map_vlq); + + bun.assert(header_list.items.len == string_payload_start_location); + } }; diff --git a/src/allocators.zig b/src/allocators.zig index 31bc747fa6..5b3fd6cc82 100644 --- a/src/allocators.zig +++ b/src/allocators.zig @@ -177,6 +177,11 @@ pub fn OverflowList(comptime ValueType: type, comptime count: comptime_int) type }; } +/// "Formerly-BSSList" +/// It's not actually BSS anymore. +/// +/// We do keep a pointer to it globally, but because the data is not zero-initialized, it ends up taking space in the object file. +/// We don't want to spend 1-2 MB on these structs. 
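The hunks that follow in `src/allocators.zig` act on this comment by turning each `instance` from a global value into a heap-allocated pointer created on first `init`. A reduced sketch of that pattern, with hypothetical names standing in for the real `BSSList`-style containers:

```zig
const std = @import("std");

/// Keep only a global pointer plus a `loaded` flag; the large struct itself is
/// allocated on first use instead of being embedded in the object file.
fn LazyInstance(comptime T: type) type {
    return struct {
        pub var instance: *T = undefined;
        pub var loaded = false;

        pub fn init(allocator: std.mem.Allocator) *T {
            if (!loaded) {
                instance = allocator.create(T) catch @panic("out of memory");
                instance.* = std.mem.zeroes(T);
                loaded = true;
            }
            return instance;
        }
    };
}

test "large backing buffer is allocated lazily" {
    const Big = struct { backing_buf: [64 * 1024]u8, used: u32 };
    const ptr = LazyInstance(Big).init(std.testing.allocator);
    defer std.testing.allocator.destroy(ptr);
    try std.testing.expectEqual(@as(u32, 0), ptr.used);
}
```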
pub fn BSSList(comptime ValueType: type, comptime _count: anytype) type { const count = _count * 2; const max_index = count - 1; @@ -199,13 +204,13 @@ pub fn BSSList(comptime ValueType: type, comptime _count: anytype) type { const Self = @This(); allocator: Allocator, - mutex: Mutex = Mutex.init(), + mutex: Mutex = .{}, head: *OverflowBlock = undefined, tail: OverflowBlock = OverflowBlock{}, backing_buf: [count]ValueType = undefined, used: u32 = 0, - pub var instance: Self = undefined; + pub var instance: *Self = undefined; pub var loaded = false; pub inline fn blockIndex(index: u31) usize { @@ -214,7 +219,8 @@ pub fn BSSList(comptime ValueType: type, comptime _count: anytype) type { pub fn init(allocator: std.mem.Allocator) *Self { if (!loaded) { - instance = Self{ + instance = bun.default_allocator.create(Self) catch bun.outOfMemory(); + instance.* = Self{ .allocator = allocator, .tail = OverflowBlock{}, }; @@ -222,7 +228,7 @@ pub fn BSSList(comptime ValueType: type, comptime _count: anytype) type { loaded = true; } - return &instance; + return instance; } pub fn isOverflowing() bool { @@ -288,8 +294,8 @@ pub fn BSSStringList(comptime _count: usize, comptime _item_length: usize) type allocator: Allocator, slice_buf: [count][]const u8 = undefined, slice_buf_used: u16 = 0, - mutex: Mutex = Mutex.init(), - pub var instance: Self = undefined; + mutex: Mutex = .{}, + pub var instance: *Self = undefined; var loaded: bool = false; // only need the mutex on append @@ -299,14 +305,15 @@ pub fn BSSStringList(comptime _count: usize, comptime _item_length: usize) type pub fn init(allocator: std.mem.Allocator) *Self { if (!loaded) { - instance = Self{ + instance = bun.default_allocator.create(Self) catch bun.outOfMemory(); + instance.* = Self{ .allocator = allocator, .backing_buf_used = 0, }; loaded = true; } - return &instance; + return instance; } pub inline fn isOverflowing() bool { @@ -465,24 +472,25 @@ pub fn BSSMap(comptime ValueType: type, comptime count: anytype, comptime store_ index: IndexMap, overflow_list: Overflow = Overflow{}, allocator: Allocator, - mutex: Mutex = Mutex.init(), + mutex: Mutex = .{}, backing_buf: [count]ValueType = undefined, backing_buf_used: u16 = 0, - pub var instance: Self = undefined; + pub var instance: *Self = undefined; var loaded: bool = false; pub fn init(allocator: std.mem.Allocator) *Self { if (!loaded) { - instance = Self{ + instance = bun.default_allocator.create(Self) catch bun.outOfMemory(); + instance.* = Self{ .index = IndexMap{}, .allocator = allocator, }; loaded = true; } - return &instance; + return instance; } pub fn isOverflowing() bool { @@ -621,18 +629,19 @@ pub fn BSSMap(comptime ValueType: type, comptime count: anytype, comptime store_ key_list_overflow: OverflowList([]u8, count / 4) = OverflowList([]u8, count / 4){}, const Self = @This(); - pub var instance: Self = undefined; + pub var instance: *Self = undefined; pub var instance_loaded = false; pub fn init(allocator: std.mem.Allocator) *Self { if (!instance_loaded) { - instance = Self{ + instance = bun.default_allocator.create(Self) catch bun.outOfMemory(); + instance.* = Self{ .map = BSSMapType.init(allocator), }; instance_loaded = true; } - return &instance; + return instance; } pub fn isOverflowing() bool { @@ -676,7 +685,7 @@ pub fn BSSMap(comptime ValueType: type, comptime count: anytype, comptime store_ } // There's two parts to this. - // 1. Storing the underyling string. + // 1. Storing the underlying string. // 2. Making the key accessible at the index. 
pub fn putKey(self: *Self, key: anytype, result: *Result) !void { self.map.mutex.lock(); diff --git a/src/analytics/analytics_thread.zig b/src/analytics/analytics_thread.zig index 7be985fbf9..18094bcba1 100644 --- a/src/analytics/analytics_thread.zig +++ b/src/analytics/analytics_thread.zig @@ -79,40 +79,53 @@ pub fn isCI() bool { /// This answers, "What parts of bun are people actually using?" pub const Features = struct { - /// Set right before JSC::initialize is called - pub var jsc: usize = 0; + pub var builtin_modules = std.enums.EnumSet(bun.JSC.HardcodedModule).initEmpty(); + pub var @"Bun.stderr": usize = 0; pub var @"Bun.stdin": usize = 0; pub var @"Bun.stdout": usize = 0; + pub var WebSocket: usize = 0; pub var abort_signal: usize = 0; + pub var binlinks: usize = 0; pub var bunfig: usize = 0; pub var define: usize = 0; pub var dotenv: usize = 0; pub var external: usize = 0; pub var extracted_packages: usize = 0; - /// Incremented for each call to `fetch` pub var fetch: usize = 0; - pub var filesystem_router: usize = 0; pub var git_dependencies: usize = 0; pub var html_rewriter: usize = 0; pub var http_server: usize = 0; pub var https_server: usize = 0; + /// Set right before JSC::initialize is called + pub var jsc: usize = 0; + /// Set when kit.DevServer is initialized + pub var kit_dev: usize = 0; pub var lifecycle_scripts: usize = 0; pub var loaders: usize = 0; pub var lockfile_migration_from_package_lock: usize = 0; pub var macros: usize = 0; + pub var no_avx2: usize = 0; + pub var no_avx: usize = 0; pub var shell: usize = 0; pub var spawn: usize = 0; + pub var standalone_executable: usize = 0; pub var standalone_shell: usize = 0; + /// Set when invoking a todo panic + pub var todo_panic: usize = 0; pub var transpiler_cache: usize = 0; - pub var tsconfig_paths: usize = 0; pub var tsconfig: usize = 0; + pub var tsconfig_paths: usize = 0; pub var virtual_modules: usize = 0; - pub var WebSocket: usize = 0; - pub var no_avx: usize = 0; - pub var no_avx2: usize = 0; - pub var binlinks: usize = 0; - pub var builtin_modules = std.enums.EnumSet(bun.JSC.HardcodedModule).initEmpty(); + pub var workers_spawned: usize = 0; + pub var workers_terminated: usize = 0; + pub var napi_module_register: usize = 0; + pub var process_dlopen: usize = 0; + + comptime { + @export(napi_module_register, .{ .name = "Bun__napi_module_register_count" }); + @export(process_dlopen, .{ .name = "Bun__process_dlopen_count" }); + } pub fn formatter() Formatter { return Formatter{}; @@ -330,6 +343,25 @@ pub const GenerateHeader = struct { return linux_kernel_version; } + export fn Bun__isEpollPwait2SupportedOnLinuxKernel() i32 { + if (comptime !Environment.isLinux) { + return 0; + } + + // https://man.archlinux.org/man/epoll_pwait2.2.en#HISTORY + const min_epoll_pwait2 = Semver.Version{ + .major = 5, + .minor = 11, + .patch = 0, + }; + + return switch (kernelVersion().order(min_epoll_pwait2, "", "")) { + .gt => 1, + .eq => 1, + .lt => 0, + }; + } + fn forLinux() Analytics.Platform { linux_os_name = std.mem.zeroes(@TypeOf(linux_os_name)); diff --git a/src/api/schema.js b/src/api/schema.js index 8451955b9d..908a044ab1 100644 --- a/src/api/schema.js +++ b/src/api/schema.js @@ -3434,157 +3434,159 @@ function encodeGetTestsResponse(message, bb) { } } -export { Loader }; -export { LoaderKeys }; -export { FrameworkEntryPointType }; -export { FrameworkEntryPointTypeKeys }; -export { StackFrameScope }; -export { StackFrameScopeKeys }; -export { decodeStackFrame }; -export { encodeStackFrame }; -export { 
decodeStackFramePosition }; -export { encodeStackFramePosition }; -export { decodeSourceLine }; -export { encodeSourceLine }; -export { decodeStackTrace }; -export { encodeStackTrace }; -export { decodeJSException }; -export { encodeJSException }; -export { FallbackStep }; -export { FallbackStepKeys }; -export { decodeProblems }; -export { encodeProblems }; -export { decodeRouter }; -export { encodeRouter }; -export { decodeFallbackMessageContainer }; -export { encodeFallbackMessageContainer }; -export { ResolveMode }; -export { ResolveModeKeys }; -export { Target }; -export { TargetKeys }; -export { CSSInJSBehavior }; -export { CSSInJSBehaviorKeys }; -export { JSXRuntime }; -export { JSXRuntimeKeys }; -export { decodeJSX }; -export { encodeJSX }; -export { decodeStringPointer }; -export { encodeStringPointer }; -export { decodeJavascriptBundledModule }; -export { encodeJavascriptBundledModule }; -export { decodeJavascriptBundledPackage }; -export { encodeJavascriptBundledPackage }; -export { decodeJavascriptBundle }; -export { encodeJavascriptBundle }; -export { decodeJavascriptBundleContainer }; -export { encodeJavascriptBundleContainer }; -export { ScanDependencyMode }; -export { ScanDependencyModeKeys }; -export { ModuleImportType }; -export { ModuleImportTypeKeys }; -export { decodeModuleImportRecord }; -export { encodeModuleImportRecord }; -export { decodeModule }; -export { encodeModule }; -export { decodeStringMap }; -export { encodeStringMap }; -export { decodeLoaderMap }; -export { encodeLoaderMap }; -export { DotEnvBehavior }; -export { DotEnvBehaviorKeys }; -export { decodeEnvConfig }; -export { encodeEnvConfig }; -export { decodeLoadedEnvConfig }; -export { encodeLoadedEnvConfig }; -export { decodeFrameworkConfig }; -export { encodeFrameworkConfig }; -export { decodeFrameworkEntryPoint }; -export { encodeFrameworkEntryPoint }; -export { decodeFrameworkEntryPointMap }; -export { encodeFrameworkEntryPointMap }; -export { decodeFrameworkEntryPointMessage }; -export { encodeFrameworkEntryPointMessage }; -export { decodeLoadedFramework }; -export { encodeLoadedFramework }; -export { decodeLoadedRouteConfig }; -export { encodeLoadedRouteConfig }; -export { decodeRouteConfig }; -export { encodeRouteConfig }; -export { decodeTransformOptions }; -export { encodeTransformOptions }; -export { SourceMapMode }; -export { SourceMapModeKeys }; -export { decodeFileHandle }; -export { encodeFileHandle }; -export { decodeTransform }; -export { encodeTransform }; -export { decodeScan }; -export { encodeScan }; -export { decodeScanResult }; -export { encodeScanResult }; -export { decodeScannedImport }; -export { encodeScannedImport }; -export { ImportKind }; -export { ImportKindKeys }; -export { TransformResponseStatus }; -export { TransformResponseStatusKeys }; -export { decodeOutputFile }; -export { encodeOutputFile }; -export { decodeTransformResponse }; -export { encodeTransformResponse }; -export { MessageLevel }; -export { MessageLevelKeys }; -export { decodeLocation }; -export { encodeLocation }; -export { decodeMessageData }; -export { encodeMessageData }; -export { decodeMessageMeta }; -export { encodeMessageMeta }; -export { decodeMessage }; -export { encodeMessage }; -export { decodeLog }; -export { encodeLog }; -export { Reloader }; -export { ReloaderKeys }; -export { WebsocketMessageKind }; -export { WebsocketMessageKindKeys }; -export { WebsocketCommandKind }; -export { WebsocketCommandKindKeys }; -export { decodeWebsocketMessage }; -export { encodeWebsocketMessage }; -export { 
decodeWebsocketMessageWelcome }; -export { encodeWebsocketMessageWelcome }; -export { decodeWebsocketMessageFileChangeNotification }; -export { encodeWebsocketMessageFileChangeNotification }; -export { decodeWebsocketCommand }; -export { encodeWebsocketCommand }; -export { decodeWebsocketCommandBuild }; -export { encodeWebsocketCommandBuild }; -export { decodeWebsocketCommandManifest }; -export { encodeWebsocketCommandManifest }; -export { decodeWebsocketMessageBuildSuccess }; -export { encodeWebsocketMessageBuildSuccess }; -export { decodeWebsocketMessageBuildFailure }; -export { encodeWebsocketMessageBuildFailure }; -export { decodeWebsocketCommandBuildWithFilePath }; -export { encodeWebsocketCommandBuildWithFilePath }; -export { decodeWebsocketMessageResolveID }; -export { encodeWebsocketMessageResolveID }; -export { decodeNPMRegistry }; -export { encodeNPMRegistry }; -export { decodeNPMRegistryMap }; -export { encodeNPMRegistryMap }; -export { decodeBunInstall }; -export { encodeBunInstall }; -export { decodeClientServerModule }; -export { encodeClientServerModule }; -export { decodeClientServerModuleManifest }; -export { encodeClientServerModuleManifest }; -export { decodeGetTestsRequest }; -export { encodeGetTestsRequest }; -export { TestKind }; -export { TestKindKeys }; -export { decodeTestResponseItem }; -export { encodeTestResponseItem }; -export { decodeGetTestsResponse }; -export { encodeGetTestsResponse }; +export { + CSSInJSBehavior, + CSSInJSBehaviorKeys, + DotEnvBehavior, + DotEnvBehaviorKeys, + FallbackStep, + FallbackStepKeys, + FrameworkEntryPointType, + FrameworkEntryPointTypeKeys, + ImportKind, + ImportKindKeys, + JSXRuntime, + JSXRuntimeKeys, + Loader, + LoaderKeys, + MessageLevel, + MessageLevelKeys, + ModuleImportType, + ModuleImportTypeKeys, + Reloader, + ReloaderKeys, + ResolveMode, + ResolveModeKeys, + ScanDependencyMode, + ScanDependencyModeKeys, + SourceMapMode, + SourceMapModeKeys, + StackFrameScope, + StackFrameScopeKeys, + Target, + TargetKeys, + TestKind, + TestKindKeys, + TransformResponseStatus, + TransformResponseStatusKeys, + WebsocketCommandKind, + WebsocketCommandKindKeys, + WebsocketMessageKind, + WebsocketMessageKindKeys, + decodeBunInstall, + decodeClientServerModule, + decodeClientServerModuleManifest, + decodeEnvConfig, + decodeFallbackMessageContainer, + decodeFileHandle, + decodeFrameworkConfig, + decodeFrameworkEntryPoint, + decodeFrameworkEntryPointMap, + decodeFrameworkEntryPointMessage, + decodeGetTestsRequest, + decodeGetTestsResponse, + decodeJSException, + decodeJSX, + decodeJavascriptBundle, + decodeJavascriptBundleContainer, + decodeJavascriptBundledModule, + decodeJavascriptBundledPackage, + decodeLoadedEnvConfig, + decodeLoadedFramework, + decodeLoadedRouteConfig, + decodeLoaderMap, + decodeLocation, + decodeLog, + decodeMessage, + decodeMessageData, + decodeMessageMeta, + decodeModule, + decodeModuleImportRecord, + decodeNPMRegistry, + decodeNPMRegistryMap, + decodeOutputFile, + decodeProblems, + decodeRouteConfig, + decodeRouter, + decodeScan, + decodeScanResult, + decodeScannedImport, + decodeSourceLine, + decodeStackFrame, + decodeStackFramePosition, + decodeStackTrace, + decodeStringMap, + decodeStringPointer, + decodeTestResponseItem, + decodeTransform, + decodeTransformOptions, + decodeTransformResponse, + decodeWebsocketCommand, + decodeWebsocketCommandBuild, + decodeWebsocketCommandBuildWithFilePath, + decodeWebsocketCommandManifest, + decodeWebsocketMessage, + decodeWebsocketMessageBuildFailure, + 
decodeWebsocketMessageBuildSuccess, + decodeWebsocketMessageFileChangeNotification, + decodeWebsocketMessageResolveID, + decodeWebsocketMessageWelcome, + encodeBunInstall, + encodeClientServerModule, + encodeClientServerModuleManifest, + encodeEnvConfig, + encodeFallbackMessageContainer, + encodeFileHandle, + encodeFrameworkConfig, + encodeFrameworkEntryPoint, + encodeFrameworkEntryPointMap, + encodeFrameworkEntryPointMessage, + encodeGetTestsRequest, + encodeGetTestsResponse, + encodeJSException, + encodeJSX, + encodeJavascriptBundle, + encodeJavascriptBundleContainer, + encodeJavascriptBundledModule, + encodeJavascriptBundledPackage, + encodeLoadedEnvConfig, + encodeLoadedFramework, + encodeLoadedRouteConfig, + encodeLoaderMap, + encodeLocation, + encodeLog, + encodeMessage, + encodeMessageData, + encodeMessageMeta, + encodeModule, + encodeModuleImportRecord, + encodeNPMRegistry, + encodeNPMRegistryMap, + encodeOutputFile, + encodeProblems, + encodeRouteConfig, + encodeRouter, + encodeScan, + encodeScanResult, + encodeScannedImport, + encodeSourceLine, + encodeStackFrame, + encodeStackFramePosition, + encodeStackTrace, + encodeStringMap, + encodeStringPointer, + encodeTestResponseItem, + encodeTransform, + encodeTransformOptions, + encodeTransformResponse, + encodeWebsocketCommand, + encodeWebsocketCommandBuild, + encodeWebsocketCommandBuildWithFilePath, + encodeWebsocketCommandManifest, + encodeWebsocketMessage, + encodeWebsocketMessageBuildFailure, + encodeWebsocketMessageBuildSuccess, + encodeWebsocketMessageFileChangeNotification, + encodeWebsocketMessageResolveID, + encodeWebsocketMessageWelcome, +}; diff --git a/src/api/schema.zig b/src/api/schema.zig index 1207251c48..002f43223f 100644 --- a/src/api/schema.zig +++ b/src/api/schema.zig @@ -1,6 +1,7 @@ const std = @import("std"); const bun = @import("root").bun; const js_ast = bun.JSAst; +const OOM = bun.OOM; pub const Reader = struct { const Self = @This(); @@ -824,13 +825,20 @@ pub const Api = struct { } }; - pub const StringPointer = packed struct { + /// Represents a slice stored within an externally stored buffer. Safe to serialize. + /// Must be an extern struct to match with `headers-handwritten.h`. + pub const StringPointer = extern struct { /// offset offset: u32 = 0, /// length length: u32 = 0, + comptime { + bun.assert(@alignOf(StringPointer) == @alignOf(u32)); + bun.assert(@sizeOf(StringPointer) == @sizeOf(u64)); + } + pub fn decode(reader: anytype) anyerror!StringPointer { var this = std.mem.zeroes(StringPointer); @@ -844,7 +852,7 @@ pub const Api = struct { try writer.writeInt(this.length); } - pub fn slice(this: *const @This(), bytes: []const u8) []const u8 { + pub fn slice(this: @This(), bytes: []const u8) []const u8 { return bytes[this.offset .. 
this.offset + this.length]; } }; @@ -1628,6 +1636,8 @@ pub const Api = struct { /// define define: ?StringMap = null, + drop: []const []const u8 = &.{}, + /// preserve_symlinks preserve_symlinks: ?bool = null, @@ -1664,12 +1674,6 @@ pub const Api = struct { /// extension_order extension_order: []const []const u8, - /// framework - framework: ?FrameworkConfig = null, - - /// router - router: ?RouteConfig = null, - /// no_summary no_summary: ?bool = null, @@ -1748,9 +1752,7 @@ pub const Api = struct { 15 => { this.target = try reader.readValue(Target); }, - 16 => { - this.serve = try reader.readValue(bool); - }, + 16 => {}, 17 => { this.env_files = try reader.readArray([]const u8); }, @@ -2814,7 +2816,7 @@ pub const Api = struct { fn expectString(this: *Parser, expr: js_ast.Expr) !void { switch (expr.data) { - .e_string, .e_utf8_string => {}, + .e_string => {}, else => { this.log.addErrorFmt(this.source, expr.loc, this.allocator, "expected string but received {}", .{ @as(js_ast.Expr.Tag, expr.data), @@ -2824,11 +2826,11 @@ pub const Api = struct { } } - pub fn parseRegistryURLString(this: *Parser, str: *js_ast.E.String) !Api.NpmRegistry { + pub fn parseRegistryURLString(this: *Parser, str: *js_ast.E.String) OOM!Api.NpmRegistry { return try this.parseRegistryURLStringImpl(str.data); } - pub fn parseRegistryURLStringImpl(this: *Parser, str: []const u8) !Api.NpmRegistry { + pub fn parseRegistryURLStringImpl(this: *Parser, str: []const u8) OOM!Api.NpmRegistry { const url = bun.URL.parse(str); var registry = std.mem.zeroes(Api.NpmRegistry); @@ -2975,6 +2977,13 @@ pub const Api = struct { /// concurrent_scripts concurrent_scripts: ?u32 = null, + cafile: ?[]const u8 = null, + + ca: ?union(enum) { + str: []const u8, + list: []const []const u8, + } = null, + pub fn decode(reader: anytype) anyerror!BunInstall { var this = std.mem.zeroes(BunInstall); diff --git a/src/ast/base.zig b/src/ast/base.zig index 20eb0183bb..ffd6240ad3 100644 --- a/src/ast/base.zig +++ b/src/ast/base.zig @@ -31,7 +31,6 @@ pub const RefCtx = struct { /// In some parts of Bun, we have many different IDs pointing to different things. /// It's easy for them to get mixed up, so we use this type to make sure we don't. 
-/// pub const Index = packed struct(u32) { value: Int, @@ -48,6 +47,9 @@ pub const Index = packed struct(u32) { pub const invalid = Index{ .value = std.math.maxInt(Int) }; pub const runtime = Index{ .value = 0 }; + pub const bake_server_data = Index{ .value = 1 }; + pub const bake_client_data = Index{ .value = 2 }; + pub const Int = u32; pub inline fn source(num: anytype) Index { @@ -111,7 +113,7 @@ pub const Ref = packed struct(u64) { allocated_name, source_contents_slice, symbol, - } = .invalid, + }, source_index: Int = 0, @@ -152,18 +154,11 @@ pub const Ref = packed struct(u64) { pub fn dump(ref: Ref, symbol_table: anytype) std.fmt.Formatter(dumpImpl) { return .{ .data = .{ .ref = ref, - .symbol_table = switch (@TypeOf(symbol_table)) { - *const std.ArrayList(js_ast.Symbol) => symbol_table.items, - *std.ArrayList(js_ast.Symbol) => symbol_table.items, - []const js_ast.Symbol => symbol_table, - []js_ast.Symbol => symbol_table, - else => |T| @compileError("Unsupported type to Ref.dump: " ++ @typeName(T)), - }, + .symbol = ref.getSymbol(symbol_table), } }; } - fn dumpImpl(data: struct { ref: Ref, symbol_table: []const js_ast.Symbol }, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { - const symbol = data.symbol_table[data.ref.inner_index]; + fn dumpImpl(data: struct { ref: Ref, symbol: *js_ast.Symbol }, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { try std.fmt.format( writer, "Ref[inner={d}, src={d}, .{s}; original_name={s}, uses={d}]", @@ -171,8 +166,8 @@ pub const Ref = packed struct(u64) { data.ref.inner_index, data.ref.source_index, @tagName(data.ref.tag), - symbol.original_name, - symbol.use_count_estimate, + data.symbol.original_name, + data.symbol.use_count_estimate, }, ); } @@ -227,4 +222,19 @@ pub const Ref = packed struct(u64) { pub fn jsonStringify(self: *const Ref, writer: anytype) !void { return try writer.write([2]u32{ self.sourceIndex(), self.innerIndex() }); } + + pub fn getSymbol(ref: Ref, symbol_table: anytype) *js_ast.Symbol { + // Different parts of the bundler use different formats of the symbol table + // In the parser you only have one array, and .sourceIndex() is ignored. + // In the bundler, you have a 2D array where both parts of the ref are used. + const resolved_symbol_table = switch (@TypeOf(symbol_table)) { + *const std.ArrayList(js_ast.Symbol) => symbol_table.items, + *std.ArrayList(js_ast.Symbol) => symbol_table.items, + []js_ast.Symbol => symbol_table, + *js_ast.Symbol.Map => return symbol_table.get(ref) orelse + unreachable, // ref must exist within symbol table + else => |T| @compileError("Unsupported type to Ref.getSymbol: " ++ @typeName(T)), + }; + return &resolved_symbol_table[ref.innerIndex()]; + } }; diff --git a/src/async/posix_event_loop.zig b/src/async/posix_event_loop.zig index 491e09ce7b..bb4c4286f1 100644 --- a/src/async/posix_event_loop.zig +++ b/src/async/posix_event_loop.zig @@ -55,11 +55,11 @@ pub const KeepAlive = struct { this.status = .inactive; if (comptime @TypeOf(event_loop_ctx_) == JSC.EventLoopHandle) { - event_loop_ctx_.loop().subActive(1); + event_loop_ctx_.loop().unref(); return; } const event_loop_ctx = JSC.AbstractVM(event_loop_ctx_); - event_loop_ctx.platformEventLoop().subActive(1); + event_loop_ctx.platformEventLoop().unref(); } /// From another thread, Prevent a poll from keeping the process alive. 
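The `Ref.getSymbol` helper added in the `src/ast/base.zig` hunk above relies on switching over `@TypeOf` of an `anytype` parameter, so one accessor serves every symbol-table representation and only the matching prong is analyzed at compile time. A reduced sketch of that dispatch, using a stand-in `Symbol` type rather than the real one:

```zig
const std = @import("std");

const Symbol = struct { original_name: []const u8 };

fn getByIndex(symbol_table: anytype, inner_index: usize) *const Symbol {
    return switch (@TypeOf(symbol_table)) {
        *const std.ArrayList(Symbol), *std.ArrayList(Symbol) => &symbol_table.items[inner_index],
        []const Symbol, []Symbol => &symbol_table[inner_index],
        else => |T| @compileError("Unsupported symbol table type: " ++ @typeName(T)),
    };
}

test "one accessor, several symbol table shapes" {
    var list = std.ArrayList(Symbol).init(std.testing.allocator);
    defer list.deinit();
    try list.append(.{ .original_name = "foo" });

    const as_slice: []const Symbol = list.items;
    try std.testing.expectEqualStrings("foo", getByIndex(&list, 0).original_name);
    try std.testing.expectEqualStrings("foo", getByIndex(as_slice, 0).original_name);
}
```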
diff --git a/src/baby_list.zig b/src/baby_list.zig index d304f75496..adf41b1620 100644 --- a/src/baby_list.zig +++ b/src/baby_list.zig @@ -13,7 +13,28 @@ pub fn BabyList(comptime Type: type) type { cap: u32 = 0, pub const Elem = Type; + pub fn parse(input: *bun.css.Parser) bun.css.Result(ListType) { + return switch (input.parseCommaSeparated(Type, bun.css.generic.parseFor(Type))) { + .result => |v| return .{ .result = ListType{ + .ptr = v.items.ptr, + .len = @intCast(v.items.len), + .cap = @intCast(v.capacity), + } }, + .err => |e| return .{ .err = e }, + }; + } + pub fn toCss(this: *const ListType, comptime W: type, dest: *bun.css.Printer(W)) bun.css.PrintErr!void { + return bun.css.to_css.fromBabyList(Type, this, W, dest); + } + + pub fn eql(lhs: *const ListType, rhs: *const ListType) bool { + if (lhs.len != rhs.len) return false; + for (lhs.sliceConst(), rhs.sliceConst()) |*a, *b| { + if (!bun.css.generic.eql(Type, a, b)) return false; + } + return true; + } pub fn set(this: *@This(), slice_: []Type) void { this.ptr = slice_.ptr; this.len = @as(u32, @truncate(slice_.len)); @@ -29,6 +50,12 @@ pub fn BabyList(comptime Type: type) type { this.* = .{}; } + pub fn shrinkAndFree(this: *@This(), allocator: std.mem.Allocator, size: usize) void { + var list_ = this.listManaged(allocator); + list_.shrinkAndFree(size); + this.update(list_); + } + pub fn orderedRemove(this: *@This(), index: usize) Type { var l = this.list(); defer this.update(l); @@ -41,11 +68,17 @@ pub fn BabyList(comptime Type: type) type { return l.swapRemove(index); } + pub fn sortAsc( + this: *@This(), + ) void { + bun.strings.sortAsc(this.slice()); + } + pub fn contains(this: @This(), item: []const Type) bool { return this.len > 0 and @intFromPtr(item.ptr) >= @intFromPtr(this.ptr) and @intFromPtr(item.ptr) < @intFromPtr(this.ptr) + this.len; } - pub inline fn initConst(items: []const Type) ListType { + pub fn initConst(items: []const Type) callconv(bun.callconv_inline) ListType { @setRuntimeSafety(false); return ListType{ // Remove the const qualifier from the items @@ -77,8 +110,17 @@ pub fn BabyList(comptime Type: type) type { }; } + fn assertValidDeepClone(comptime T: type) void { + return switch (T) { + bun.JSAst.Expr, bun.JSAst.G.Property, bun.css.ImportConditions => {}, + else => { + @compileError("Unsupported type for BabyList.deepClone(): " ++ @typeName(Type)); + }, + }; + } + pub fn deepClone(this: @This(), allocator: std.mem.Allocator) !@This() { - if (comptime Type != bun.JSAst.Expr and Type != bun.JSAst.G.Property) @compileError("Unsupported type for BabyList.deepClone()"); + assertValidDeepClone(Type); var list_ = try initCapacity(allocator, this.len); for (this.slice()) |item| { list_.appendAssumeCapacity(try item.deepClone(allocator)); @@ -87,6 +129,17 @@ pub fn BabyList(comptime Type: type) type { return list_; } + /// Same as `deepClone` but doesn't return an error + pub fn deepClone2(this: @This(), allocator: std.mem.Allocator) @This() { + assertValidDeepClone(Type); + var list_ = initCapacity(allocator, this.len) catch bun.outOfMemory(); + for (this.slice()) |item| { + list_.appendAssumeCapacity(item.deepClone(allocator)); + } + + return list_; + } + pub fn clearRetainingCapacity(this: *@This()) void { this.len = 0; } @@ -204,24 +257,24 @@ pub fn BabyList(comptime Type: type) type { }; } - pub inline fn first(this: ListType) ?*Type { + pub fn first(this: ListType) callconv(bun.callconv_inline) ?*Type { return if (this.len > 0) this.ptr[0] else @as(?*Type, null); } - pub inline fn last(this: ListType) ?*Type 
{ + pub fn last(this: ListType) callconv(bun.callconv_inline) ?*Type { return if (this.len > 0) &this.ptr[this.len - 1] else @as(?*Type, null); } - pub inline fn first_(this: ListType) Type { + pub fn first_(this: ListType) callconv(bun.callconv_inline) Type { return this.ptr[0]; } - pub inline fn at(this: ListType, index: usize) *const Type { + pub fn at(this: ListType, index: usize) callconv(bun.callconv_inline) *const Type { bun.assert(index < this.len); return &this.ptr[index]; } - pub inline fn mut(this: ListType, index: usize) *Type { + pub fn mut(this: ListType, index: usize) callconv(bun.callconv_inline) *Type { bun.assert(index < this.len); return &this.ptr[index]; } @@ -236,7 +289,7 @@ pub fn BabyList(comptime Type: type) type { }; } - pub inline fn @"[0]"(this: ListType) Type { + pub fn @"[0]"(this: ListType) callconv(bun.callconv_inline) Type { return this.ptr[0]; } const OOM = error{OutOfMemory}; @@ -259,7 +312,12 @@ pub fn BabyList(comptime Type: type) type { this.update(list__); } - pub inline fn slice(this: ListType) []Type { + pub fn slice(this: ListType) callconv(bun.callconv_inline) []Type { + @setRuntimeSafety(false); + return this.ptr[0..this.len]; + } + + pub fn sliceConst(this: *const ListType) callconv(bun.callconv_inline) []const Type { @setRuntimeSafety(false); return this.ptr[0..this.len]; } @@ -273,6 +331,7 @@ pub fn BabyList(comptime Type: type) type { this.update(list_); return this.len - initial; } + pub fn writeLatin1(this: *@This(), allocator: std.mem.Allocator, str: []const u8) !u32 { if (comptime Type != u8) @compileError("Unsupported for type " ++ @typeName(Type)); @@ -282,6 +341,7 @@ pub fn BabyList(comptime Type: type) type { this.update(new); return this.len - initial; } + pub fn writeUTF16(this: *@This(), allocator: std.mem.Allocator, str: []const u16) !u32 { if (comptime Type != u8) @compileError("Unsupported for type " ++ @typeName(Type)); diff --git a/src/bake/BakeGlobalObject.cpp b/src/bake/BakeGlobalObject.cpp new file mode 100644 index 0000000000..44ee0e1854 --- /dev/null +++ b/src/bake/BakeGlobalObject.cpp @@ -0,0 +1,158 @@ +#include "BakeGlobalObject.h" +#include "JSNextTickQueue.h" +#include "JavaScriptCore/GlobalObjectMethodTable.h" +#include "JavaScriptCore/JSInternalPromise.h" +#include "headers-handwritten.h" +#include "JavaScriptCore/JSModuleLoader.h" +#include "JavaScriptCore/Completion.h" + +extern "C" BunString BakeProdResolve(JSC::JSGlobalObject*, BunString a, BunString b); + +namespace Bake { + +JSC::JSInternalPromise* +bakeModuleLoaderImportModule(JSC::JSGlobalObject* global, + JSC::JSModuleLoader* moduleLoader, JSC::JSString* moduleNameValue, + JSC::JSValue parameters, + const JSC::SourceOrigin& sourceOrigin) +{ + WTF::String keyString = moduleNameValue->getString(global); + if (keyString.startsWith("bake:/"_s)) { + JSC::VM& vm = global->vm(); + return JSC::importModule(global, JSC::Identifier::fromString(vm, keyString), + JSC::jsUndefined(), parameters, JSC::jsUndefined()); + } + + if (!sourceOrigin.isNull() && sourceOrigin.string().startsWith("bake:/"_s)) { + JSC::VM& vm = global->vm(); + auto scope = DECLARE_THROW_SCOPE(vm); + + WTF::String refererString = sourceOrigin.string(); + WTF::String keyString = moduleNameValue->getString(global); + + if (!keyString) { + auto promise = JSC::JSInternalPromise::create(vm, global->internalPromiseStructure()); + promise->reject(global, JSC::createError(global, "import() requires a string"_s)); + return promise; + } + + BunString result = BakeProdResolve(global, 
Bun::toString(refererString), Bun::toString(keyString)); + RETURN_IF_EXCEPTION(scope, nullptr); + + return JSC::importModule(global, JSC::Identifier::fromString(vm, result.toWTFString()), + JSC::jsUndefined(), parameters, JSC::jsUndefined()); + } + + // Use Zig::GlobalObject's function + return jsCast(global)->moduleLoaderImportModule(global, moduleLoader, moduleNameValue, parameters, sourceOrigin); +} + +JSC::Identifier bakeModuleLoaderResolve(JSC::JSGlobalObject* jsGlobal, + JSC::JSModuleLoader* loader, JSC::JSValue key, + JSC::JSValue referrer, JSC::JSValue origin) +{ + Bake::GlobalObject* global = jsCast(jsGlobal); + JSC::VM& vm = global->vm(); + auto scope = DECLARE_THROW_SCOPE(vm); + + ASSERT(referrer.isString()); + WTF::String refererString = jsCast(referrer)->getString(global); + + WTF::String keyString = key.toWTFString(global); + RETURN_IF_EXCEPTION(scope, vm.propertyNames->emptyIdentifier); + + if (refererString.startsWith("bake:/"_s) || (refererString == "."_s && keyString.startsWith("bake:/"_s))) { + BunString result = BakeProdResolve(global, Bun::toString(referrer.getString(global)), Bun::toString(keyString)); + RETURN_IF_EXCEPTION(scope, vm.propertyNames->emptyIdentifier); + + return JSC::Identifier::fromString(vm, result.toWTFString(BunString::ZeroCopy)); + } + + // Use Zig::GlobalObject's function + return Zig::GlobalObject::moduleLoaderResolve(jsGlobal, loader, key, referrer, origin); +} + +#define INHERIT_HOOK_METHOD(name) \ + Zig::GlobalObject::s_globalObjectMethodTable.name + +const JSC::GlobalObjectMethodTable GlobalObject::s_globalObjectMethodTable = { + INHERIT_HOOK_METHOD(supportsRichSourceInfo), + INHERIT_HOOK_METHOD(shouldInterruptScript), + INHERIT_HOOK_METHOD(javaScriptRuntimeFlags), + INHERIT_HOOK_METHOD(queueMicrotaskToEventLoop), + INHERIT_HOOK_METHOD(shouldInterruptScriptBeforeTimeout), + bakeModuleLoaderImportModule, + bakeModuleLoaderResolve, + INHERIT_HOOK_METHOD(moduleLoaderFetch), + INHERIT_HOOK_METHOD(moduleLoaderCreateImportMetaProperties), + INHERIT_HOOK_METHOD(moduleLoaderEvaluate), + INHERIT_HOOK_METHOD(promiseRejectionTracker), + INHERIT_HOOK_METHOD(reportUncaughtExceptionAtEventLoop), + INHERIT_HOOK_METHOD(currentScriptExecutionOwner), + INHERIT_HOOK_METHOD(scriptExecutionStatus), + INHERIT_HOOK_METHOD(reportViolationForUnsafeEval), + INHERIT_HOOK_METHOD(defaultLanguage), + INHERIT_HOOK_METHOD(compileStreaming), + INHERIT_HOOK_METHOD(instantiateStreaming), + INHERIT_HOOK_METHOD(deriveShadowRealmGlobalObject), + INHERIT_HOOK_METHOD(codeForEval), + INHERIT_HOOK_METHOD(canCompileStrings), +}; + +GlobalObject* GlobalObject::create(JSC::VM& vm, JSC::Structure* structure, + const JSC::GlobalObjectMethodTable* methodTable) +{ + GlobalObject* ptr = new (NotNull, JSC::allocateCell(vm)) + GlobalObject(vm, structure, methodTable); + ptr->finishCreation(vm); + return ptr; +} + +void GlobalObject::finishCreation(JSC::VM& vm) +{ + Base::finishCreation(vm); + ASSERT(inherits(info())); +} + +struct BunVirtualMachine; +extern "C" BunVirtualMachine* Bun__getVM(); + +// A lot of this function is taken from 'Zig__GlobalObject__create' +// TODO: remove this entire method +extern "C" GlobalObject* BakeCreateProdGlobal(void* console) +{ + JSC::VM& vm = JSC::VM::create(JSC::HeapType::Large).leakRef(); + vm.heap.acquireAccess(); + JSC::JSLockHolder locker(vm); + BunVirtualMachine* bunVM = Bun__getVM(); + WebCore::JSVMClientData::create(&vm, bunVM); + + JSC::Structure* structure = GlobalObject::createStructure(vm); + GlobalObject* global = GlobalObject::create( + vm, 
structure, &GlobalObject::s_globalObjectMethodTable); + if (!global) + BUN_PANIC("Failed to create BakeGlobalObject"); + + global->m_bunVM = bunVM; + + JSC::gcProtect(global); + + global->setConsole(console); + global->setStackTraceLimit(10); // Node.js defaults to 10 + + // TODO: it segfaults! process.nextTick is scoped out for now i guess! + // vm.setOnComputeErrorInfo(computeErrorInfoWrapper); + // vm.setOnEachMicrotaskTick([global](JSC::VM &vm) -> void { + // if (auto nextTickQueue = global->m_nextTickQueue.get()) { + // global->resetOnEachMicrotaskTick(); + // // Bun::JSNextTickQueue *queue = + // // jsCast(nextTickQueue); + // // queue->drain(vm, global); + // return; + // } + // }); + + return global; +} + +}; // namespace Bake diff --git a/src/bake/BakeGlobalObject.h b/src/bake/BakeGlobalObject.h new file mode 100644 index 0000000000..af2b3490f9 --- /dev/null +++ b/src/bake/BakeGlobalObject.h @@ -0,0 +1,33 @@ +#pragma once +#include "root.h" +#include "ZigGlobalObject.h" + +namespace Bake { + +class GlobalObject : public Zig::GlobalObject { +public: + using Base = Zig::GlobalObject; + + template static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm) + { + if constexpr (mode == JSC::SubspaceAccess::Concurrently) + return nullptr; + return WebCore::subspaceForImpl( + vm, + [](auto& spaces) { return spaces.m_clientSubspaceForBakeGlobalScope.get(); }, + [](auto& spaces, auto&& space) { spaces.m_clientSubspaceForBakeGlobalScope = std::forward(space); }, + [](auto& spaces) { return spaces.m_subspaceForBakeGlobalScope.get(); }, + [](auto& spaces, auto&& space) { spaces.m_subspaceForBakeGlobalScope = std::forward(space); }, + [](auto& server) -> JSC::HeapCellType& { return server.m_heapCellTypeForJSWorkerGlobalScope; }); + } + + static const JSC::GlobalObjectMethodTable s_globalObjectMethodTable; + static GlobalObject* create(JSC::VM& vm, JSC::Structure* structure, const JSC::GlobalObjectMethodTable* methodTable); + + void finishCreation(JSC::VM& vm); + + GlobalObject(JSC::VM& vm, JSC::Structure* structure, const JSC::GlobalObjectMethodTable* methodTable) + : Zig::GlobalObject(vm, structure, methodTable) { } +}; + +}; // namespace Kit diff --git a/src/bake/BakeProduction.cpp b/src/bake/BakeProduction.cpp new file mode 100644 index 0000000000..887dbb565e --- /dev/null +++ b/src/bake/BakeProduction.cpp @@ -0,0 +1,47 @@ +#include "BakeProduction.h" +#include "BunBuiltinNames.h" +#include "WebCoreJSBuiltins.h" +#include "JavaScriptCore/JSPromise.h" +#include "JavaScriptCore/Exception.h" + +namespace Bake { + +extern "C" JSC::JSPromise* BakeRenderRoutesForProdStatic( + JSC::JSGlobalObject* global, + BunString outBase, + JSC::JSValue allServerFiles, + JSC::JSValue renderStatic, + JSC::JSValue clientEntryUrl, + JSC::JSValue pattern, + JSC::JSValue files, + JSC::JSValue typeAndFlags, + JSC::JSValue sourceRouteFiles, + JSC::JSValue paramInformation, + JSC::JSValue styles) +{ + JSC::VM& vm = global->vm(); + JSC::JSFunction* cb = JSC::JSFunction::create(vm, global, WebCore::bakeRenderRoutesForProdStaticCodeGenerator(vm), global); + JSC::CallData callData = JSC::getCallData(cb); + + JSC::MarkedArgumentBuffer args; + args.append(JSC::jsString(vm, outBase.toWTFString())); + args.append(allServerFiles); + args.append(renderStatic); + args.append(clientEntryUrl); + args.append(pattern); + args.append(files); + args.append(typeAndFlags); + args.append(sourceRouteFiles); + args.append(paramInformation); + args.append(styles); + + NakedPtr returnedException = nullptr; + auto result = JSC::call(global, 
cb, callData, JSC::jsUndefined(), args, returnedException); + if (UNLIKELY(returnedException)) { + // This should be impossible because it returns a promise. + return JSC::JSPromise::rejectedPromise(global, returnedException->value()); + } + return JSC::jsCast(result); +} + +} // namespace Bake diff --git a/src/bake/BakeProduction.h b/src/bake/BakeProduction.h new file mode 100644 index 0000000000..342159fd3e --- /dev/null +++ b/src/bake/BakeProduction.h @@ -0,0 +1,5 @@ +#include "root.h" +#include "headers-handwritten.h" + +namespace Bake { +} // namespace Bake \ No newline at end of file diff --git a/src/bake/BakeSourceProvider.cpp b/src/bake/BakeSourceProvider.cpp new file mode 100644 index 0000000000..cf7ef839ab --- /dev/null +++ b/src/bake/BakeSourceProvider.cpp @@ -0,0 +1,134 @@ +// clang-format off +#include "BakeSourceProvider.h" +#include "BakeGlobalObject.h" +#include "JavaScriptCore/Completion.h" +#include "JavaScriptCore/Identifier.h" +#include "JavaScriptCore/JSCJSValue.h" +#include "JavaScriptCore/JSCast.h" +#include "JavaScriptCore/JSLock.h" +#include "JavaScriptCore/JSMap.h" +#include "JavaScriptCore/JSModuleLoader.h" +#include "JavaScriptCore/JSModuleRecord.h" +#include "JavaScriptCore/JSString.h" +#include "JavaScriptCore/JSModuleNamespaceObject.h" +#include "ImportMetaObject.h" + +namespace Bake { + +extern "C" JSC::EncodedJSValue BakeLoadInitialServerCode(GlobalObject* global, BunString source, bool separateSSRGraph) { + JSC::VM& vm = global->vm(); + auto scope = DECLARE_THROW_SCOPE(vm); + + String string = "bake://server-runtime.js"_s; + JSC::SourceOrigin origin = JSC::SourceOrigin(WTF::URL(string)); + JSC::SourceCode sourceCode = JSC::SourceCode(DevSourceProvider::create( + source.toWTFString(), + origin, + WTFMove(string), + WTF::TextPosition(), + JSC::SourceProviderSourceType::Program + )); + + JSC::JSValue fnValue = vm.interpreter.executeProgram(sourceCode, global, global); + RETURN_IF_EXCEPTION(scope, JSC::JSValue::encode({})); + + RELEASE_ASSERT(fnValue); + + JSC::JSFunction* fn = jsCast(fnValue); + JSC::CallData callData = JSC::getCallData(fn); + + JSC::MarkedArgumentBuffer args; + args.append(JSC::jsBoolean(separateSSRGraph)); // separateSSRGraph + args.append(Zig::ImportMetaObject::create(global, "bake://server-runtime.js"_s)); // importMeta + + return JSC::JSValue::encode(JSC::call(global, fn, callData, JSC::jsUndefined(), args)); +} + +extern "C" JSC::JSInternalPromise* BakeLoadModuleByKey(GlobalObject* global, JSC::JSString* key) { + return global->moduleLoader()->loadAndEvaluateModule(global, key, JSC::jsUndefined(), JSC::jsUndefined()); +} + +extern "C" JSC::EncodedJSValue BakeLoadServerHmrPatch(GlobalObject* global, BunString source) { + JSC::VM&vm = global->vm(); + auto scope = DECLARE_THROW_SCOPE(vm); + + String string = "bake://server.patch.js"_s; + JSC::SourceOrigin origin = JSC::SourceOrigin(WTF::URL(string)); + JSC::SourceCode sourceCode = JSC::SourceCode(DevSourceProvider::create( + source.toWTFString(), + origin, + WTFMove(string), + WTF::TextPosition(), + JSC::SourceProviderSourceType::Program + )); + + JSC::JSValue result = vm.interpreter.executeProgram(sourceCode, global, global); + RETURN_IF_EXCEPTION(scope, JSC::JSValue::encode({})); + + RELEASE_ASSERT(result); + return JSC::JSValue::encode(result); +} + +extern "C" JSC::EncodedJSValue BakeGetModuleNamespace( + JSC::JSGlobalObject* global, + JSC::JSValue keyValue +) { + JSC::JSString* key = JSC::jsCast(keyValue); + JSC::VM& vm = global->vm(); + JSC::JSMap* map = JSC::jsCast( + 
global->moduleLoader()->getDirect( + vm, JSC::Identifier::fromString(global->vm(), "registry"_s) + )); + JSC::JSValue entry = map->get(global, key); + ASSERT(entry.isObject()); // should have called BakeLoadServerCode and wait for that promise + JSC::JSValue module = entry.getObject()->get(global, JSC::Identifier::fromString(global->vm(), "module"_s)); + ASSERT(module.isCell()); + JSC::JSModuleNamespaceObject* namespaceObject = global->moduleLoader()->getModuleNamespaceObject(global, module); + ASSERT(namespaceObject); + return JSC::JSValue::encode(namespaceObject); +} + +extern "C" JSC::EncodedJSValue BakeGetDefaultExportFromModule( + JSC::JSGlobalObject* global, + JSC::JSValue keyValue +) { + JSC::VM& vm = global->vm(); + return JSC::JSValue::encode(jsCast(JSC::JSValue::decode(BakeGetModuleNamespace(global, keyValue)))->get(global, vm.propertyNames->defaultKeyword)); +} + +// There were issues when trying to use JSValue.get from zig +extern "C" JSC::EncodedJSValue BakeGetOnModuleNamespace( + JSC::JSGlobalObject* global, + JSC::JSModuleNamespaceObject* moduleNamespace, + const unsigned char* key, + size_t keyLength +) { + JSC::VM& vm = global->vm(); + const auto propertyString = String(StringImpl::createWithoutCopying({ key, keyLength })); + const auto identifier = JSC::Identifier::fromString(vm, propertyString); + const auto property = JSC::PropertyName(identifier); + return JSC::JSValue::encode(moduleNamespace->get(global, property)); +} + +extern "C" JSC::EncodedJSValue BakeRegisterProductionChunk(JSC::JSGlobalObject* global, BunString virtualPathName, BunString source) { + JSC::VM& vm = global->vm(); + auto scope = DECLARE_THROW_SCOPE(vm); + + String string = virtualPathName.toWTFString(); + JSC::JSString* key = JSC::jsString(vm, string); + JSC::SourceOrigin origin = JSC::SourceOrigin(WTF::URL(string)); + JSC::SourceCode sourceCode = JSC::SourceCode(DevSourceProvider::create( + source.toWTFString(), + origin, + WTFMove(string), + WTF::TextPosition(), + JSC::SourceProviderSourceType::Module + )); + + global->moduleLoader()->provideFetch(global, key, sourceCode); + RETURN_IF_EXCEPTION(scope, JSC::JSValue::encode({})); + + return JSC::JSValue::encode(key); +} + +} // namespace Bake diff --git a/src/bake/BakeSourceProvider.h b/src/bake/BakeSourceProvider.h new file mode 100644 index 0000000000..2d821fc401 --- /dev/null +++ b/src/bake/BakeSourceProvider.h @@ -0,0 +1,38 @@ +#pragma once +#include "root.h" +#include "headers-handwritten.h" +#include "BakeGlobalObject.h" +#include "JavaScriptCore/SourceOrigin.h" + +namespace Bake { + +class DevSourceProvider final : public JSC::StringSourceProvider { +public: + static Ref create( + const String& source, + const JSC::SourceOrigin& sourceOrigin, + String&& sourceURL, + const TextPosition& startPosition, + JSC::SourceProviderSourceType sourceType + ) { + return adoptRef(*new DevSourceProvider(source, sourceOrigin, WTFMove(sourceURL), startPosition, sourceType)); + } + +private: + DevSourceProvider( + const String& source, + const JSC::SourceOrigin& sourceOrigin, + String&& sourceURL, + const TextPosition& startPosition, + JSC::SourceProviderSourceType sourceType + ) : StringSourceProvider( + source, + sourceOrigin, + JSC::SourceTaintedOrigin::Untainted, + WTFMove(sourceURL), + startPosition, + sourceType + ) {} +}; + +} // namespace Bake diff --git a/src/bake/DevServer.zig b/src/bake/DevServer.zig new file mode 100644 index 0000000000..fe5e658d45 --- /dev/null +++ b/src/bake/DevServer.zig @@ -0,0 +1,3840 @@ +//! 
Instance of the development server. Attaches to an instance of `Bun.serve`, +//! controlling bundler, routing, and hot module reloading. +//! +//! Reprocessing files that did not change is banned; by having perfect +//! incremental tracking over the project, editing a file's contents (aside +//! from adjusting imports) must always rebundle only that one file. +//! +//! All work is held in-memory, using manually managed data-oriented design. +//! +//! TODO: Currently does not have a `deinit()`, as it was assumed to be alive for +//! the remainder of this process' lifespan. Later, it will be required to fully +//! clean up server state. +pub const DevServer = @This(); +pub const debug = bun.Output.Scoped(.Bake, false); +pub const igLog = bun.Output.scoped(.IncrementalGraph, false); + +pub const Options = struct { + root: []const u8, + framework: bake.Framework, + dump_sources: ?[]const u8 = if (Environment.isDebug) ".bake-debug" else null, + dump_state_on_crash: bool = bun.FeatureFlags.bake_debugging_features, + verbose_watcher: bool = false, + vm: *VirtualMachine, +}; + +// The fields `client_graph`, `server_graph`, and `directory_watchers` all +// use `@fieldParentPointer` to access DevServer's state. This pattern has +// made it easier to group related fields together, but one must remember +// those structures still depend on the DevServer pointer. + +/// Used for all server-wide allocations. In debug, this shows up in +/// a separate named heap. Thread-safe. +allocator: Allocator, +/// Absolute path to project root directory. For the HMR +/// runtime, its module IDs are strings relative to this. +root: []const u8, +/// Hex string generated by hashing the framework config and bun revision. +/// Embedded in client bundles and sent when the HMR socket is opened; +/// when the value mismatches, the page is forcibly reloaded. +configuration_hash_key: [16]u8, +/// The virtual machine (global object) to execute code in. +vm: *VirtualMachine, +/// May be `null` if not attached to an HTTP server yet. +server: ?bun.JSC.API.AnyServer, +/// Contains the tree of routes. This structure contains FileIndex +router: FrameworkRouter, +/// Every navigable route has bundling state here. +route_bundles: ArrayListUnmanaged(RouteBundle), +/// All access into IncrementalGraph is guarded by a DebugThreadLock. This is +/// only a debug assertion as contention to this is always a bug; if a bundle is +/// active and a file is changed, that change is placed into the next bundle. +graph_safety_lock: bun.DebugThreadLock, +client_graph: IncrementalGraph(.client), +server_graph: IncrementalGraph(.server), +/// State populated during bundling and hot updates. Often cleared +incremental_result: IncrementalResult, +/// CSS files are accessible via `/_bun/css/.css` +/// Value is bundled code owned by `dev.allocator` +css_files: AutoArrayHashMapUnmanaged(u64, []const u8), +/// JS files are accessible via `/_bun/client/route..js` +/// These are randomly generated to avoid possible browser caching of old assets. +route_js_payloads: AutoArrayHashMapUnmanaged(u64, Route.Index), +// /// Assets are accessible via `/_bun/asset/` +// assets: bun.StringArrayHashMapUnmanaged(u64, Asset), +/// All bundling failures are stored until a file is saved and rebuilt. +/// They are stored in the wire format the HMR runtime expects so that +/// serialization only happens once.
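+/// (See `indexFailures` and `sendSerializedFailures` below for how these reach clients.)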
+bundling_failures: std.ArrayHashMapUnmanaged( + SerializedFailure, + void, + SerializedFailure.ArrayHashContextViaOwner, + false, +) = .{}, + +// These values are handles to the functions in `hmr-runtime-server.ts`. +// For type definitions, see `./bake.private.d.ts` +server_fetch_function_callback: JSC.Strong, +server_register_update_callback: JSC.Strong, + +// Watching +bun_watcher: *JSC.Watcher, +directory_watchers: DirectoryWatchStore, +/// Only two hot-reload tasks exist ever. Memory is reused by swapping between the two. +/// These items are aligned to cache lines to reduce contention. +watch_events: [2]HotReloadTask.Aligned, +/// 0 - no watch +/// 1 - has fired additional watch +/// 2+ - new events available, watcher is waiting on bundler to finish +watch_state: std.atomic.Value(u32), +watch_current: u1 = 0, + +/// Number of bundles that have been executed. This is currently not read, but +/// will be used later to determine when to invoke graph garbage collection. +generation: usize = 0, +/// Displayed in the HMR success indicator +bundles_since_last_error: usize = 0, + +/// Quickly retrieve a route's index from the entry point file. These are +/// populated as the routes are discovered. The route may not be bundled or +/// navigable, in the case a layout's index is looked up. +route_lookup: AutoArrayHashMapUnmanaged(IncrementalGraph(.server).FileIndex, RouteIndexAndRecurseFlag), + +framework: bake.Framework, +// Each logical graph gets its own bundler configuration +server_bundler: Bundler, +client_bundler: Bundler, +ssr_bundler: Bundler, + +// TODO: This being shared state is likely causing a crash +/// Stored and reused for bundling tasks +log: Log, + +// Debugging +dump_dir: ?std.fs.Dir, +/// Reference count to number of active sockets with the visualizer enabled. +emit_visualizer_events: u32, +has_pre_crash_handler: bool, + +pub const internal_prefix = "/_bun"; +pub const client_prefix = internal_prefix ++ "/client"; +pub const asset_prefix = internal_prefix ++ "/asset"; +pub const css_prefix = internal_prefix ++ "/css"; + +pub const RouteBundle = struct { + pub const Index = bun.GenericIndex(u30, RouteBundle); + + route: Route.Index, + + server_state: State, + + /// Used to communicate the pattern over WebSocket. The HMR client contains code + /// to match this against the URL bar to determine if a reloading route applies + /// or not. + full_pattern: []const u8, + /// Generated lazily when the client JS is requested (HTTP GET /_bun/client/*.js), + /// which is only needed when a hard-reload is performed. + /// + /// Freed when a client module updates. + client_bundle: ?[]const u8, + /// Contains the list of serialized failures. The hashmap allows for + /// efficient lookup and removal of failing files. + /// When state == .evaluation_failure, this is populated with that error. + evaluate_failure: ?SerializedFailure, + + // TODO: micro-opt: use a singular strong + + /// Cached to avoid re-creating the array every request. + /// Invalidated when a layout is added or removed from this route. + cached_module_list: JSC.Strong, + /// Cached to avoid re-creating the string every request. + /// Invalidated when any client file associated with the route is updated. + cached_client_bundle_url: JSC.Strong, + /// Cached to avoid re-creating the array every request. + /// Invalidated when the list of CSS files changes.
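+    /// (Built by `generateCssList` and passed to the server runtime as the "styles" argument; see `onRequestWithBundle`.)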
+ cached_css_file_array: JSC.Strong, + + /// A union is not used so that `bundler_failure_logs` can re-use memory, as + /// this state frequently changes between `loaded` and the failure variants. + const State = enum { + /// In development mode, routes are lazily built. This state implies a + /// build of this route has never been run. It is possible to bundle the + /// route entry point and still have an unqueued route if another route + /// imports this one. This state is implied if `FrameworkRouter.Route` + /// has no bundle index assigned. + unqueued, + /// A bundle associated with this route is happening + bundling, + /// This route was flagged for bundling failures. There are edge cases + /// where a route can be disconnected from its failures, so the route + /// imports has to be traced to discover if possible failures still + /// exist. + possible_bundling_failures, + /// Loading the module at runtime had a failure. + evaluation_failure, + /// Calling the request function may error, but that error will not be + /// at fault of bundling. + loaded, + }; +}; + +pub const DeferredRequest = struct { + next: ?*DeferredRequest, + bundle: RouteBundle.Index, + data: Data, + + const Data = union(enum) { + server_handler: bun.JSC.API.SavedRequest, + /// onJsRequestWithBundle + js_payload: *Response, + + const Tag = @typeInfo(Data).Union.tag_type.?; + }; +}; + +/// DevServer is stored on the heap, storing its allocator. +// TODO: change the error set to JSOrMemoryError!*DevServer +pub fn init(options: Options) !*DevServer { + const allocator = bun.default_allocator; + bun.analytics.Features.kit_dev +|= 1; + + var dump_dir = if (bun.FeatureFlags.bake_debugging_features) + if (options.dump_sources) |dir| + std.fs.cwd().makeOpenPath(dir, .{}) catch |err| dir: { + bun.handleErrorReturnTrace(err, @errorReturnTrace()); + Output.warn("Could not open directory for dumping sources: {}", .{err}); + break :dir null; + } + else + null; + errdefer if (bun.FeatureFlags.bake_debugging_features) if (dump_dir) |*dir| dir.close(); + + const separate_ssr_graph = if (options.framework.server_components) |sc| sc.separate_ssr_graph else false; + + const dev = bun.create(allocator, DevServer, .{ + .allocator = allocator, + + .root = options.root, + .vm = options.vm, + .server = null, + .directory_watchers = DirectoryWatchStore.empty, + .server_fetch_function_callback = .{}, + .server_register_update_callback = .{}, + .generation = 0, + .graph_safety_lock = .{}, + .log = Log.init(allocator), + .dump_dir = dump_dir, + .framework = options.framework, + .watch_state = .{ .raw = 0 }, + .watch_current = 0, + .emit_visualizer_events = 0, + .has_pre_crash_handler = options.dump_state_on_crash, + .css_files = .{}, + .route_js_payloads = .{}, + // .assets = .{}, + + .client_graph = IncrementalGraph(.client).empty, + .server_graph = IncrementalGraph(.server).empty, + .incremental_result = IncrementalResult.empty, + .route_lookup = .{}, + + .server_bundler = undefined, + .client_bundler = undefined, + .ssr_bundler = undefined, + + .bun_watcher = undefined, + .watch_events = undefined, + + .configuration_hash_key = undefined, + + .router = undefined, + .route_bundles = .{}, + }); + errdefer allocator.destroy(dev); + + assert(dev.server_graph.owner() == dev); + assert(dev.client_graph.owner() == dev); + assert(dev.directory_watchers.owner() == dev); + + dev.graph_safety_lock.lock(); + defer dev.graph_safety_lock.unlock(); + + const fs = try bun.fs.FileSystem.init(options.root); + + dev.bun_watcher = try Watcher.init(DevServer, 
dev, fs, bun.default_allocator); + errdefer dev.bun_watcher.deinit(false); + try dev.bun_watcher.start(); + + dev.server_bundler.resolver.watcher = dev.bun_watcher.getResolveWatcher(); + dev.client_bundler.resolver.watcher = dev.bun_watcher.getResolveWatcher(); + dev.ssr_bundler.resolver.watcher = dev.bun_watcher.getResolveWatcher(); + dev.watch_events = .{ + .{ .aligned = HotReloadTask.initEmpty(dev) }, + .{ .aligned = HotReloadTask.initEmpty(dev) }, + }; + + try dev.framework.initBundler(allocator, &dev.log, .development, .server, &dev.server_bundler); + dev.client_bundler.options.dev_server = dev; + try dev.framework.initBundler(allocator, &dev.log, .development, .client, &dev.client_bundler); + dev.server_bundler.options.dev_server = dev; + if (separate_ssr_graph) { + try dev.framework.initBundler(allocator, &dev.log, .development, .ssr, &dev.ssr_bundler); + dev.ssr_bundler.options.dev_server = dev; + } + + dev.framework = dev.framework.resolve(&dev.server_bundler.resolver, &dev.client_bundler.resolver) catch { + Output.errGeneric("Failed to resolve all imports required by the framework", .{}); + return error.FrameworkInitialization; + }; + + errdefer dev.route_lookup.clearAndFree(allocator); + // errdefer dev.client_graph.deinit(allocator); + // errdefer dev.server_graph.deinit(allocator); + + dev.vm.global = @ptrCast(dev.vm.global); + + dev.configuration_hash_key = hash_key: { + var hash = std.hash.Wyhash.init(128); + + if (bun.Environment.isDebug) { + const stat = try bun.sys.stat(try bun.selfExePath()).unwrap(); + bun.writeAnyToHasher(&hash, stat.mtime()); + hash.update(bake.getHmrRuntime(.client)); + hash.update(bake.getHmrRuntime(.server)); + } else { + hash.update(bun.Environment.git_sha_short); + } + + // TODO: hash router types + // hash.update(dev.framework.entry_client); + // hash.update(dev.framework.entry_server); + + if (dev.framework.server_components) |sc| { + bun.writeAnyToHasher(&hash, true); + bun.writeAnyToHasher(&hash, sc.separate_ssr_graph); + hash.update(sc.client_register_server_reference); + hash.update(&.{0}); + hash.update(sc.server_register_client_reference); + hash.update(&.{0}); + hash.update(sc.server_register_server_reference); + hash.update(&.{0}); + hash.update(sc.server_runtime_import); + hash.update(&.{0}); + } else { + bun.writeAnyToHasher(&hash, false); + } + + if (dev.framework.react_fast_refresh) |rfr| { + bun.writeAnyToHasher(&hash, true); + hash.update(rfr.import_source); + } else { + bun.writeAnyToHasher(&hash, false); + } + + // TODO: dev.framework.built_in_modules + + break :hash_key std.fmt.bytesToHex(std.mem.asBytes(&hash.final()), .lower); + }; + + // Add react fast refresh if needed. This is the first file on the client side, + // as it will be referred to by index. + if (dev.framework.react_fast_refresh) |rfr| { + assert(try dev.client_graph.insertStale(rfr.import_source, false) == IncrementalGraph(.client).react_refresh_index); + } + + try dev.initServerRuntime(); + + // Initialize the router + dev.router = router: { + var types = try std.ArrayListUnmanaged(FrameworkRouter.Type).initCapacity(allocator, options.framework.file_system_router_types.len); + errdefer types.deinit(allocator); + + for (options.framework.file_system_router_types, 0..) 
|fsr, i| { + const joined_root = bun.path.joinAbs(dev.root, .auto, fsr.root); + const entry = dev.server_bundler.resolver.readDirInfoIgnoreError(joined_root) orelse + continue; + + const server_file = try dev.server_graph.insertStaleExtra(fsr.entry_server, false, true); + + try types.append(allocator, .{ + .abs_root = bun.strings.withoutTrailingSlash(entry.abs_path), + .prefix = fsr.prefix, + .ignore_underscores = fsr.ignore_underscores, + .ignore_dirs = fsr.ignore_dirs, + .extensions = fsr.extensions, + .style = fsr.style, + .server_file = toOpaqueFileId(.server, server_file), + .client_file = if (fsr.entry_client) |client| + toOpaqueFileId(.client, try dev.client_graph.insertStale(client, false)).toOptional() + else + .none, + .server_file_string = .{}, + }); + + try dev.route_lookup.put(allocator, server_file, .{ + .route_index = FrameworkRouter.Route.Index.init(@intCast(i)), + .should_recurse_when_visiting = true, + }); + } + + break :router try FrameworkRouter.initEmpty(types.items, allocator); + }; + + // TODO: move pre-bundling to be one tick after server startup. + // this way the line saying the server is ready shows quicker + try dev.scanInitialRoutes(); + + if (bun.FeatureFlags.bake_debugging_features and options.dump_state_on_crash) + try bun.crash_handler.appendPreCrashHandler(DevServer, dev, dumpStateDueToCrash); + + return dev; +} + +fn initServerRuntime(dev: *DevServer) !void { + const runtime = bun.String.static(bun.bake.getHmrRuntime(.server)); + + const interface = c.BakeLoadInitialServerCode( + @ptrCast(dev.vm.global), + runtime, + if (dev.framework.server_components) |sc| sc.separate_ssr_graph else false, + ) catch |err| { + dev.vm.printErrorLikeObjectToConsole(dev.vm.global.takeException(err)); + @panic("Server runtime failed to start. The above error is always a bug in Bun"); + }; + + if (!interface.isObject()) + @panic("Internal assertion failure: expected interface from HMR runtime to be an object"); + const fetch_function: JSValue = interface.get(dev.vm.global, "handleRequest") orelse + @panic("Internal assertion failure: expected interface from HMR runtime to contain handleRequest"); + bun.assert(fetch_function.isCallable(dev.vm.jsc)); + dev.server_fetch_function_callback = JSC.Strong.create(fetch_function, dev.vm.global); + const register_update = interface.get(dev.vm.global, "registerUpdate") orelse + @panic("Internal assertion failure: expected interface from HMR runtime to contain registerUpdate"); + dev.server_register_update_callback = JSC.Strong.create(register_update, dev.vm.global); + + fetch_function.ensureStillAlive(); + register_update.ensureStillAlive(); +} + +/// Deferred one tick so that the server can be up faster +fn scanInitialRoutes(dev: *DevServer) !void { + try dev.router.scanAll( + dev.allocator, + &dev.server_bundler.resolver, + FrameworkRouter.InsertionContext.wrap(DevServer, dev), + ); + + try dev.server_graph.ensureStaleBitCapacity(true); + try dev.client_graph.ensureStaleBitCapacity(true); +} + +pub fn attachRoutes(dev: *DevServer, server: anytype) !void { + dev.server = bun.JSC.API.AnyServer.from(server); + const app = server.app.?; + + // For this to work, the route handlers need to be augmented to use the comptime + // SSL parameter. It's worth considering removing the SSL boolean. 
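+    // (Until then, the check below rejects TLS-enabled servers with a panic.)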
+ if (@TypeOf(app) == *uws.NewApp(true)) { + bun.todoPanic(@src(), "DevServer does not support SSL yet", .{}); + } + + app.get(client_prefix ++ "/:route", *DevServer, dev, onJsRequest); + app.get(asset_prefix ++ "/:asset", *DevServer, dev, onAssetRequest); + app.get(css_prefix ++ "/:asset", *DevServer, dev, onCssRequest); + app.get(internal_prefix ++ "/src/*", *DevServer, dev, onSrcRequest); + + app.ws( + internal_prefix ++ "/hmr", + dev, + 0, + uws.WebSocketBehavior.Wrap(DevServer, HmrSocket, false).apply(.{}), + ); + + app.get(internal_prefix ++ "/incremental_visualizer", *DevServer, dev, onIncrementalVisualizer); + + app.any("/*", *DevServer, dev, onRequest); +} + +pub fn deinit(dev: *DevServer) void { + const allocator = dev.allocator; + if (dev.has_pre_crash_handler) + bun.crash_handler.removePreCrashHandler(dev); + allocator.destroy(dev); + bun.todoPanic(@src(), "bake.DevServer.deinit()", .{}); +} + +fn onJsRequest(dev: *DevServer, req: *Request, resp: *Response) void { + const route_bundle = route: { + const route_id = req.parameter(0); + if (!bun.strings.hasSuffixComptime(route_id, ".js")) + return req.setYield(true); + if (!bun.strings.hasPrefixComptime(route_id, "route.")) + return req.setYield(true); + const i = parseHexToInt(u64, route_id["route.".len .. route_id.len - ".js".len]) orelse + return req.setYield(true); + break :route dev.route_js_payloads.get(i) orelse + return req.setYield(true); + }; + + dev.ensureRouteIsBundled(route_bundle, .js_payload, req, resp) catch bun.outOfMemory(); +} + +fn onAssetRequest(dev: *DevServer, req: *Request, resp: *Response) void { + _ = dev; + _ = req; + _ = resp; + bun.todoPanic(@src(), "serve asset file", .{}); + // const route_id = req.parameter(0); + // const asset = dev.assets.get(route_id) orelse + // return req.setYield(true); + // _ = asset; // autofix + +} + +fn onCssRequest(dev: *DevServer, req: *Request, resp: *Response) void { + const param = req.parameter(0); + if (!bun.strings.hasSuffixComptime(param, ".css")) + return req.setYield(true); + const hex = param[0 .. 
param.len - ".css".len]; + if (hex.len != @sizeOf(u64) * 2) + return req.setYield(true); + + var out: [@sizeOf(u64)]u8 = undefined; + assert((std.fmt.hexToBytes(&out, hex) catch + return req.setYield(true)).len == @sizeOf(u64)); + const hash: u64 = @bitCast(out); + + const css = dev.css_files.get(hash) orelse + return req.setYield(true); + + sendTextFile(css, MimeType.css.value, resp); +} + +fn parseHexToInt(comptime T: type, slice: []const u8) ?T { + var out: [@sizeOf(T)]u8 = undefined; + assert((std.fmt.hexToBytes(&out, slice) catch return null).len == @sizeOf(T)); + return @bitCast(out); +} + +fn onIncrementalVisualizer(_: *DevServer, _: *Request, resp: *Response) void { + resp.corked(onIncrementalVisualizerCorked, .{resp}); +} + +fn onIncrementalVisualizerCorked(resp: *Response) void { + const code = if (Environment.codegen_embed) + @embedFile("incremental_visualizer.html") + else + bun.runtimeEmbedFile(.src_eager, "bake/incremental_visualizer.html"); + resp.writeHeaderInt("Content-Length", code.len); + resp.end(code, false); +} + +fn ensureRouteIsBundled( + dev: *DevServer, + route_index: Route.Index, + kind: DeferredRequest.Data.Tag, + req: *Request, + resp: *Response, +) bun.OOM!void { + const bundle_index = if (dev.router.routePtr(route_index).bundle.unwrap()) |bundle_index| + bundle_index + else + try dev.insertRouteBundle(route_index); + + switch (dev.routeBundlePtr(bundle_index).server_state) { + .unqueued => { + const server_file_names = dev.server_graph.bundled_files.keys(); + const client_file_names = dev.client_graph.bundled_files.keys(); + + var sfa = std.heap.stackFallback(4096, dev.allocator); + const temp_alloc = sfa.get(); + + var entry_points = std.ArrayList(BakeEntryPoint).init(temp_alloc); + defer entry_points.deinit(); + + // Build a list of all files that have not yet been bundled. 
+ var route = dev.router.routePtr(route_index); + const router_type = dev.router.typePtr(route.type); + try dev.appendOpaqueEntryPoint(server_file_names, &entry_points, .server, router_type.server_file); + try dev.appendOpaqueEntryPoint(client_file_names, &entry_points, .client, router_type.client_file); + try dev.appendOpaqueEntryPoint(server_file_names, &entry_points, .server, route.file_page); + try dev.appendOpaqueEntryPoint(server_file_names, &entry_points, .server, route.file_layout); + while (route.parent.unwrap()) |parent_index| { + route = dev.router.routePtr(parent_index); + try dev.appendOpaqueEntryPoint(server_file_names, &entry_points, .server, route.file_layout); + } + + if (entry_points.items.len == 0) { + @panic("TODO: trace graph for possible errors, so DevServer knows what state this should go to"); + } + + const route_bundle = dev.routeBundlePtr(bundle_index); + if (dev.bundle(entry_points.items)) |_| { + route_bundle.server_state = .loaded; + } else |err| switch (err) { + error.OutOfMemory => bun.outOfMemory(), + error.BuildFailed => assert(route_bundle.server_state == .possible_bundling_failures), + error.ServerLoadFailed => route_bundle.server_state = .evaluation_failure, + } + }, + .bundling => { + const prepared = dev.server.?.DebugHTTPServer.prepareJsRequestContext(req, resp) orelse + return; + _ = prepared; + @panic("TODO: Async Bundler"); + }, + else => {}, + } + switch (dev.routeBundlePtr(bundle_index).server_state) { + .unqueued => unreachable, + .bundling => @panic("TODO: Async Bundler"), + .possible_bundling_failures => { + // TODO: perform a graph trace to find just the errors that are needed + if (dev.bundling_failures.count() > 0) { + resp.corked(sendSerializedFailures, .{ + dev, + resp, + dev.bundling_failures.keys(), + .bundler, + }); + return; + } else { + dev.routeBundlePtr(bundle_index).server_state = .loaded; + } + }, + .evaluation_failure => { + resp.corked(sendSerializedFailures, .{ + dev, + resp, + (&(dev.routeBundlePtr(bundle_index).evaluate_failure orelse @panic("missing error")))[0..1], + .evaluation, + }); + return; + }, + .loaded => {}, + } + + switch (kind) { + .server_handler => dev.onRequestWithBundle(bundle_index, .{ .stack = req }, resp), + .js_payload => dev.onJsRequestWithBundle(bundle_index, resp), + } +} + +fn onRequestWithBundle( + dev: *DevServer, + route_bundle_index: RouteBundle.Index, + req: bun.JSC.API.SavedRequest.Union, + resp: *Response, +) void { + const server_request_callback = dev.server_fetch_function_callback.get() orelse + unreachable; // did not bundle + + const route_bundle = dev.routeBundlePtr(route_bundle_index); + + const router_type = dev.router.typePtr(dev.router.routePtr(route_bundle.route).type); + + dev.server.?.onRequestFromSaved( + req, + resp, + server_request_callback, + 4, + .{ + // routerTypeMain + router_type.server_file_string.get() orelse str: { + const name = dev.server_graph.bundled_files.keys()[fromOpaqueFileId(.server, router_type.server_file).get()]; + const str = bun.String.createUTF8(name); + defer str.deref(); + const js = str.toJS(dev.vm.global); + router_type.server_file_string = JSC.Strong.create(js, dev.vm.global); + break :str js; + }, + // routeModules + route_bundle.cached_module_list.get() orelse arr: { + const global = dev.vm.global; + const keys = dev.server_graph.bundled_files.keys(); + var n: usize = 1; + var route = dev.router.routePtr(route_bundle.route); + while (true) { + if (route.file_layout != .none) n += 1; + route = dev.router.routePtr(route.parent.unwrap() orelse 
break); + } + const arr = JSValue.createEmptyArray(global, n); + route = dev.router.routePtr(route_bundle.route); + var route_name = bun.String.createUTF8(dev.relativePath(keys[fromOpaqueFileId(.server, route.file_page.unwrap().?).get()])); + arr.putIndex(global, 0, route_name.transferToJS(global)); + n = 1; + while (true) { + if (route.file_layout.unwrap()) |layout| { + var layout_name = bun.String.createUTF8(dev.relativePath(keys[fromOpaqueFileId(.server, layout).get()])); + arr.putIndex(global, @intCast(n), layout_name.transferToJS(global)); + n += 1; + } + route = dev.router.routePtr(route.parent.unwrap() orelse break); + } + route_bundle.cached_module_list = JSC.Strong.create(arr, global); + break :arr arr; + }, + // clientId + route_bundle.cached_client_bundle_url.get() orelse str: { + const id = std.crypto.random.int(u64); + dev.route_js_payloads.put(dev.allocator, id, route_bundle.route) catch bun.outOfMemory(); + const str = bun.String.createFormat(client_prefix ++ "/route.{}.js", .{std.fmt.fmtSliceHexLower(std.mem.asBytes(&id))}) catch bun.outOfMemory(); + defer str.deref(); + const js = str.toJS(dev.vm.global); + route_bundle.cached_client_bundle_url = JSC.Strong.create(js, dev.vm.global); + break :str js; + }, + // styles + route_bundle.cached_css_file_array.get() orelse arr: { + const js = dev.generateCssList(route_bundle) catch bun.outOfMemory(); + route_bundle.cached_css_file_array = JSC.Strong.create(js, dev.vm.global); + break :arr js; + }, + }, + ); +} + +pub fn onJsRequestWithBundle(dev: *DevServer, bundle_index: RouteBundle.Index, resp: *Response) void { + const route_bundle = dev.routeBundlePtr(bundle_index); + const code = route_bundle.client_bundle orelse code: { + const code = dev.generateClientBundle(route_bundle) catch bun.outOfMemory(); + route_bundle.client_bundle = code; + break :code code; + }; + sendTextFile(code, MimeType.javascript.value, resp); +} + +pub fn onSrcRequest(dev: *DevServer, req: *uws.Request, resp: *App.Response) void { + if (req.header("open-in-editor") == null) { + resp.writeStatus("501 Not Implemented"); + resp.end("Viewing source without opening in editor is not implemented yet!", false); + return; + } + + const ctx = &dev.vm.rareData().editor_context; + ctx.autoDetectEditor(JSC.VirtualMachine.get().bundler.env); + const line: ?[]const u8 = req.header("editor-line"); + const column: ?[]const u8 = req.header("editor-column"); + + if (ctx.editor) |editor| { + var url = req.url()[internal_prefix.len + "/src/".len ..]; + if (bun.strings.indexOfChar(url, ':')) |colon| { + url = url[0..colon]; + } + editor.open(ctx.path, url, line, column, dev.allocator) catch { + resp.writeStatus("202 No Content"); + resp.end("", false); + return; + }; + resp.writeStatus("202 No Content"); + resp.end("", false); + } else { + resp.writeStatus("500 Internal Server Error"); + resp.end("Please set your editor in bunfig.toml", false); + } +} + +const BundleError = error{ + OutOfMemory, + /// Graph entry points will be annotated with failures to display. 
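+    /// (Returned by `bundle()` when the update produced at least one new failure.)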
+ BuildFailed, + + ServerLoadFailed, +}; + +fn bundle(dev: *DevServer, files: []const BakeEntryPoint) BundleError!void { + defer dev.emitVisualizerMessageIfNeeded() catch bun.outOfMemory(); + + assert(files.len > 0); + + const bundle_file_list = bun.Output.Scoped(.bundle_file_list, false); + + if (bundle_file_list.isVisible()) { + bundle_file_list.log("Start bundle {d} files", .{files.len}); + for (files) |f| { + bundle_file_list.log("- {s} (.{s})", .{ f.path, @tagName(f.graph) }); + } + } + + var heap = try ThreadlocalArena.init(); + defer heap.deinit(); + + const allocator = heap.allocator(); + var ast_memory_allocator = try allocator.create(bun.JSAst.ASTMemoryAllocator); + ast_memory_allocator.* = .{ .allocator = allocator }; + ast_memory_allocator.reset(); + ast_memory_allocator.push(); + + if (dev.framework.server_components == null) { + // The handling of the dependency graphs are SLIGHTLY different when + // server components are disabled. It's subtle, but enough that it + // would be incorrect to even try to run a build. + bun.todoPanic(@src(), "support non-server components build", .{}); + } + + var timer = if (Environment.enable_logs) std.time.Timer.start() catch unreachable; + + dev.graph_safety_lock.lock(); + defer dev.graph_safety_lock.unlock(); + + const bv2 = try BundleV2.init( + &dev.server_bundler, + if (dev.framework.server_components != null) .{ + .framework = dev.framework, + .client_bundler = &dev.client_bundler, + .ssr_bundler = &dev.ssr_bundler, + } else @panic("TODO: support non-server components"), + allocator, + JSC.AnyEventLoop.init(allocator), + false, // reloading is handled separately + JSC.WorkPool.get(), + heap, + ); + bv2.bun_watcher = dev.bun_watcher; + // this.plugins = completion.plugins; + + defer { + if (bv2.graph.pool.pool.threadpool_context == @as(?*anyopaque, @ptrCast(bv2.graph.pool))) { + bv2.graph.pool.pool.threadpool_context = null; + } + ast_memory_allocator.pop(); + bv2.deinit(); + } + + dev.client_graph.reset(); + dev.server_graph.reset(); + + const bundle_result = bv2.runFromBakeDevServer(files) catch |err| { + bun.handleErrorReturnTrace(err, @errorReturnTrace()); + + bv2.bundler.log.print(Output.errorWriter()) catch {}; + + Output.warn("BundleV2.runFromBakeDevServer returned error.{s}", .{@errorName(err)}); + + return; + }; + + bv2.bundler.log.print(Output.errorWriter()) catch {}; + + try dev.finalizeBundle(bv2, bundle_result); + + try dev.client_graph.ensureStaleBitCapacity(false); + try dev.server_graph.ensureStaleBitCapacity(false); + + dev.generation +%= 1; + if (Environment.enable_logs) { + debug.log("Bundle Round {d}: {d} server, {d} client, {d} ms", .{ + dev.generation, + dev.server_graph.current_chunk_parts.items.len, + dev.client_graph.current_chunk_parts.items.len, + @divFloor(timer.read(), std.time.ns_per_ms), + }); + } + + const is_first_server_chunk = !dev.server_fetch_function_callback.has(); + + if (dev.server_graph.current_chunk_len > 0) { + const server_bundle = try dev.server_graph.takeBundle( + if (is_first_server_chunk) .initial_response else .hmr_chunk, + "", + ); + defer dev.allocator.free(server_bundle); + + const server_modules = c.BakeLoadServerHmrPatch(@ptrCast(dev.vm.global), bun.String.createLatin1(server_bundle)) catch |err| { + // No user code has been evaluated yet, since everything is to + // be wrapped in a function clousure. This means that the likely + // error is going to be a syntax error, or other mistake in the + // bundler. 
+ dev.vm.printErrorLikeObjectToConsole(dev.vm.global.takeException(err)); + @panic("Error thrown while evaluating server code. This is always a bug in the bundler."); + }; + const errors = dev.server_register_update_callback.get().?.call( + dev.vm.global, + dev.vm.global.toJSValue(), + &.{ + server_modules, + dev.makeArrayForServerComponentsPatch(dev.vm.global, dev.incremental_result.client_components_added.items), + dev.makeArrayForServerComponentsPatch(dev.vm.global, dev.incremental_result.client_components_removed.items), + }, + ) catch |err| { + // One module replacement error should NOT prevent follow-up + // module replacements to fail. It is the HMR runtime's + // responsibility to collect all module load errors, and + // bubble them up. + dev.vm.printErrorLikeObjectToConsole(dev.vm.global.takeException(err)); + @panic("Error thrown in Hot-module-replacement code. This is always a bug in the HMR runtime."); + }; + _ = errors; // TODO: + } + + const css_chunks = bundle_result.cssChunks(); + if ((dev.client_graph.current_chunk_len > 0 or + css_chunks.len > 0) and + dev.numSubscribers(HmrSocket.global_topic) > 0) + { + var sfb2 = std.heap.stackFallback(65536, bun.default_allocator); + var payload = std.ArrayList(u8).initCapacity(sfb2.get(), 65536) catch + unreachable; // enough space + defer payload.deinit(); + payload.appendAssumeCapacity(MessageId.hot_update.char()); + const w = payload.writer(); + + const css_values = dev.css_files.values(); + try w.writeInt(u32, @intCast(css_chunks.len), .little); + const sources = bv2.graph.input_files.items(.source); + for (css_chunks) |chunk| { + const abs_path = sources[chunk.entry_point.source_index].path.text; + + try w.writeAll(&std.fmt.bytesToHex(std.mem.asBytes(&bun.hash(abs_path)), .lower)); + + const css_data = css_values[chunk.entry_point.entry_point_id]; + try w.writeInt(u32, @intCast(css_data.len), .little); + try w.writeAll(css_data); + } + + if (dev.client_graph.current_chunk_len > 0) + try dev.client_graph.takeBundleToList(.hmr_chunk, &payload, ""); + + dev.publish(HmrSocket.global_topic, payload.items, .binary); + } + + if (dev.incremental_result.failures_added.items.len > 0) { + dev.bundles_since_last_error = 0; + return error.BuildFailed; + } +} + +fn indexFailures(dev: *DevServer) !void { + var sfa_state = std.heap.stackFallback(65536, dev.allocator); + const sfa = sfa_state.get(); + + if (dev.incremental_result.failures_added.items.len > 0) { + var total_len: usize = @sizeOf(MessageId) + @sizeOf(u32); + + for (dev.incremental_result.failures_added.items) |fail| { + total_len += fail.data.len; + } + + total_len += dev.incremental_result.failures_removed.items.len * @sizeOf(u32); + + dev.server_graph.affected_by_trace = try DynamicBitSetUnmanaged.initEmpty(sfa, dev.server_graph.bundled_files.count()); + defer dev.server_graph.affected_by_trace.deinit(sfa); + + dev.client_graph.affected_by_trace = try DynamicBitSetUnmanaged.initEmpty(sfa, dev.client_graph.bundled_files.count()); + defer dev.client_graph.affected_by_trace.deinit(sfa); + + var payload = try std.ArrayList(u8).initCapacity(sfa, total_len); + defer payload.deinit(); + payload.appendAssumeCapacity(MessageId.errors.char()); + const w = payload.writer(); + + try w.writeInt(u32, @intCast(dev.incremental_result.failures_removed.items.len), .little); + + for (dev.incremental_result.failures_removed.items) |removed| { + try w.writeInt(u32, @bitCast(removed.getOwner().encode()), .little); + removed.deinit(); + } + + for (dev.incremental_result.failures_added.items) |added| { 
+ try w.writeAll(added.data); + + switch (added.getOwner()) { + .none, .route => unreachable, + .server => |index| try dev.server_graph.traceDependencies(index, .no_stop), + .client => |index| try dev.client_graph.traceDependencies(index, .no_stop), + } + } + + { + @panic("TODO: revive"); + } + // for (dev.incremental_result.routes_affected.items) |route_index| { + // const route = &dev.routes[route_index.get()]; + // route.server_state = .possible_bundling_failures; + // } + + dev.publish(HmrSocket.global_topic, payload.items, .binary); + } else if (dev.incremental_result.failures_removed.items.len > 0) { + if (dev.bundling_failures.count() == 0) { + dev.publish(HmrSocket.global_topic, &.{MessageId.errors_cleared.char()}, .binary); + for (dev.incremental_result.failures_removed.items) |removed| { + removed.deinit(); + } + } else { + var payload = try std.ArrayList(u8).initCapacity(sfa, @sizeOf(MessageId) + @sizeOf(u32) + dev.incremental_result.failures_removed.items.len * @sizeOf(u32)); + defer payload.deinit(); + payload.appendAssumeCapacity(MessageId.errors.char()); + const w = payload.writer(); + + try w.writeInt(u32, @intCast(dev.incremental_result.failures_removed.items.len), .little); + + for (dev.incremental_result.failures_removed.items) |removed| { + try w.writeInt(u32, @bitCast(removed.getOwner().encode()), .little); + removed.deinit(); + } + + dev.publish(HmrSocket.global_topic, payload.items, .binary); + } + } + + dev.incremental_result.failures_removed.clearRetainingCapacity(); +} + +/// Used to generate the entry point. Unlike incremental patches, this always +/// contains all needed files for a route. +fn generateClientBundle(dev: *DevServer, route_bundle: *RouteBundle) bun.OOM![]const u8 { + assert(route_bundle.client_bundle == null); + assert(route_bundle.server_state == .loaded); // page is unfit to load + + dev.graph_safety_lock.lock(); + defer dev.graph_safety_lock.unlock(); + + // Prepare bitsets + var sfa_state = std.heap.stackFallback(65536, dev.allocator); + const sfa = sfa_state.get(); + // const gts = try dev.initGraphTraceState(sfa); + // defer gts.deinit(sfa); + dev.server_graph.affected_by_trace = try DynamicBitSetUnmanaged.initEmpty(sfa, dev.server_graph.bundled_files.count()); + defer dev.server_graph.affected_by_trace.deinit(sfa); + + dev.client_graph.affected_by_trace = try DynamicBitSetUnmanaged.initEmpty(sfa, dev.client_graph.bundled_files.count()); + defer dev.client_graph.affected_by_trace.deinit(sfa); + + // Run tracing + dev.client_graph.reset(); + try dev.traceAllRouteImports(route_bundle, .{ .find_client_modules = true }); + + const client_file = dev.router.typePtr(dev.router.routePtr(route_bundle.route).type).client_file.unwrap() orelse + @panic("No client side entrypoint in client bundle"); + + return dev.client_graph.takeBundle( + .initial_response, + dev.relativePath(dev.client_graph.bundled_files.keys()[fromOpaqueFileId(.client, client_file).get()]), + ); +} + +fn generateCssList(dev: *DevServer, route_bundle: *RouteBundle) bun.OOM!JSC.JSValue { + if (Environment.allow_assert) assert(!route_bundle.cached_css_file_array.has()); + assert(route_bundle.server_state == .loaded); // page is unfit to load + + dev.graph_safety_lock.lock(); + defer dev.graph_safety_lock.unlock(); + + // Prepare bitsets + var sfa_state = std.heap.stackFallback(65536, dev.allocator); + + const sfa = sfa_state.get(); + dev.server_graph.affected_by_trace = try DynamicBitSetUnmanaged.initEmpty(sfa, dev.server_graph.bundled_files.count()); + defer 
dev.server_graph.affected_by_trace.deinit(sfa); + + dev.client_graph.affected_by_trace = try DynamicBitSetUnmanaged.initEmpty(sfa, dev.client_graph.bundled_files.count()); + defer dev.client_graph.affected_by_trace.deinit(sfa); + + // Run tracing + dev.client_graph.reset(); + try dev.traceAllRouteImports(route_bundle, .{ .find_css = true }); + + const names = dev.client_graph.current_css_files.items; + const arr = JSC.JSArray.createEmpty(dev.vm.global, names.len); + for (names, 0..) |item, i| { + const str = bun.String.createUTF8(item); + defer str.deref(); + arr.putIndex(dev.vm.global, @intCast(i), str.toJS(dev.vm.global)); + } + return arr; +} + +fn traceAllRouteImports(dev: *DevServer, route_bundle: *RouteBundle, goal: TraceImportGoal) !void { + var route = dev.router.routePtr(route_bundle.route); + const router_type = dev.router.typePtr(route.type); + + // Both framework entry points are considered + try dev.server_graph.traceImports(fromOpaqueFileId(.server, router_type.server_file), .{ .find_css = true }); + if (router_type.client_file.unwrap()) |id| { + try dev.client_graph.traceImports(fromOpaqueFileId(.client, id), goal); + } + + // The route file is considered + if (route.file_page.unwrap()) |id| { + try dev.server_graph.traceImports(fromOpaqueFileId(.server, id), goal); + } + + // For all parents, the layout is considered + while (true) { + if (route.file_layout.unwrap()) |id| { + try dev.server_graph.traceImports(fromOpaqueFileId(.server, id), goal); + } + route = dev.router.routePtr(route.parent.unwrap() orelse break); + } +} + +fn makeArrayForServerComponentsPatch(dev: *DevServer, global: *JSC.JSGlobalObject, items: []const IncrementalGraph(.server).FileIndex) JSValue { + if (items.len == 0) return .null; + const arr = JSC.JSArray.createEmpty(global, items.len); + const names = dev.server_graph.bundled_files.keys(); + for (items, 0..) |item, i| { + const str = bun.String.createUTF8(dev.relativePath(names[item.get()])); + defer str.deref(); + arr.putIndex(global, @intCast(i), str.toJS(global)); + } + return arr; +} + +pub const HotUpdateContext = struct { + /// bundle_v2.Graph.input_files.items(.source) + sources: []bun.logger.Source, + /// bundle_v2.Graph.ast.items(.import_records) + import_records: []bun.ImportRecord.List, + /// bundle_v2.Graph.server_component_boundaries.slice() + scbs: bun.JSAst.ServerComponentBoundary.List.Slice, + /// Which files have a server-component boundary. + server_to_client_bitset: DynamicBitSetUnmanaged, + /// Used to reduce calls to the IncrementalGraph hash table. + /// + /// Caller initializes a slice with `sources.len * 2` items + /// all initialized to `std.math.maxInt(u32)` + /// + /// The first half of this slice is for the client graph, + /// second half is for server. Interact with this via + /// `getCachedIndex` + resolved_index_cache: []u32, + /// Used to tell if the server should replace or append import records. 
+ server_seen_bit_set: DynamicBitSetUnmanaged, + + pub fn getCachedIndex( + rc: *const HotUpdateContext, + comptime side: bake.Side, + i: bun.JSAst.Index, + ) *IncrementalGraph(side).FileIndex { + const start = switch (side) { + .client => 0, + .server => rc.sources.len, + }; + + const subslice = rc.resolved_index_cache[start..][0..rc.sources.len]; + + comptime assert(@alignOf(IncrementalGraph(side).FileIndex.Optional) == @alignOf(u32)); + comptime assert(@sizeOf(IncrementalGraph(side).FileIndex.Optional) == @sizeOf(u32)); + return @ptrCast(&subslice[i.get()]); + } +}; + +/// Called at the end of BundleV2 to index bundle contents into the `IncrementalGraph`s +pub fn finalizeBundle( + dev: *DevServer, + bv2: *bun.bundle_v2.BundleV2, + result: bun.bundle_v2.BakeBundleOutput, +) !void { + const js_chunk = result.jsPseudoChunk(); + const input_file_sources = bv2.graph.input_files.items(.source); + const import_records = bv2.graph.ast.items(.import_records); + const targets = bv2.graph.ast.items(.target); + const scbs = bv2.graph.server_component_boundaries.slice(); + + var sfa = std.heap.stackFallback(4096, bv2.graph.allocator); + const stack_alloc = sfa.get(); + var scb_bitset = try bun.bit_set.DynamicBitSetUnmanaged.initEmpty(stack_alloc, input_file_sources.len); + for ( + scbs.list.items(.source_index), + scbs.list.items(.ssr_source_index), + scbs.list.items(.reference_source_index), + ) |source_index, ssr_index, ref_index| { + scb_bitset.set(source_index); + scb_bitset.set(ref_index); + if (ssr_index < scb_bitset.bit_length) + scb_bitset.set(ssr_index); + } + + const resolved_index_cache = try bv2.graph.allocator.alloc(u32, input_file_sources.len * 2); + + var ctx: bun.bake.DevServer.HotUpdateContext = .{ + .import_records = import_records, + .sources = input_file_sources, + .scbs = scbs, + .server_to_client_bitset = scb_bitset, + .resolved_index_cache = resolved_index_cache, + .server_seen_bit_set = undefined, + }; + + // Pass 1, update the graph's nodes, resolving every bundler source + // index into its `IncrementalGraph(...).FileIndex` + for ( + js_chunk.content.javascript.parts_in_chunk_in_order, + js_chunk.compile_results_for_chunk, + ) |part_range, compile_result| { + const index = part_range.source_index; + switch (targets[part_range.source_index.get()].bakeGraph()) { + .server => try dev.server_graph.receiveChunk(&ctx, index, compile_result.code(), .js, false), + .ssr => try dev.server_graph.receiveChunk(&ctx, index, compile_result.code(), .js, true), + .client => try dev.client_graph.receiveChunk(&ctx, index, compile_result.code(), .js, false), + } + } + for (result.cssChunks(), result.css_file_list.metas) |*chunk, metadata| { + const index = bun.JSAst.Index.init(chunk.entry_point.source_index); + + const code = try chunk.intermediate_output.code( + dev.allocator, + &bv2.graph, + &bv2.linker.graph, + "/_bun/TODO-import-prefix-where-is-this-used?", + chunk, + result.chunks, + null, + false, // TODO: sourcemaps true + ); + + // Create an asset entry for this file. + const abs_path = ctx.sources[index.get()].path.text; + // Later code needs to retrieve the CSS content + // The hack is to use `entry_point_id`, which is otherwise unused, to store an index. + chunk.entry_point.entry_point_id = try dev.insertOrUpdateCssAsset(abs_path, code.buffer); + + try dev.client_graph.receiveChunk(&ctx, index, "", .css, false); + + // If imported on server, there needs to be a server-side file entry + // so that edges can be attached. 
When a file is only imported on + // the server, this file is used to trace the CSS to the route. + if (metadata.imported_on_server) { + try dev.server_graph.insertCssFileOnServer( + &ctx, + index, + abs_path, + ); + } + } + + dev.client_graph.affected_by_trace = try DynamicBitSetUnmanaged.initEmpty(bv2.graph.allocator, dev.client_graph.bundled_files.count()); + defer dev.client_graph.affected_by_trace = .{}; + dev.server_graph.affected_by_trace = try DynamicBitSetUnmanaged.initEmpty(bv2.graph.allocator, dev.server_graph.bundled_files.count()); + defer dev.client_graph.affected_by_trace = .{}; + + ctx.server_seen_bit_set = try bun.bit_set.DynamicBitSetUnmanaged.initEmpty(bv2.graph.allocator, dev.server_graph.bundled_files.count()); + + // Pass 2, update the graph's edges by performing import diffing on each + // changed file, removing dependencies. This pass also flags what routes + // have been modified. + for (js_chunk.content.javascript.parts_in_chunk_in_order) |part_range| { + switch (targets[part_range.source_index.get()].bakeGraph()) { + .server, .ssr => try dev.server_graph.processChunkDependencies(&ctx, part_range.source_index, bv2.graph.allocator), + .client => try dev.client_graph.processChunkDependencies(&ctx, part_range.source_index, bv2.graph.allocator), + } + } + for (result.cssChunks(), result.css_file_list.metas) |*chunk, metadata| { + const index = bun.JSAst.Index.init(chunk.entry_point.source_index); + // TODO: index css deps + _ = index; // autofix + _ = metadata; // autofix + } + + // Index all failed files now that the incremental graph has been updated. + try dev.indexFailures(); +} + +fn insertOrUpdateCssAsset(dev: *DevServer, abs_path: []const u8, code: []const u8) !u31 { + const path_hash = bun.hash(abs_path); + const gop = try dev.css_files.getOrPut(dev.allocator, path_hash); + if (gop.found_existing) { + dev.allocator.free(gop.value_ptr.*); + } + gop.value_ptr.* = code; + return @intCast(gop.index); +} + +pub fn handleParseTaskFailure( + dev: *DevServer, + graph: bake.Graph, + abs_path: []const u8, + log: *Log, +) bun.OOM!void { + // Print each error only once + Output.prettyErrorln("Errors while bundling '{s}':", .{ + dev.relativePath(abs_path), + }); + Output.flush(); + log.print(Output.errorWriter()) catch {}; + + return switch (graph) { + .server => dev.server_graph.insertFailure(abs_path, log, false), + .ssr => dev.server_graph.insertFailure(abs_path, log, true), + .client => dev.client_graph.insertFailure(abs_path, log, false), + }; +} + +const CacheEntry = struct { + kind: FileKind, +}; + +pub fn isFileCached(dev: *DevServer, path: []const u8, side: bake.Graph) ?CacheEntry { + switch (side) { + inline else => |side_comptime| { + const g = switch (side_comptime) { + .client => &dev.client_graph, + .server => &dev.server_graph, + .ssr => &dev.server_graph, + }; + const index = g.bundled_files.getIndex(path) orelse + return null; // non-existent files are considered stale + if (!g.stale_files.isSet(index)) { + return .{ .kind = g.bundled_files.values()[index].fileKind() }; + } + return null; + }, + } +} + +fn appendOpaqueEntryPoint( + dev: *DevServer, + file_names: [][]const u8, + entry_points: *std.ArrayList(BakeEntryPoint), + comptime side: bake.Side, + optional_id: anytype, +) !void { + const file = switch (@TypeOf(optional_id)) { + OpaqueFileId.Optional => optional_id.unwrap() orelse return, + OpaqueFileId => optional_id, + else => @compileError("invalid type here"), + }; + + const file_index = fromOpaqueFileId(side, file); + if (switch (side) { + .server 
=> dev.server_graph.stale_files.isSet(file_index.get()), + .client => dev.client_graph.stale_files.isSet(file_index.get()), + }) { + try entry_points.append(.{ + .path = file_names[file_index.get()], + .graph = switch (side) { + .server => .server, + .client => .client, + }, + }); + } +} + +pub fn routeBundlePtr(dev: *DevServer, idx: RouteBundle.Index) *RouteBundle { + return &dev.route_bundles.items[idx.get()]; +} + +fn onRequest(dev: *DevServer, req: *Request, resp: *Response) void { + var params: FrameworkRouter.MatchedParams = undefined; + if (dev.router.matchSlow(req.url(), ¶ms)) |route_index| { + dev.ensureRouteIsBundled(route_index, .server_handler, req, resp) catch bun.outOfMemory(); + return; + } + + sendBuiltInNotFound(resp); +} + +fn insertRouteBundle(dev: *DevServer, route: Route.Index) !RouteBundle.Index { + const full_pattern = full_pattern: { + var buf = bake.PatternBuffer.empty; + var current: *Route = dev.router.routePtr(route); + while (true) { + buf.prependPart(current.part); + current = dev.router.routePtr(current.parent.unwrap() orelse break); + } + break :full_pattern try dev.allocator.dupe(u8, buf.slice()); + }; + errdefer dev.allocator.free(full_pattern); + + try dev.route_bundles.append(dev.allocator, .{ + .route = route, + .server_state = .unqueued, + .full_pattern = full_pattern, + .client_bundle = null, + .evaluate_failure = null, + .cached_module_list = .{}, + .cached_client_bundle_url = .{}, + .cached_css_file_array = .{}, + }); + const bundle_index = RouteBundle.Index.init(@intCast(dev.route_bundles.items.len - 1)); + dev.router.routePtr(route).bundle = bundle_index.toOptional(); + return bundle_index; +} + +fn sendTextFile(code: []const u8, content_type: []const u8, resp: *Response) void { + if (code.len == 0) { + resp.writeStatus("202 No Content"); + resp.writeHeaderInt("Content-Length", 0); + resp.end("", true); + return; + } + + resp.writeStatus("200 OK"); + resp.writeHeader("Content-Type", content_type); + resp.end(code, true); // TODO: You should never call res.end(huge buffer) +} + +const ErrorPageKind = enum { + /// Modules failed to bundle + bundler, + /// Modules failed to evaluate + evaluation, + /// Request handler threw + runtime, +}; + +fn sendSerializedFailures( + dev: *DevServer, + resp: *Response, + failures: []const SerializedFailure, + kind: ErrorPageKind, +) void { + resp.writeStatus("500 Internal Server Error"); + resp.writeHeader("Content-Type", MimeType.html.value); + + // TODO: what to do about return values here? 
+ _ = resp.write(switch (kind) { + inline else => |k| std.fmt.comptimePrint( + \\ + \\ + \\ + \\ + \\ + \\Bun - {[page_title]s} + \\ + \\ + \\ + \\ + \\"; + + if (Environment.codegen_embed) { + _ = resp.end(pre ++ @embedFile("bake-codegen/bake.error.js") ++ post, false); + } else { + _ = resp.write(pre); + _ = resp.write(bun.runtimeEmbedFile(.codegen_eager, "bake.error.js")); + _ = resp.end(post, false); + } +} + +fn sendBuiltInNotFound(resp: *Response) void { + const message = "404 Not Found"; + resp.writeStatus("404 Not Found"); + resp.end(message, true); +} + +fn sendStubErrorMessage(dev: *DevServer, route: *RouteBundle, resp: *Response, err: JSValue) void { + var sfb = std.heap.stackFallback(65536, dev.allocator); + var a = std.ArrayList(u8).initCapacity(sfb.get(), 65536) catch bun.outOfMemory(); + + a.writer().print("Server route handler for '{s}' threw while loading\n\n", .{ + route.pattern, + }) catch bun.outOfMemory(); + route.dev.vm.printErrorLikeObjectSimple(err, a.writer(), false); + + resp.writeStatus("500 Internal Server Error"); + resp.end(a.items, true); // TODO: "You should never call res.end(huge buffer)" +} + +const FileKind = enum(u2) { + /// Files that failed to bundle or do not exist on disk will appear in the + /// graph as "unknown". + unknown, + js, + css, + asset, +}; + +/// The paradigm of Bake's incremental state is to store a separate list of files +/// than the Graph in bundle_v2. When watch events happen, the bundler is run on +/// the changed files, excluding non-stale files via `isFileStale`. +/// +/// Upon bundle completion, both `client_graph` and `server_graph` have their +/// `receiveChunk` methods called with all new chunks, counting the total length +/// needed. A call to `takeBundle` joins all of the chunks, resulting in the +/// code to send to client or evaluate on the server. +/// +/// Then, `processChunkDependencies` is called on each chunk to update the +/// list of imports. When a change in imports is detected, the dependencies +/// are updated accordingly. +/// +/// Since all routes share the two graphs, bundling a new route that shared +/// a module from a previously bundled route will perform the same exclusion +/// behavior that rebuilds use. This also ensures that two routes on the server +/// do not emit duplicate dependencies. By tracing `imports` on each file in +/// the module graph recursively, the full bundle for any given route can +/// be re-materialized (required when pressing Cmd+R after any client update) +pub fn IncrementalGraph(side: bake.Side) type { + return struct { + // Unless otherwise mentioned, all data structures use DevServer's allocator. + + /// Key contents are owned by `default_allocator` + bundled_files: bun.StringArrayHashMapUnmanaged(File), + /// Track bools for files which are "stale", meaning they should be + /// re-bundled before being used. Resizing this is usually deferred + /// until after a bundle, since resizing the bit-set requires an + /// exact size, instead of the log approach that dynamic arrays use. + stale_files: DynamicBitSetUnmanaged, + + /// Start of the 'dependencies' linked list. These are the other files + /// that import used by this file. Walk this list to discover what + /// files are to be reloaded when something changes. + first_dep: ArrayListUnmanaged(EdgeIndex.Optional), + /// Start of the 'imports' linked list. These are the files that this + /// file imports. 
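+        /// (Illustrative traversal, mirroring the loops in
+        /// `processChunkDependencies` and `traceImports` below:
+        ///
+        ///     var it: ?EdgeIndex = g.first_import.items[i].unwrap();
+        ///     while (it) |edge_index| {
+        ///         const edge = g.edges.items[edge_index.get()];
+        ///         it = edge.next_import.unwrap();
+        ///         // edge.imported is a file imported by file `i`
+        ///     }
+        /// )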
+        first_import: ArrayListUnmanaged(EdgeIndex.Optional),
+        /// `File` objects act as nodes in a directional many-to-many graph,
+        /// where edges represent the imports between modules. A 'dependency'
+        /// is a file that must be notified when its `imported` file changes.
+        /// This is implemented using an array of `Edge` objects that act as
+        /// linked list nodes; each file stores the heads of both its import
+        /// and dependency lists.
+        edges: ArrayListUnmanaged(Edge),
+        /// HMR Dependencies are added and removed very frequently, but indexes
+        /// must remain stable. This free list allows re-use of freed indexes,
+        /// so garbage collection can run less often.
+        edges_free_list: ArrayListUnmanaged(EdgeIndex),
+
+        // TODO: delete
+        /// Used during an incremental update to determine what "HMR roots"
+        /// are affected. Set for all `bundled_files` that have been visited
+        /// by the dependency tracing logic.
+        ///
+        /// Outside of an incremental bundle, this is empty.
+        /// Backed by the bundler thread's arena allocator.
+        affected_by_trace: DynamicBitSetUnmanaged,
+
+        /// Byte length of every file queued for concatenation
+        current_chunk_len: usize = 0,
+        /// All part contents
+        current_chunk_parts: ArrayListUnmanaged(switch (side) {
+            .client => FileIndex,
+            // These slices do not outlive the bundler, and must
+            // be joined before its arena is deinitialized.
+            .server => []const u8,
+        }),
+
+        current_css_files: switch (side) {
+            .client => ArrayListUnmanaged([]const u8),
+            .server => void,
+        },
+
+        const empty: @This() = .{
+            .bundled_files = .{},
+            .stale_files = .{},
+
+            .first_dep = .{},
+            .first_import = .{},
+            .edges = .{},
+            .edges_free_list = .{},
+
+            .affected_by_trace = .{},
+
+            .current_chunk_len = 0,
+            .current_chunk_parts = .{},
+
+            .current_css_files = switch (side) {
+                .client => .{},
+                .server => {},
+            },
+        };
+
+        pub const File = switch (side) {
+            // The server's incremental graph does not store previously bundled
+            // code because there is only one instance of the server. Instead,
+            // it stores which module graphs it is a part of. This makes sure
+            // that recompilation knows what bundler options to use.
+            .server => struct { // TODO: make this packed(u8), I had compiler crashes before
+                /// Is this file built for the Server graph.
+                is_rsc: bool,
+                /// Is this file built for the SSR graph.
+                is_ssr: bool,
+                /// If set, the client graph contains a matching file.
+                /// The server
+                is_client_component_boundary: bool,
+                /// If this file is a route root, the route can be looked up in
+                /// the route list. This also stops dependency propagation.
+                is_route: bool,
+                /// If the file has an error, the failure can be looked up
+                /// in the `.failures` map.
+                failed: bool,
+                /// CSS and Asset files get special handling
+                kind: FileKind,
+
+                fn stopsDependencyTrace(file: @This()) bool {
+                    return file.is_client_component_boundary;
+                }
+
+                fn fileKind(file: @This()) FileKind {
+                    return file.kind;
+                }
+            },
+            .client => struct {
+                /// Allocated by default_allocator. Access with `.code()`
+                code_ptr: [*]const u8,
+                /// Separated from the pointer to reduce struct size.
+                /// Parser does not support files >4gb anyway.
+                code_len: u32,
+                flags: Flags,
+
+                const Flags = struct {
+                    /// If the file has an error, the failure can be looked up
+                    /// in the `.failures` map.
+                    failed: bool,
+                    /// For JS files, this is a component root; the server contains a matching file.
+                    /// For CSS files, this is also marked on the stylesheet that is imported from JS.
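+                    /// (Derived in `receiveChunk` from `ctx.server_to_client_bitset`.)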
+ is_hmr_root: bool, + /// This is a file is an entry point to the framework. + /// Changing this will always cause a full page reload. + is_special_framework_file: bool, + /// CSS and Asset files get special handling + kind: FileKind, + }; + + comptime { + assert(@sizeOf(@This()) == @sizeOf(usize) * 2); + assert(@alignOf(@This()) == @alignOf([*]u8)); + } + + fn init(code_slice: []const u8, flags: Flags) @This() { + return .{ + .code_ptr = code_slice.ptr, + .code_len = @intCast(code_slice.len), + .flags = flags, + }; + } + + fn code(file: @This()) []const u8 { + return file.code_ptr[0..file.code_len]; + } + + inline fn stopsDependencyTrace(_: @This()) bool { + return false; + } + + fn fileKind(file: @This()) FileKind { + return file.flags.kind; + } + }, + }; + + // If this data structure is not clear, see `DirectoryWatchStore.Dep` + // for a simpler example. It is more complicated here because this + // structure is two-way. + pub const Edge = struct { + /// The file with the `import` statement + dependency: FileIndex, + /// The file that `dependency` is importing + imported: FileIndex, + + next_import: EdgeIndex.Optional, + next_dependency: EdgeIndex.Optional, + prev_dependency: EdgeIndex.Optional, + }; + + /// An index into `bundled_files`, `stale_files`, `first_dep`, `first_import`, or `affected_by_trace` + /// Top bits cannot be relied on due to `SerializedFailure.Owner.Packed` + pub const FileIndex = bun.GenericIndex(u30, File); + pub const react_refresh_index = if (side == .client) FileIndex.init(0); + + /// An index into `edges` + const EdgeIndex = bun.GenericIndex(u32, Edge); + + fn getFileIndex(g: *@This(), path: []const u8) ?FileIndex { + return if (g.bundled_files.getIndex(path)) |i| FileIndex.init(@intCast(i)) else null; + } + + /// Tracks a bundled code chunk for cross-bundle chunks, + /// ensuring it has an entry in `bundled_files`. + /// + /// For client, takes ownership of the code slice (must be default allocated) + /// + /// For server, the code is temporarily kept in the + /// `current_chunk_parts` array, where it must live until + /// takeBundle is called. Then it can be freed. 
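+        ///
+        /// Rough usage sketch (hypothetical caller; the argument values are
+        /// illustrative, not lifted from the real bundler glue):
+        ///
+        ///     try dev.client_graph.receiveChunk(&ctx, index, compiled_js, .js, false);
+        ///     try dev.server_graph.receiveChunk(&ctx, index, compiled_js, .js, is_ssr_graph);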
+ pub fn receiveChunk( + g: *@This(), + ctx: *HotUpdateContext, + index: bun.JSAst.Index, + code: []const u8, + kind: FileKind, + is_ssr_graph: bool, + ) !void { + const dev = g.owner(); + dev.graph_safety_lock.assertLocked(); + + const abs_path = ctx.sources[index.get()].path.text; + + if (Environment.allow_assert) { + switch (kind) { + .css => bun.assert(code.len == 0), + .js => if (bun.strings.isAllWhitespace(code)) { + // Should at least contain the function wrapper + bun.Output.panic("Empty chunk is impossible: {s} {s}", .{ + abs_path, + switch (side) { + .client => "client", + .server => if (is_ssr_graph) "ssr" else "server", + }, + }); + }, + else => Output.panic("unexpected file kind: .{s}", .{@tagName(kind)}), + } + } + + g.current_chunk_len += code.len; + + // Dump to filesystem if enabled + if (bun.FeatureFlags.bake_debugging_features) if (dev.dump_dir) |dump_dir| { + const cwd = dev.root; + var a: bun.PathBuffer = undefined; + var b: [bun.MAX_PATH_BYTES * 2]u8 = undefined; + const rel_path = bun.path.relativeBufZ(&a, cwd, abs_path); + const size = std.mem.replacementSize(u8, rel_path, "../", "_.._/"); + _ = std.mem.replace(u8, rel_path, "../", "_.._/", &b); + const rel_path_escaped = b[0..size]; + dumpBundle(dump_dir, switch (side) { + .client => .client, + .server => if (is_ssr_graph) .ssr else .server, + }, rel_path_escaped, code, true) catch |err| { + bun.handleErrorReturnTrace(err, @errorReturnTrace()); + Output.warn("Could not dump bundle: {}", .{err}); + }; + }; + + const gop = try g.bundled_files.getOrPut(dev.allocator, abs_path); + const file_index = FileIndex.init(@intCast(gop.index)); + + if (!gop.found_existing) { + gop.key_ptr.* = try bun.default_allocator.dupe(u8, abs_path); + try g.first_dep.append(dev.allocator, .none); + try g.first_import.append(dev.allocator, .none); + } + + if (g.stale_files.bit_length > gop.index) { + g.stale_files.unset(gop.index); + } + + ctx.getCachedIndex(side, index).* = FileIndex.init(@intCast(gop.index)); + + switch (side) { + .client => { + if (gop.found_existing) { + if (kind == .js) + bun.default_allocator.free(gop.value_ptr.code()); + + if (gop.value_ptr.flags.failed) { + const kv = dev.bundling_failures.fetchSwapRemoveAdapted( + SerializedFailure.Owner{ .client = file_index }, + SerializedFailure.ArrayHashAdapter{}, + ) orelse + Output.panic("Missing SerializedFailure in IncrementalGraph", .{}); + try dev.incremental_result.failures_removed.append( + dev.allocator, + kv.key, + ); + } + } + const flags: File.Flags = .{ + .failed = false, + .is_hmr_root = ctx.server_to_client_bitset.isSet(index.get()), + .is_special_framework_file = false, + .kind = kind, + }; + if (kind == .css) { + if (!gop.found_existing or gop.value_ptr.code_len == 0) { + gop.value_ptr.* = File.init(try std.fmt.allocPrint( + dev.allocator, + css_prefix ++ "/{}.css", + .{std.fmt.fmtSliceHexLower(std.mem.asBytes(&bun.hash(abs_path)))}, + ), flags); + } else { + // The key is just the file-path + gop.value_ptr.flags = flags; + } + } else { + gop.value_ptr.* = File.init(code, flags); + } + try g.current_chunk_parts.append(dev.allocator, file_index); + }, + .server => { + if (!gop.found_existing) { + const client_component_boundary = ctx.server_to_client_bitset.isSet(index.get()); + + gop.value_ptr.* = .{ + .is_rsc = !is_ssr_graph, + .is_ssr = is_ssr_graph, + .is_route = false, + .is_client_component_boundary = client_component_boundary, + .failed = false, + .kind = kind, + }; + + if (client_component_boundary) { + try 
dev.incremental_result.client_components_added.append(dev.allocator, file_index); + } + } else { + gop.value_ptr.kind = kind; + + if (is_ssr_graph) { + gop.value_ptr.is_ssr = true; + } else { + gop.value_ptr.is_rsc = true; + } + + if (ctx.server_to_client_bitset.isSet(index.get())) { + gop.value_ptr.is_client_component_boundary = true; + try dev.incremental_result.client_components_added.append(dev.allocator, file_index); + } else if (gop.value_ptr.is_client_component_boundary) { + const client_graph = &g.owner().client_graph; + const client_index = client_graph.getFileIndex(gop.key_ptr.*) orelse + Output.panic("Client graph's SCB was already deleted", .{}); + try dev.incremental_result.delete_client_files_later.append(g.owner().allocator, client_index); + gop.value_ptr.is_client_component_boundary = false; + + try dev.incremental_result.client_components_removed.append(dev.allocator, file_index); + } + + if (gop.value_ptr.failed) { + gop.value_ptr.failed = false; + const kv = dev.bundling_failures.fetchSwapRemoveAdapted( + SerializedFailure.Owner{ .server = file_index }, + SerializedFailure.ArrayHashAdapter{}, + ) orelse + Output.panic("Missing failure in IncrementalGraph", .{}); + try dev.incremental_result.failures_removed.append( + dev.allocator, + kv.key, + ); + } + } + try g.current_chunk_parts.append(dev.allocator, code); + }, + } + } + + const TempLookup = extern struct { + edge_index: EdgeIndex, + seen: bool, + + const HashTable = AutoArrayHashMapUnmanaged(FileIndex, TempLookup); + }; + + /// Second pass of IncrementalGraph indexing + /// - Updates dependency information for each file + /// - Resolves what the HMR roots are + pub fn processChunkDependencies( + g: *@This(), + ctx: *HotUpdateContext, + bundle_graph_index: bun.JSAst.Index, + temp_alloc: Allocator, + ) bun.OOM!void { + const log = bun.Output.scoped(.processChunkDependencies, false); + const file_index: FileIndex = ctx.getCachedIndex(side, bundle_graph_index).*; + log("index id={d} {}:", .{ + file_index.get(), + bun.fmt.quote(g.bundled_files.keys()[file_index.get()]), + }); + + var quick_lookup: TempLookup.HashTable = .{}; + defer quick_lookup.deinit(temp_alloc); + + { + var it: ?EdgeIndex = g.first_import.items[file_index.get()].unwrap(); + while (it) |edge_index| { + const dep = g.edges.items[edge_index.get()]; + it = dep.next_import.unwrap(); + assert(dep.dependency == file_index); + try quick_lookup.putNoClobber(temp_alloc, dep.imported, .{ + .seen = false, + .edge_index = edge_index, + }); + } + } + + var new_imports: EdgeIndex.Optional = .none; + defer g.first_import.items[file_index.get()] = new_imports; + + if (side == .server) { + if (ctx.server_seen_bit_set.isSet(file_index.get())) return; + + const file = &g.bundled_files.values()[file_index.get()]; + + // Process both files in the server-components graph at the same + // time. If they were done separately, the second would detach + // the edges the first added. + if (file.is_rsc and file.is_ssr) { + // The non-ssr file is always first. 
+ // const ssr_index = ctx.scbs.getSSRIndex(bundle_graph_index.get()) orelse { + // @panic("Unexpected missing server-component-boundary entry"); + // }; + // try g.processChunkImportRecords(ctx, &quick_lookup, &new_imports, file_index, bun.JSAst.Index.init(ssr_index)); + } + } + + try g.processChunkImportRecords(ctx, &quick_lookup, &new_imports, file_index, bundle_graph_index); + + // '.seen = false' means an import was removed and should be freed + for (quick_lookup.values()) |val| { + if (!val.seen) { + // Unlink from dependency list. At this point the edge is + // already detached from the import list. + g.disconnectEdgeFromDependencyList(val.edge_index); + + // With no references to this edge, it can be freed + g.freeEdge(val.edge_index); + } + } + + if (side == .server) { + // Follow this file to the route to mark it as stale. + try g.traceDependencies(file_index, .stop_at_boundary); + } else { + // TODO: Follow this file to the HMR root (info to determine is currently not stored) + // without this, changing a client-only file will not mark the route's client bundle as stale + } + } + + fn disconnectEdgeFromDependencyList(g: *@This(), edge_index: EdgeIndex) void { + const edge = &g.edges.items[edge_index.get()]; + igLog("detach edge={d} | id={d} {} -> id={d} {}", .{ + edge_index.get(), + edge.dependency.get(), + bun.fmt.quote(g.bundled_files.keys()[edge.dependency.get()]), + edge.imported.get(), + bun.fmt.quote(g.bundled_files.keys()[edge.imported.get()]), + }); + if (edge.prev_dependency.unwrap()) |prev| { + const prev_dependency = &g.edges.items[prev.get()]; + prev_dependency.next_dependency = edge.next_dependency; + } else { + assert(g.first_dep.items[edge.imported.get()].unwrap() == edge_index); + g.first_dep.items[edge.imported.get()] = .none; + } + if (edge.next_dependency.unwrap()) |next| { + const next_dependency = &g.edges.items[next.get()]; + next_dependency.prev_dependency = edge.prev_dependency; + } + } + + fn processChunkImportRecords( + g: *@This(), + ctx: *HotUpdateContext, + quick_lookup: *TempLookup.HashTable, + new_imports: *EdgeIndex.Optional, + file_index: FileIndex, + index: bun.JSAst.Index, + ) !void { + const log = bun.Output.scoped(.processChunkDependencies, false); + for (ctx.import_records[index.get()].slice()) |import_record| { + if (!import_record.source_index.isRuntime()) try_index_record: { + const imported_file_index = if (import_record.source_index.isInvalid()) + if (std.fs.path.isAbsolute(import_record.path.text)) + FileIndex.init(@intCast( + g.bundled_files.getIndex(import_record.path.text) orelse break :try_index_record, + )) + else + break :try_index_record + else + ctx.getCachedIndex(side, import_record.source_index).*; + + if (quick_lookup.getPtr(imported_file_index)) |lookup| { + // If the edge has already been seen, it will be skipped + // to ensure duplicate edges never exist. + if (lookup.seen) continue; + lookup.seen = true; + + const dep = &g.edges.items[lookup.edge_index.get()]; + dep.next_import = new_imports.*; + new_imports.* = lookup.edge_index.toOptional(); + } else { + // A new edge is needed to represent the dependency and import. 
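+                    // The new edge is prepended to two lists at once: this
+                    // file's import list (singly linked through `next_import`)
+                    // and the imported file's dependency list (doubly linked
+                    // through `next_dependency`/`prev_dependency`, so it can be
+                    // unlinked later without a scan).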
+ const first_dep = &g.first_dep.items[imported_file_index.get()]; + const edge = try g.newEdge(.{ + .next_import = new_imports.*, + .next_dependency = first_dep.*, + .prev_dependency = .none, + .imported = imported_file_index, + .dependency = file_index, + }); + if (first_dep.*.unwrap()) |dep| { + g.edges.items[dep.get()].prev_dependency = edge.toOptional(); + } + new_imports.* = edge.toOptional(); + first_dep.* = edge.toOptional(); + + log("attach edge={d} | id={d} {} -> id={d} {}", .{ + edge.get(), + file_index.get(), + bun.fmt.quote(g.bundled_files.keys()[file_index.get()]), + imported_file_index.get(), + bun.fmt.quote(g.bundled_files.keys()[imported_file_index.get()]), + }); + } + } + } + } + + const TraceDependencyKind = enum { + stop_at_boundary, + no_stop, + }; + + fn traceDependencies(g: *@This(), file_index: FileIndex, trace_kind: TraceDependencyKind) !void { + g.owner().graph_safety_lock.assertLocked(); + + if (Environment.enable_logs) { + igLog("traceDependencies(.{s}, {}{s})", .{ + @tagName(side), + bun.fmt.quote(g.bundled_files.keys()[file_index.get()]), + if (g.affected_by_trace.isSet(file_index.get())) " [already visited]" else "", + }); + } + + if (g.affected_by_trace.isSet(file_index.get())) + return; + g.affected_by_trace.set(file_index.get()); + + const file = g.bundled_files.values()[file_index.get()]; + + switch (side) { + .server => { + const dev = g.owner(); + if (file.is_route) { + const route_index = dev.route_lookup.get(file_index) orelse + Output.panic("Route not in lookup index: {d} {}", .{ file_index.get(), bun.fmt.quote(g.bundled_files.keys()[file_index.get()]) }); + igLog("\\<- Route", .{}); + + try dev.incremental_result.routes_affected.append(dev.allocator, route_index); + } + if (file.is_client_component_boundary) { + try dev.incremental_result.client_components_affected.append(dev.allocator, file_index); + } + }, + .client => { + if (file.flags.is_hmr_root) { + const dev = g.owner(); + const key = g.bundled_files.keys()[file_index.get()]; + const index = dev.server_graph.getFileIndex(key) orelse + Output.panic("Server Incremental Graph is missing component for {}", .{bun.fmt.quote(key)}); + try dev.server_graph.traceDependencies(index, trace_kind); + } + }, + } + + // Certain files do not propagate updates to dependencies. + // This is how updating a client component doesn't cause + // a server-side reload. 
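+            // (Example, with hypothetical paths: if `routes/index.tsx` imports
+            // `Counter.tsx` and `Counter.tsx` is a client component boundary,
+            // editing `Counter.tsx` stops the server-side trace right here; the
+            // boundary was already recorded in `client_components_affected`
+            // above, so only the route's client bundle is invalidated.)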
+ if (trace_kind == .stop_at_boundary) { + if (file.stopsDependencyTrace()) { + igLog("\\<- this file stops propagation", .{}); + return; + } + } + + // Recurse + var it: ?EdgeIndex = g.first_dep.items[file_index.get()].unwrap(); + while (it) |dep_index| { + const edge = g.edges.items[dep_index.get()]; + it = edge.next_dependency.unwrap(); + try g.traceDependencies(edge.dependency, trace_kind); + } + } + + fn traceImports(g: *@This(), file_index: FileIndex, goal: TraceImportGoal) !void { + g.owner().graph_safety_lock.assertLocked(); + + if (Environment.enable_logs) { + igLog("traceImports(.{s}, {}{s})", .{ + @tagName(side), + bun.fmt.quote(g.bundled_files.keys()[file_index.get()]), + if (g.affected_by_trace.isSet(file_index.get())) " [already visited]" else "", + }); + } + + if (g.affected_by_trace.isSet(file_index.get())) + return; + g.affected_by_trace.set(file_index.get()); + + const file = g.bundled_files.values()[file_index.get()]; + + switch (side) { + .server => { + if (file.is_client_component_boundary or file.kind == .css) { + const dev = g.owner(); + const key = g.bundled_files.keys()[file_index.get()]; + const index = dev.client_graph.getFileIndex(key) orelse + Output.panic("Client Incremental Graph is missing component for {}", .{bun.fmt.quote(key)}); + try dev.client_graph.traceImports(index, goal); + } + }, + .client => { + assert(!g.stale_files.isSet(file_index.get())); // should not be left stale + if (file.flags.kind == .css) { + if (goal.find_css) { + try g.current_css_files.append(g.owner().allocator, file.code()); + } + + // Do not count css files as a client module + // and also do not trace its dependencies. + // + // The server version of this code does not need to + // early return, since server css files never have + // imports. + return; + } + + if (goal.find_client_modules) { + try g.current_chunk_parts.append(g.owner().allocator, file_index); + g.current_chunk_len += file.code_len; + } + }, + } + + // Recurse + var it: ?EdgeIndex = g.first_import.items[file_index.get()].unwrap(); + while (it) |dep_index| { + const edge = g.edges.items[dep_index.get()]; + it = edge.next_import.unwrap(); + try g.traceImports(edge.imported, goal); + } + } + + /// Never takes ownership of `abs_path` + /// Marks a chunk but without any content. Used to track dependencies to files that don't exist. 
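+        /// (For example, `DirectoryWatchStore.trackResolutionFailure` below
+        /// calls this to register the importing file of a failed resolution:
+        ///
+        ///     const index = try dev.client_graph.insertStale(import_source, false);
+        /// )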
+ pub fn insertStale(g: *@This(), abs_path: []const u8, is_ssr_graph: bool) bun.OOM!FileIndex { + return g.insertStaleExtra(abs_path, is_ssr_graph, false); + } + + pub fn insertStaleExtra(g: *@This(), abs_path: []const u8, is_ssr_graph: bool, is_route: bool) bun.OOM!FileIndex { + g.owner().graph_safety_lock.assertLocked(); + + debug.log("Insert stale: {s}", .{abs_path}); + const gop = try g.bundled_files.getOrPut(g.owner().allocator, abs_path); + const file_index = FileIndex.init(@intCast(gop.index)); + + if (!gop.found_existing) { + gop.key_ptr.* = try bun.default_allocator.dupe(u8, abs_path); + try g.first_dep.append(g.owner().allocator, .none); + try g.first_import.append(g.owner().allocator, .none); + } else { + if (side == .server) { + if (is_route) gop.value_ptr.*.is_route = is_route; + } + } + + if (g.stale_files.bit_length > gop.index) { + g.stale_files.set(gop.index); + } + + switch (side) { + .client => { + gop.value_ptr.* = File.init("", .{ + .failed = false, + .is_hmr_root = false, + .is_special_framework_file = false, + .kind = .unknown, + }); + }, + .server => { + if (!gop.found_existing) { + gop.value_ptr.* = .{ + .is_rsc = !is_ssr_graph, + .is_ssr = is_ssr_graph, + .is_route = is_route, + .is_client_component_boundary = false, + .failed = false, + .kind = .unknown, + }; + } else if (is_ssr_graph) { + gop.value_ptr.is_ssr = true; + } else { + gop.value_ptr.is_rsc = true; + } + }, + } + + return file_index; + } + + /// Server CSS files are just used to be targets for graph traversal. + /// Its content lives only on the client. + pub fn insertCssFileOnServer(g: *@This(), ctx: *HotUpdateContext, index: bun.JSAst.Index, abs_path: []const u8) bun.OOM!void { + g.owner().graph_safety_lock.assertLocked(); + + debug.log("Insert stale: {s}", .{abs_path}); + const gop = try g.bundled_files.getOrPut(g.owner().allocator, abs_path); + const file_index = FileIndex.init(@intCast(gop.index)); + + if (!gop.found_existing) { + gop.key_ptr.* = try bun.default_allocator.dupe(u8, abs_path); + try g.first_dep.append(g.owner().allocator, .none); + try g.first_import.append(g.owner().allocator, .none); + } + + switch (side) { + .client => @compileError("not implemented: use receiveChunk"), + .server => { + gop.value_ptr.* = .{ + .is_rsc = false, + .is_ssr = false, + .is_route = false, + .is_client_component_boundary = false, + .failed = false, + .kind = .css, + }; + }, + } + + ctx.getCachedIndex(.server, index).* = file_index; + } + + pub fn insertFailure( + g: *@This(), + abs_path: []const u8, + log: *const Log, + is_ssr_graph: bool, + ) bun.OOM!void { + g.owner().graph_safety_lock.assertLocked(); + + debug.log("Insert stale: {s}", .{abs_path}); + const gop = try g.bundled_files.getOrPut(g.owner().allocator, abs_path); + const file_index = FileIndex.init(@intCast(gop.index)); + + if (!gop.found_existing) { + gop.key_ptr.* = try bun.default_allocator.dupe(u8, abs_path); + try g.first_dep.append(g.owner().allocator, .none); + try g.first_import.append(g.owner().allocator, .none); + } + + if (g.stale_files.bit_length > gop.index) { + g.stale_files.set(gop.index); + } + + switch (side) { + .client => { + gop.value_ptr.* = File.init("", .{ + .failed = true, + .is_hmr_root = false, + .is_special_framework_file = false, + .kind = .unknown, + }); + }, + .server => { + if (!gop.found_existing) { + gop.value_ptr.* = .{ + .is_rsc = !is_ssr_graph, + .is_ssr = is_ssr_graph, + .is_route = false, + .is_client_component_boundary = false, + .failed = true, + .kind = .unknown, + }; + } else { + if (is_ssr_graph) { + 
gop.value_ptr.is_ssr = true; + } else { + gop.value_ptr.is_rsc = true; + } + gop.value_ptr.failed = true; + } + }, + } + + const dev = g.owner(); + + const fail_owner: SerializedFailure.Owner = switch (side) { + .server => .{ .server = file_index }, + .client => .{ .client = file_index }, + }; + const failure = try SerializedFailure.initFromLog( + fail_owner, + dev.relativePath(abs_path), + log.msgs.items, + ); + const fail_gop = try dev.bundling_failures.getOrPut(dev.allocator, failure); + try dev.incremental_result.failures_added.append(dev.allocator, failure); + if (fail_gop.found_existing) { + try dev.incremental_result.failures_removed.append(dev.allocator, fail_gop.key_ptr.*); + fail_gop.key_ptr.* = failure; + } + } + + pub fn ensureStaleBitCapacity(g: *@This(), are_new_files_stale: bool) !void { + try g.stale_files.resize( + g.owner().allocator, + std.mem.alignForward( + usize, + @max(g.bundled_files.count(), g.stale_files.bit_length), + // allocate 8 in 8 usize chunks + std.mem.byte_size_in_bits * @sizeOf(usize) * 8, + ), + are_new_files_stale, + ); + } + + pub fn invalidate(g: *@This(), paths: []const []const u8, out_paths: *std.ArrayList(BakeEntryPoint)) !void { + g.owner().graph_safety_lock.assertLocked(); + const values = g.bundled_files.values(); + for (paths) |path| { + const index = g.bundled_files.getIndex(path) orelse { + // cannot enqueue because we don't know what targets to + // bundle for. instead, a failing bundle must retrieve the + // list of files and add them as stale. + continue; + }; + g.stale_files.set(index); + const data = &values[index]; + switch (side) { + .client => { + // When re-bundling SCBs, only bundle the server. Otherwise + // the bundler gets confused and bundles both sides without + // knowledge of the boundary between them. + if (data.flags.kind == .css) + try out_paths.append(BakeEntryPoint.initCss(path)) + else if (!data.flags.is_hmr_root) + try out_paths.append(BakeEntryPoint.init(path, .client)); + }, + .server => { + if (data.is_rsc) + try out_paths.append(BakeEntryPoint.init(path, .server)); + if (data.is_ssr and !data.is_client_component_boundary) + try out_paths.append(BakeEntryPoint.init(path, .ssr)); + }, + } + } + } + + fn reset(g: *@This()) void { + g.current_chunk_len = 0; + g.current_chunk_parts.clearRetainingCapacity(); + if (side == .client) g.current_css_files.clearRetainingCapacity(); + } + + pub fn takeBundle( + g: *@This(), + kind: ChunkKind, + initial_response_entry_point: []const u8, + ) ![]const u8 { + var chunk = std.ArrayList(u8).init(g.owner().allocator); + try g.takeBundleToList(kind, &chunk, initial_response_entry_point); + bun.assert(chunk.items.len == chunk.capacity); + return chunk.items; + } + + pub fn takeBundleToList( + g: *@This(), + kind: ChunkKind, + list: *std.ArrayList(u8), + initial_response_entry_point: []const u8, + ) !void { + g.owner().graph_safety_lock.assertLocked(); + // initial bundle needs at least the entry point + // hot updates shouldn't be emitted if there are no chunks + assert(g.current_chunk_len > 0); + + const runtime = switch (kind) { + .initial_response => bun.bake.getHmrRuntime(side), + .hmr_chunk => "({\n", + }; + + // A small amount of metadata is present at the end of the chunk + // to inform the HMR runtime some crucial entry-point info. The + // exact upper bound of this can be calculated, but is not to + // avoid worrying about windows paths. 
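+            // Roughly, the tail written below looks like (fields in brackets
+            // are conditional, values illustrative):
+            //
+            //   client initial response: }, { main: "<entry>", version: "<hash>"[, refresh: "<source>"] })
+            //   server initial response: }, { main: "<entry>"[, separateSSRGraph: true] })
+            //   hmr chunk:               })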
+ var end_sfa = std.heap.stackFallback(65536, g.owner().allocator); + var end_list = std.ArrayList(u8).initCapacity(end_sfa.get(), 65536) catch unreachable; + defer end_list.deinit(); + const end = end: { + const w = end_list.writer(); + switch (kind) { + .initial_response => { + const fw = g.owner().framework; + try w.writeAll("}, {\n main: "); + try bun.js_printer.writeJSONString( + g.owner().relativePath(initial_response_entry_point), + @TypeOf(w), + w, + .utf8, + ); + switch (side) { + .client => { + try w.writeAll(",\n version: \""); + try w.writeAll(&g.owner().configuration_hash_key); + try w.writeAll("\""); + if (fw.react_fast_refresh) |rfr| { + try w.writeAll(",\n refresh: "); + try bun.js_printer.writeJSONString( + g.owner().relativePath(rfr.import_source), + @TypeOf(w), + w, + .utf8, + ); + } + }, + .server => { + if (fw.server_components) |sc| { + if (sc.separate_ssr_graph) { + try w.writeAll(",\n separateSSRGraph: true"); + } + } + }, + } + try w.writeAll("\n})"); + }, + .hmr_chunk => { + try w.writeAll("\n})"); + }, + } + break :end end_list.items; + }; + + const files = g.bundled_files.values(); + + const start = list.items.len; + if (start == 0) + try list.ensureTotalCapacityPrecise(g.current_chunk_len + runtime.len + end.len) + else + try list.ensureUnusedCapacity(g.current_chunk_len + runtime.len + end.len); + + list.appendSliceAssumeCapacity(runtime); + for (g.current_chunk_parts.items) |entry| { + list.appendSliceAssumeCapacity(switch (side) { + // entry is an index into files + .client => files[entry.get()].code(), + // entry is the '[]const u8' itself + .server => entry, + }); + } + list.appendSliceAssumeCapacity(end); + + if (bun.FeatureFlags.bake_debugging_features) if (g.owner().dump_dir) |dump_dir| { + const rel_path_escaped = "latest_chunk.js"; + dumpBundle(dump_dir, switch (side) { + .client => .client, + .server => .server, + }, rel_path_escaped, list.items[start..], false) catch |err| { + bun.handleErrorReturnTrace(err, @errorReturnTrace()); + Output.warn("Could not dump bundle: {}", .{err}); + }; + }; + } + + fn disconnectAndDeleteFile(g: *@This(), file_index: FileIndex) void { + const last = FileIndex.init(@intCast(g.bundled_files.count() - 1)); + + bun.assert(g.bundled_files.count() > 1); // never remove all files + bun.assert(g.first_dep.items[file_index.get()] == .none); // must have no dependencies + + // Disconnect all imports + { + var it: ?EdgeIndex = g.first_import.items[file_index.get()].unwrap(); + while (it) |edge_index| { + const dep = g.edges.items[edge_index.get()]; + it = dep.next_import.unwrap(); + assert(dep.dependency == file_index); + + g.disconnectEdgeFromDependencyList(edge_index); + g.freeEdge(edge_index); + } + } + + // TODO: it is infeasible to do this since FrameworkRouter contains file indices + // to the server graph + { + return; + } + + g.bundled_files.swapRemoveAt(file_index.get()); + + // Move out-of-line data from `last` to replace `file_index` + _ = g.first_dep.swapRemove(file_index.get()); + _ = g.first_import.swapRemove(file_index.get()); + + if (file_index != last) { + g.stale_files.setValue(file_index.get(), g.stale_files.isSet(last.get())); + + // This set is not always initialized, so ignore if it's empty + if (g.affected_by_trace.bit_length > 0) { + g.affected_by_trace.setValue(file_index.get(), g.affected_by_trace.isSet(last.get())); + } + + // Adjust all referenced edges to point to the new file + { + var it: ?EdgeIndex = g.first_import.items[file_index.get()].unwrap(); + while (it) |edge_index| { + const dep = 
&g.edges.items[edge_index.get()]; + it = dep.next_import.unwrap(); + assert(dep.dependency == last); + dep.dependency = file_index; + } + } + { + var it: ?EdgeIndex = g.first_dep.items[file_index.get()].unwrap(); + while (it) |edge_index| { + const dep = &g.edges.items[edge_index.get()]; + it = dep.next_dependency.unwrap(); + assert(dep.imported == last); + dep.imported = file_index; + } + } + } + } + + fn newEdge(g: *@This(), edge: Edge) !EdgeIndex { + if (g.edges_free_list.popOrNull()) |index| { + g.edges.items[index.get()] = edge; + return index; + } + + const index = EdgeIndex.init(@intCast(g.edges.items.len)); + try g.edges.append(g.owner().allocator, edge); + return index; + } + + /// Does nothing besides release the `Edge` for reallocation by `newEdge` + /// Caller must detach the dependency from the linked list it is in. + fn freeEdge(g: *@This(), edge_index: EdgeIndex) void { + if (Environment.isDebug) { + g.edges.items[edge_index.get()] = undefined; + } + + if (edge_index.get() == (g.edges.items.len - 1)) { + g.edges.items.len -= 1; + } else { + g.edges_free_list.append(g.owner().allocator, edge_index) catch { + // Leak an edge object; Ok since it may get cleaned up by + // the next incremental graph garbage-collection cycle. + }; + } + } + + pub fn owner(g: *@This()) *DevServer { + return @alignCast(@fieldParentPtr(@tagName(side) ++ "_graph", g)); + } + }; +} + +const IncrementalResult = struct { + /// When tracing a file's dependencies via `traceDependencies`, this is + /// populated with the hit `Route.Index`s. To know what `RouteBundle`s + /// are affected, the route graph must be traced downwards. + /// Tracing is used for multiple purposes. + routes_affected: ArrayListUnmanaged(RouteIndexAndRecurseFlag), + + // Following three fields are populated during `receiveChunk` + + /// Components to add to the client manifest + client_components_added: ArrayListUnmanaged(IncrementalGraph(.server).FileIndex), + /// Components to add to the client manifest + client_components_removed: ArrayListUnmanaged(IncrementalGraph(.server).FileIndex), + /// This list acts as a free list. The contents of these slices must remain + /// valid; they have to be so the affected routes can be cleared of the + /// failures and potentially be marked valid. At the end of an + /// incremental update, the slices are freed. + failures_removed: ArrayListUnmanaged(SerializedFailure), + + /// Client boundaries that have been added or modified. At the end of a hot + /// update, these are traced to their route to mark the bundles as stale (to + /// be generated on Cmd+R) + /// + /// Populated during `traceDependencies` + client_components_affected: ArrayListUnmanaged(IncrementalGraph(.server).FileIndex), + + /// The list of failures which will have to be traced to their route. Such + /// tracing is deferred until the second pass of finalizeBundler as the + /// dependency graph may not fully exist at the time the failure is indexed. + /// + /// Populated from within the bundler via `handleParseTaskFailure` + failures_added: ArrayListUnmanaged(SerializedFailure), + + /// Removing files clobbers indices, so removing anything is deferred. 
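+    /// (Drained at the end of `reload`: the indices are sorted in descending
+    /// order and `disconnectAndDeleteFile` is called for each one, so removing
+    /// a file cannot invalidate an index that is still pending removal.)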
+ // TODO: remove + delete_client_files_later: ArrayListUnmanaged(IncrementalGraph(.client).FileIndex), + + const empty: IncrementalResult = .{ + .routes_affected = .{}, + .failures_removed = .{}, + .failures_added = .{}, + .client_components_added = .{}, + .client_components_removed = .{}, + .client_components_affected = .{}, + .delete_client_files_later = .{}, + }; + + fn reset(result: *IncrementalResult) void { + result.routes_affected.clearRetainingCapacity(); + assert(result.failures_removed.items.len == 0); + result.failures_added.clearRetainingCapacity(); + result.client_components_added.clearRetainingCapacity(); + result.client_components_removed.clearRetainingCapacity(); + result.client_components_affected.clearRetainingCapacity(); + } +}; + +const GraphTraceState = struct { + client_bits: DynamicBitSetUnmanaged, + server_bits: DynamicBitSetUnmanaged, + + fn deinit(gts: *GraphTraceState, alloc: Allocator) void { + gts.client_bits.deinit(alloc); + gts.server_bits.deinit(alloc); + } + + fn clear(gts: *GraphTraceState) void { + gts.server_bits.setAll(false); + gts.client_bits.setAll(false); + } +}; + +const TraceImportGoal = struct { + // gts: *GraphTraceState, + find_css: bool = false, + find_client_modules: bool = false, +}; + +fn initGraphTraceState(dev: *const DevServer, sfa: Allocator) !GraphTraceState { + const server_bits = try DynamicBitSetUnmanaged.initEmpty(sfa, dev.server_graph.bundled_files.count()); + errdefer server_bits.deinit(sfa); + const client_bits = try DynamicBitSetUnmanaged.initEmpty(sfa, dev.client_graph.bundled_files.count()); + return .{ .server_bits = server_bits, .client_bits = client_bits }; +} + +/// When a file fails to import a relative path, directory watchers are added so +/// that when a matching file is created, the dependencies can be rebuilt. This +/// handles HMR cases where a user writes an import before creating the file, +/// or moves files around. +/// +/// This structure manages those watchers, including releasing them once +/// import resolution failures are solved. +const DirectoryWatchStore = struct { + /// This guards all store state + lock: Mutex, + + /// List of active watchers. Can be re-ordered on removal + watches: bun.StringArrayHashMapUnmanaged(Entry), + dependencies: ArrayListUnmanaged(Dep), + /// Dependencies cannot be re-ordered. This list tracks what indexes are free. + dependencies_free_list: ArrayListUnmanaged(Dep.Index), + + const empty: DirectoryWatchStore = .{ + .lock = .{}, + .watches = .{}, + .dependencies = .{}, + .dependencies_free_list = .{}, + }; + + pub fn owner(store: *DirectoryWatchStore) *DevServer { + return @alignCast(@fieldParentPtr("directory_watchers", store)); + } + + pub fn trackResolutionFailure( + store: *DirectoryWatchStore, + import_source: []const u8, + specifier: []const u8, + renderer: bake.Graph, + ) bun.OOM!void { + store.lock.lock(); + defer store.lock.unlock(); + + // When it does not resolve to a file path, there is + // nothing to track. Bake does not watch node_modules. + if (!(bun.strings.startsWith(specifier, "./") or + bun.strings.startsWith(specifier, "../"))) return; + if (!std.fs.path.isAbsolute(import_source)) return; + + const joined = bun.path.joinAbs(bun.path.dirname(import_source, .auto), .auto, specifier); + const dir = bun.path.dirname(joined, .auto); + + // `import_source` is not a stable string. let's share memory with the file graph. 
+ // this requires that + const dev = store.owner(); + const owned_file_path = switch (renderer) { + .client => path: { + const index = try dev.client_graph.insertStale(import_source, false); + break :path dev.client_graph.bundled_files.keys()[index.get()]; + }, + .server, .ssr => path: { + const index = try dev.client_graph.insertStale(import_source, renderer == .ssr); + break :path dev.client_graph.bundled_files.keys()[index.get()]; + }, + }; + + store.insert(dir, owned_file_path, specifier) catch |err| switch (err) { + error.Ignore => {}, // ignoring watch errors. + error.OutOfMemory => |e| return e, + }; + } + + /// `dir_name_to_watch` is cloned + /// `file_path` must have lifetime that outlives the watch + /// `specifier` is cloned + fn insert( + store: *DirectoryWatchStore, + dir_name_to_watch: []const u8, + file_path: []const u8, + specifier: []const u8, + ) !void { + // TODO: watch the parent dir too. + const dev = store.owner(); + + debug.log("DirectoryWatchStore.insert({}, {}, {})", .{ + bun.fmt.quote(dir_name_to_watch), + bun.fmt.quote(file_path), + bun.fmt.quote(specifier), + }); + + if (store.dependencies_free_list.items.len == 0) + try store.dependencies.ensureUnusedCapacity(dev.allocator, 1); + + const gop = try store.watches.getOrPut(dev.allocator, dir_name_to_watch); + if (gop.found_existing) { + const specifier_cloned = try dev.allocator.dupe(u8, specifier); + errdefer dev.allocator.free(specifier_cloned); + + // TODO: check for dependency + + const dep = store.appendDepAssumeCapacity(.{ + .next = gop.value_ptr.first_dep.toOptional(), + .source_file_path = file_path, + .specifier = specifier_cloned, + }); + gop.value_ptr.first_dep = dep; + + return; + } + errdefer store.watches.swapRemoveAt(gop.index); + + // Try to use an existing open directory handle + const cache_fd = if (dev.server_bundler.resolver.readDirInfo(dir_name_to_watch) catch null) |cache| fd: { + const fd = cache.getFileDescriptor(); + break :fd if (fd == .zero) null else fd; + } else null; + + const fd, const owned_fd = if (cache_fd) |fd| + .{ fd, false } + else + .{ + switch (bun.sys.open( + &(std.posix.toPosixPath(dir_name_to_watch) catch |err| switch (err) { + error.NameTooLong => return, // wouldn't be able to open, ignore + }), + bun.O.DIRECTORY, + 0, + )) { + .result => |fd| fd, + .err => |err| switch (err.getErrno()) { + // If this directory doesn't exist, a watcher should be + // placed on the parent directory. Then, if this + // directory is later created, the watcher can be + // properly initialized. This would happen if you write + // an import path like `./dir/whatever/hello.tsx` and + // `dir` does not exist, Bun must place a watcher on + // `.`, see the creation of `dir`, and repeat until it + // can open a watcher on `whatever` to see the creation + // of `hello.tsx` + .NOENT => { + // TODO: implement that. 
for now it ignores + return; + }, + .NOTDIR => return error.Ignore, // ignore + else => { + bun.todoPanic(@src(), "log watcher error", .{}); + }, + }, + }, + true, + }; + errdefer _ = if (owned_fd) bun.sys.close(fd); + + debug.log("-> fd: {} ({s})", .{ + fd, + if (owned_fd) "from dir cache" else "owned fd", + }); + + const dir_name = try dev.allocator.dupe(u8, dir_name_to_watch); + errdefer dev.allocator.free(dir_name); + + gop.key_ptr.* = dir_name; + + const specifier_cloned = try dev.allocator.dupe(u8, specifier); + errdefer dev.allocator.free(specifier_cloned); + + const watch_index = switch (dev.bun_watcher.addDirectory(fd, dir_name, bun.JSC.GenericWatcher.getHash(dir_name), false)) { + .err => return error.Ignore, + .result => |id| id, + }; + const dep = store.appendDepAssumeCapacity(.{ + .next = .none, + .source_file_path = file_path, + .specifier = specifier_cloned, + }); + store.watches.putAssumeCapacity(dir_name, .{ + .dir = fd, + .dir_fd_owned = owned_fd, + .first_dep = dep, + .watch_index = watch_index, + }); + } + + /// Caller must detach the dependency from the linked list it is in. + fn freeDependencyIndex(store: *DirectoryWatchStore, alloc: Allocator, index: Dep.Index) !void { + alloc.free(store.dependencies.items[index.get()].specifier); + + if (Environment.isDebug) { + store.dependencies.items[index.get()] = undefined; + } + + if (index.get() == (store.dependencies.items.len - 1)) { + store.dependencies.items.len -= 1; + } else { + try store.dependencies_free_list.append(alloc, index); + } + } + + /// Expects dependency list to be already freed + fn freeEntry(store: *DirectoryWatchStore, entry_index: usize) void { + const entry = store.watches.values()[entry_index]; + + debug.log("DirectoryWatchStore.freeEntry({d}, {})", .{ + entry_index, + entry.dir, + }); + + store.owner().bun_watcher.removeAtIndex(entry.watch_index, 0, &.{}, .file); + + defer _ = if (entry.dir_fd_owned) bun.sys.close(entry.dir); + store.watches.swapRemoveAt(entry_index); + + if (store.watches.entries.len == 0) { + assert(store.dependencies.items.len == 0); + store.dependencies_free_list.clearRetainingCapacity(); + } + } + + fn appendDepAssumeCapacity(store: *DirectoryWatchStore, dep: Dep) Dep.Index { + if (store.dependencies_free_list.popOrNull()) |index| { + store.dependencies.items[index.get()] = dep; + return index; + } + + const index = Dep.Index.init(@intCast(store.dependencies.items.len)); + store.dependencies.appendAssumeCapacity(dep); + return index; + } + + const Entry = struct { + /// The directory handle the watch is placed on + dir: bun.FileDescriptor, + dir_fd_owned: bool, + /// Files which request this import index + first_dep: Dep.Index, + /// To pass to Watcher.remove + watch_index: u16, + }; + + const Dep = struct { + next: Index.Optional, + /// The file used + source_file_path: []const u8, + /// The specifier that failed. Before running re-build, it is resolved for, as + /// creating an unrelated file should not re-emit another error. Default-allocator + specifier: []const u8, + + const Index = bun.GenericIndex(u32, Dep); + }; +}; + +const ChunkKind = enum { + initial_response, + hmr_chunk, +}; + +/// Errors sent to the HMR client in the browser are serialized. The same format +/// is used for thrown JavaScript exceptions as well as bundler errors. +/// Serialized failures contain a handle on what file or route they came from, +/// which allows the bundler to dismiss or update stale failures via index as +/// opposed to re-sending a new payload. 
This also means only changed files are +/// rebuilt, instead of all of the failed files. +/// +/// The HMR client in the browser is expected to sort the final list of errors +/// for deterministic output; there is code in DevServer that uses `swapRemove`. +pub const SerializedFailure = struct { + /// Serialized data is always owned by default_allocator + /// The first 32 bits of this slice contain the owner + data: []u8, + + pub fn deinit(f: SerializedFailure) void { + bun.default_allocator.free(f.data); + } + + /// The metaphorical owner of an incremental file error. The packed variant + /// is given to the HMR runtime as an opaque handle. + pub const Owner = union(enum) { + none, + route: RouteBundle.Index, + client: IncrementalGraph(.client).FileIndex, + server: IncrementalGraph(.server).FileIndex, + + pub fn encode(owner: Owner) Packed { + return switch (owner) { + .none => .{ .kind = .none, .data = 0 }, + .client => |data| .{ .kind = .client, .data = data.get() }, + .server => |data| .{ .kind = .server, .data = data.get() }, + .route => |data| .{ .kind = .route, .data = data.get() }, + }; + } + + pub const Packed = packed struct(u32) { + kind: enum(u2) { none, route, client, server }, + data: u30, + + pub fn decode(owner: Packed) Owner { + return switch (owner.kind) { + .none => .none, + .client => .{ .client = IncrementalGraph(.client).FileIndex.init(owner.data) }, + .server => .{ .server = IncrementalGraph(.server).FileIndex.init(owner.data) }, + .route => .{ .route = RouteBundle.Index.init(owner.data) }, + }; + } + }; + }; + + fn getOwner(failure: SerializedFailure) Owner { + return std.mem.bytesAsValue(Owner.Packed, failure.data[0..4]).decode(); + } + + /// This assumes the hash map contains only one SerializedFailure per owner. + /// This is okay since SerializedFailure can contain more than one error. + const ArrayHashContextViaOwner = struct { + pub fn hash(_: ArrayHashContextViaOwner, k: SerializedFailure) u32 { + return std.hash.uint32(@bitCast(k.getOwner().encode())); + } + + pub fn eql(_: ArrayHashContextViaOwner, a: SerializedFailure, b: SerializedFailure, _: usize) bool { + return @as(u32, @bitCast(a.getOwner().encode())) == @as(u32, @bitCast(b.getOwner().encode())); + } + }; + + const ArrayHashAdapter = struct { + pub fn hash(_: ArrayHashAdapter, own: Owner) u32 { + return std.hash.uint32(@bitCast(own.encode())); + } + + pub fn eql(_: ArrayHashAdapter, a: Owner, b: SerializedFailure, _: usize) bool { + return @as(u32, @bitCast(a.encode())) == @as(u32, @bitCast(b.getOwner().encode())); + } + }; + + const ErrorKind = enum(u8) { + // A log message. The `logger.Kind` is encoded here. + bundler_log_err = 0, + bundler_log_warn = 1, + bundler_log_note = 2, + bundler_log_debug = 3, + bundler_log_verbose = 4, + + /// new Error(message) + js_error, + /// new TypeError(message) + js_error_type, + /// new RangeError(message) + js_error_range, + /// Other forms of `Error` objects, including when an error has a + /// `code`, and other fields. 
+        js_error_extra,
+        /// Non-error with a stack trace
+        js_primitive_exception,
+        /// Non-error JS values
+        js_primitive,
+        /// new AggregateError(errors, message)
+        js_aggregate,
+    };
+
+    pub fn initFromJs(owner: Owner, value: JSValue) !SerializedFailure {
+        {
+            _ = value;
+            @panic("TODO");
+        }
+        // Avoid small re-allocations without requesting so much from the heap
+        var sfb = std.heap.stackFallback(65536, bun.default_allocator);
+        var payload = std.ArrayList(u8).initCapacity(sfb.get(), 65536) catch
+            unreachable; // enough space
+        const w = payload.writer();
+
+        try w.writeInt(u32, @bitCast(owner.encode()), .little);
+        // try writeJsValue(value);
+
+        // Avoid re-cloning if it was moved to the heap
+        const data = if (payload.items.ptr == &sfb.buffer)
+            try bun.default_allocator.dupe(u8, payload.items)
+        else
+            payload.items;
+
+        return .{ .data = data };
+    }
+
+    pub fn initFromLog(
+        owner: Owner,
+        owner_display_name: []const u8,
+        messages: []const bun.logger.Msg,
+    ) !SerializedFailure {
+        assert(messages.len > 0);
+
+        // Avoid small re-allocations without requesting so much from the heap
+        var sfb = std.heap.stackFallback(65536, bun.default_allocator);
+        var payload = std.ArrayList(u8).initCapacity(sfb.get(), 65536) catch
+            unreachable; // enough space
+        const w = payload.writer();
+
+        try w.writeInt(u32, @bitCast(owner.encode()), .little);
+
+        try writeString32(owner_display_name, w);
+
+        try w.writeInt(u32, @intCast(messages.len), .little);
+
+        for (messages) |*msg| {
+            try writeLogMsg(msg, w);
+        }
+
+        // Avoid re-cloning if it was moved to the heap
+        const data = if (payload.items.ptr == &sfb.buffer)
+            try bun.default_allocator.dupe(u8, payload.items)
+        else
+            payload.items;
+
+        return .{ .data = data };
+    }
+
+    // All "write" functions get a corresponding "read" function in ./client/error.ts
+
+    const Writer = std.ArrayList(u8).Writer;
+
+    fn writeLogMsg(msg: *const bun.logger.Msg, w: Writer) !void {
+        try w.writeByte(switch (msg.kind) {
+            inline else => |k| @intFromEnum(@field(ErrorKind, "bundler_log_" ++ @tagName(k))),
+        });
+        try writeLogData(msg.data, w);
+        const notes = msg.notes;
+        try w.writeInt(u32, @intCast(notes.len), .little);
+        for (notes) |note| {
+            try writeLogData(note, w);
+        }
+    }
+
+    fn writeLogData(data: bun.logger.Data, w: Writer) !void {
+        try writeString32(data.text, w);
+        if (data.location) |loc| {
+            assert(loc.line >= 0); // one based and not negative
+            assert(loc.column >= 0); // zero based and not negative
+
+            try w.writeInt(u32, @intCast(loc.line), .little);
+            try w.writeInt(u32, @intCast(loc.column), .little);
+            try w.writeInt(u32, @intCast(loc.length), .little);
+
+            // TODO: syntax highlighted line text + give more context lines
+            try writeString32(loc.line_text orelse "", w);
+
+            // The file is not specified here. Since the bundler runs every file
+            // in isolation, it would be impossible to reference any other file
+            // in this Log. Thus, it is not serialized.
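+            // (On the wire a message is therefore: u8 kind, u32 text length +
+            // text, u32 line, and, only when line != 0, u32 column, u32 length,
+            // and u32 line-text length + text, followed by u32 note count and
+            // the same Data encoding per note. A line of 0, written in the
+            // `else` branch below, signals that there is no location.)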
+ } else { + try w.writeInt(u32, 0, .little); + } + } + + fn writeString32(data: []const u8, w: Writer) !void { + try w.writeInt(u32, @intCast(data.len), .little); + try w.writeAll(data); + } + + // fn writeJsValue(value: JSValue, global: *JSC.JSGlobalObject, w: *Writer) !void { + // if (value.isAggregateError(global)) { + // // + // } + // if (value.jsType() == .DOMWrapper) { + // if (value.as(JSC.BuildMessage)) |build_error| { + // _ = build_error; // autofix + // // + // } else if (value.as(JSC.ResolveMessage)) |resolve_error| { + // _ = resolve_error; // autofix + // @panic("TODO"); + // } + // } + // _ = w; // autofix + + // @panic("TODO"); + // } +}; + +// For debugging, it is helpful to be able to see bundles. +fn dumpBundle(dump_dir: std.fs.Dir, side: bake.Graph, rel_path: []const u8, chunk: []const u8, wrap: bool) !void { + const name = bun.path.joinAbsString("/", &.{ + @tagName(side), + rel_path, + }, .auto)[1..]; + var inner_dir = try dump_dir.makeOpenPath(bun.Dirname.dirname(u8, name).?, .{}); + defer inner_dir.close(); + + const file = try inner_dir.createFile(bun.path.basename(name), .{}); + defer file.close(); + + var bufw = std.io.bufferedWriter(file.writer()); + + try bufw.writer().print("// {s} bundled for {s}\n", .{ + bun.fmt.quote(rel_path), + @tagName(side), + }); + try bufw.writer().print("// Bundled at {d}, Bun " ++ bun.Global.package_json_version_with_canary ++ "\n", .{ + std.time.nanoTimestamp(), + }); + + // Wrap in an object to make it valid syntax. Regardless, these files + // are never executable on their own as they contain only a single module. + + if (wrap) + try bufw.writer().writeAll("({\n"); + + try bufw.writer().writeAll(chunk); + + if (wrap) + try bufw.writer().writeAll("});\n"); + + try bufw.flush(); +} + +fn emitVisualizerMessageIfNeeded(dev: *DevServer) !void { + if (!bun.FeatureFlags.bake_debugging_features) return; + if (dev.emit_visualizer_events == 0) return; + + var sfb = std.heap.stackFallback(65536, bun.default_allocator); + var payload = try std.ArrayList(u8).initCapacity(sfb.get(), 65536); + defer payload.deinit(); + + try dev.writeVisualizerMessage(&payload); + + dev.publish(HmrSocket.visualizer_topic, payload.items, .binary); +} + +fn writeVisualizerMessage(dev: *DevServer, payload: *std.ArrayList(u8)) !void { + payload.appendAssumeCapacity(MessageId.visualizer.char()); + const w = payload.writer(); + + inline for ( + [2]bake.Side{ .client, .server }, + .{ &dev.client_graph, &dev.server_graph }, + ) |side, g| { + try w.writeInt(u32, @intCast(g.bundled_files.count()), .little); + for ( + g.bundled_files.keys(), + g.bundled_files.values(), + 0.., + ) |k, v, i| { + const normalized_key = dev.relativePath(k); + try w.writeInt(u32, @intCast(normalized_key.len), .little); + if (k.len == 0) continue; + try w.writeAll(normalized_key); + try w.writeByte(@intFromBool(g.stale_files.isSet(i) or switch (side) { + .server => v.failed, + .client => v.flags.failed, + })); + try w.writeByte(@intFromBool(side == .server and v.is_rsc)); + try w.writeByte(@intFromBool(side == .server and v.is_ssr)); + try w.writeByte(@intFromBool(side == .server and v.is_route)); + try w.writeByte(@intFromBool(side == .client and v.flags.is_special_framework_file)); + try w.writeByte(@intFromBool(switch (side) { + .server => v.is_client_component_boundary, + .client => v.flags.is_hmr_root, + })); + } + } + inline for (.{ &dev.client_graph, &dev.server_graph }) |g| { + const G = @TypeOf(g.*); + + try w.writeInt(u32, @intCast(g.edges.items.len - g.edges_free_list.items.len), 
.little); + for (g.edges.items, 0..) |edge, i| { + if (std.mem.indexOfScalar(G.EdgeIndex, g.edges_free_list.items, G.EdgeIndex.init(@intCast(i))) != null) + continue; + + try w.writeInt(u32, @intCast(edge.dependency.get()), .little); + try w.writeInt(u32, @intCast(edge.imported.get()), .little); + } + } +} + +pub fn onWebSocketUpgrade( + dev: *DevServer, + res: *Response, + req: *Request, + upgrade_ctx: *uws.uws_socket_context_t, + id: usize, +) void { + assert(id == 0); + + const dw = bun.create(dev.allocator, HmrSocket, .{ + .dev = dev, + .emit_visualizer_events = false, + }); + res.upgrade( + *HmrSocket, + dw, + req.header("sec-websocket-key") orelse "", + req.header("sec-websocket-protocol") orelse "", + req.header("sec-websocket-extension") orelse "", + upgrade_ctx, + ); +} + +/// Every message is to use `.binary`/`ArrayBuffer` transport mode. The first byte +/// indicates a Message ID; see comments on each type for how to interpret the rest. +/// +/// This format is only intended for communication for the browser build of +/// `hmr-runtime.ts` <-> `DevServer.zig`. Server-side HMR is implemented using a +/// different interface. This document is aimed for contributors to these two +/// components; Any other use-case is unsupported. +/// +/// All integers are sent in little-endian +pub const MessageId = enum(u8) { + /// Version payload. Sent on connection startup. The client should issue a + /// hard-reload when it mismatches with its `config.version`. + version = 'V', + /// Sent on a successful bundle, containing client code and changed CSS files. + /// + /// - u32: Number of CSS updates. For Each: + /// - [16]u8 ASCII: CSS identifier (hash of source path) + /// - u32: Length of CSS code + /// - [n]u8 UTF-8: CSS payload + /// - [n]u8 UTF-8: JS Payload. No length, rest of buffer is text. + /// + /// The JS payload will be code to hand to `eval` + // TODO: the above structure does not consider CSS attachments/detachments + hot_update = 'u', + /// Sent on a successful bundle, containing a list of routes that have + /// server changes. This is not sent when only client code changes. + /// + /// - `u32`: Number of updated routes. + /// - For each route: + /// - `u32`: Route ID + /// - `u32`: Length of route pattern + /// - `[n]u8` UTF-8: Route pattern + /// + /// HMR Runtime contains code that performs route matching at runtime + /// against `location.pathname`. The server is unaware of its routing + /// state. + route_update = 'R', + /// Sent when the list of errors changes. + /// + /// - `u32`: Removed errors. For Each: + /// - `u32`: Error owner + /// - Remainder are added errors. For Each: + /// - `SerializedFailure`: Error Data + errors = 'E', + /// Sent when all errors are cleared. + // TODO: Remove this message ID + errors_cleared = 'c', + /// Payload for `incremental_visualizer.html`. This can be accessed via + /// `/_bun/incremental_visualizer`. This contains both graphs. + /// + /// - `u32`: Number of files in `client_graph`. For Each: + /// - `u32`: Length of name. If zero then no other fields are provided. + /// - `[n]u8`: File path in UTF-8 encoded text + /// - `u8`: If file is stale, set 1 + /// - `u8`: If file is in server graph, set 1 + /// - `u8`: If file is in ssr graph, set 1 + /// - `u8`: If file is a server-side route root, set 1 + /// - `u8`: If file is a server-side component boundary file, set 1 + /// - `u32`: Number of files in the server graph. For Each: + /// - Repeat the same parser for the client graph + /// - `u32`: Number of client edges. 
For Each: + /// - `u32`: File index of the dependency file + /// - `u32`: File index of the imported file + /// - `u32`: Number of server edges. For Each: + /// - `u32`: File index of the dependency file + /// - `u32`: File index of the imported file + visualizer = 'v', + + pub inline fn char(id: MessageId) u8 { + return @intFromEnum(id); + } +}; + +pub const IncomingMessageId = enum(u8) { + /// Subscribe to `.visualizer` events. No payload. + visualizer = 'v', + /// Invalid data + _, +}; + +const HmrSocket = struct { + dev: *DevServer, + emit_visualizer_events: bool, + + pub const global_topic = "*"; + pub const visualizer_topic = "v"; + + pub fn onOpen(s: *HmrSocket, ws: AnyWebSocket) void { + _ = ws.send(&(.{MessageId.version.char()} ++ s.dev.configuration_hash_key), .binary, false, true); + _ = ws.subscribe(global_topic); + } + + pub fn onMessage(s: *HmrSocket, ws: AnyWebSocket, msg: []const u8, opcode: uws.Opcode) void { + _ = opcode; + + if (msg.len == 0) { + ws.close(); + return; + } + + switch (@as(IncomingMessageId, @enumFromInt(msg[0]))) { + .visualizer => { + if (!s.emit_visualizer_events) { + s.emit_visualizer_events = true; + s.dev.emit_visualizer_events += 1; + _ = ws.subscribe(visualizer_topic); + s.dev.emitVisualizerMessageIfNeeded() catch bun.outOfMemory(); + } + }, + else => { + ws.close(); + }, + } + } + + pub fn onClose(s: *HmrSocket, ws: AnyWebSocket, exit_code: i32, message: []const u8) void { + _ = ws; + _ = exit_code; + _ = message; + + if (s.emit_visualizer_events) { + s.dev.emit_visualizer_events -= 1; + } + + defer s.dev.allocator.destroy(s); + } +}; + +const c = struct { + // BakeSourceProvider.cpp + extern fn BakeGetDefaultExportFromModule(global: *JSC.JSGlobalObject, module: JSValue) JSValue; + + fn BakeLoadServerHmrPatch(global: *JSC.JSGlobalObject, code: bun.String) !JSValue { + const f = @extern( + *const fn (*JSC.JSGlobalObject, bun.String) callconv(.C) JSValue.MaybeException, + .{ .name = "BakeLoadServerHmrPatch" }, + ); + return f(global, code).unwrap(); + } + + fn BakeLoadInitialServerCode(global: *JSC.JSGlobalObject, code: bun.String, separate_ssr_graph: bool) bun.JSError!JSValue { + const f = @extern(*const fn (*JSC.JSGlobalObject, bun.String, bool) callconv(.C) JSValue.MaybeException, .{ + .name = "BakeLoadInitialServerCode", + }); + return f(global, code, separate_ssr_graph).unwrap(); + } +}; + +/// Called on DevServer thread via HotReloadTask +pub fn reload(dev: *DevServer, reload_task: *HotReloadTask) bun.OOM!void { + defer reload_task.files.clearRetainingCapacity(); + + const changed_file_paths = reload_task.files.keys(); + // TODO: check for .delete and remove items from graph. this has to be done + // with care because some editors save by deleting and recreating the file. + // delete events are not to be trusted at face value. also, merging of + // events can cause .write and .delete to be true at the same time. + const changed_file_attributes = reload_task.files.values(); + _ = changed_file_attributes; + + var timer = std.time.Timer.start() catch + @panic("timers unsupported"); + + var sfb = std.heap.stackFallback(4096, bun.default_allocator); + var temp_alloc = sfb.get(); + + // pre-allocate a few files worth of strings. it is unlikely but supported + // to change more than 8 files in the same bundling round. 
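As an aside on the binary WebSocket protocol documented above (one-byte `MessageId`, little-endian integers): the following is a hedged TypeScript sketch of how a client such as `hmr-runtime.ts` could decode the `route_update` ('R') payload. It is illustrative only, based on the documented wire format, and is not Bun's actual runtime code.

```ts
// Illustrative decoder for the documented `route_update` wire format.
function decodeRouteUpdate(buffer: ArrayBuffer): { id: number; pattern: string }[] {
  const bytes = new Uint8Array(buffer);
  const view = new DataView(buffer);
  const utf8 = new TextDecoder();

  // The first byte of every message is its MessageId character.
  if (String.fromCharCode(bytes[0]) !== "R") throw new Error("not a route_update message");

  let offset = 1;
  const count = view.getUint32(offset, true); // all integers are little-endian
  offset += 4;

  const routes: { id: number; pattern: string }[] = [];
  for (let i = 0; i < count; i++) {
    const id = view.getUint32(offset, true);
    offset += 4;
    const patternLen = view.getUint32(offset, true);
    offset += 4;
    const pattern = utf8.decode(bytes.subarray(offset, offset + patternLen));
    offset += patternLen;
    routes.push({ id, pattern });
  }
  return routes;
}
```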
+ var files = std.ArrayList(BakeEntryPoint).initCapacity(temp_alloc, 8) catch unreachable; + defer files.deinit(); + + { + dev.graph_safety_lock.lock(); + defer dev.graph_safety_lock.unlock(); + + inline for (.{ &dev.server_graph, &dev.client_graph }) |g| { + g.invalidate(changed_file_paths, &files) catch bun.outOfMemory(); + } + } + + if (files.items.len == 0) { + Output.debugWarn("nothing to bundle?? this is a bug?", .{}); + return; + } + + dev.incremental_result.reset(); + defer { + // Remove files last to start, to avoid issues where removing a file + // invalidates the last file index. + std.sort.pdq( + IncrementalGraph(.client).FileIndex, + dev.incremental_result.delete_client_files_later.items, + {}, + IncrementalGraph(.client).FileIndex.sortFnDesc, + ); + for (dev.incremental_result.delete_client_files_later.items) |client_index| { + dev.client_graph.disconnectAndDeleteFile(client_index); + } + dev.incremental_result.delete_client_files_later.clearRetainingCapacity(); + } + + dev.bundle(files.items) catch |err| { + bun.handleErrorReturnTrace(err, @errorReturnTrace()); + return; + }; + + dev.graph_safety_lock.lock(); + defer dev.graph_safety_lock.unlock(); + + // This list of routes affected excludes client code. This means changing + // a client component wont count as a route to trigger a reload on. + // + // A second trace is required to determine what routes had changed bundles, + // since changing a layout affects all child routes. Additionally, routes + // that do not have a bundle will not be cleared (as there is nothing to + // clear for those) + if (dev.incremental_result.routes_affected.items.len > 0) { + // re-use some earlier stack memory + files.clearAndFree(); + sfb = std.heap.stackFallback(4096, bun.default_allocator); + temp_alloc = sfb.get(); + + // A bit-set is used to avoid duplicate entries. This is not a problem + // with `dev.incremental_result.routes_affected` + var second_trace_result = try DynamicBitSetUnmanaged.initEmpty(temp_alloc, dev.route_bundles.items.len); + for (dev.incremental_result.routes_affected.items) |request| { + const route = dev.router.routePtr(request.route_index); + if (route.bundle.unwrap()) |id| second_trace_result.set(id.get()); + if (request.should_recurse_when_visiting) { + markAllRouteChildren(&dev.router, &second_trace_result, request.route_index); + } + } + + var sfb2 = std.heap.stackFallback(65536, bun.default_allocator); + var payload = std.ArrayList(u8).initCapacity(sfb2.get(), 65536) catch + unreachable; // enough space + defer payload.deinit(); + payload.appendAssumeCapacity(MessageId.route_update.char()); + const w = payload.writer(); + const count = second_trace_result.count(); + assert(count > 0); + try w.writeInt(u32, @intCast(count), .little); + + var it = second_trace_result.iterator(.{ .kind = .set }); + while (it.next()) |bundled_route_index| { + try w.writeInt(u32, @intCast(bundled_route_index), .little); + const pattern = dev.route_bundles.items[bundled_route_index].full_pattern; + try w.writeInt(u32, @intCast(pattern.len), .little); + try w.writeAll(pattern); + } + + // Notify + dev.publish(HmrSocket.global_topic, payload.items, .binary); + } + + // When client component roots get updated, the `client_components_affected` + // list contains the server side versions of these roots. These roots are + // traced to the routes so that the client-side bundles can be properly + // invalidated. 
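The `route_update` payload published just above is matched against `location.pathname` on the client, since the server does not track each tab's routing state. Below is a speculative TypeScript sketch of that client-side check; the `[param]` segment syntax is an assumption for illustration and may not match the real matcher.

```ts
// Assumption: patterns use static segments plus `[param]`-style dynamic segments.
function routeMatchesPath(pattern: string, pathname: string): boolean {
  const patternParts = pattern.split("/").filter(Boolean);
  const pathParts = pathname.split("/").filter(Boolean);
  if (patternParts.length !== pathParts.length) return false;
  return patternParts.every(
    (part, i) => (part.startsWith("[") && part.endsWith("]")) || part === pathParts[i],
  );
}

// Returns true when the page currently shown is affected by the update.
function routeUpdateAffectsPage(updatedPatterns: string[], pathname: string): boolean {
  return updatedPatterns.some(pattern => routeMatchesPath(pattern, pathname));
}

console.log(routeUpdateAffectsPage(["/blog/[slug]"], "/blog/hello-world")); // true
```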
+ if (dev.incremental_result.client_components_affected.items.len > 0) { + dev.incremental_result.routes_affected.clearRetainingCapacity(); + dev.server_graph.affected_by_trace.setAll(false); + + var sfa_state = std.heap.stackFallback(65536, dev.allocator); + const sfa = sfa_state.get(); + dev.server_graph.affected_by_trace = try DynamicBitSetUnmanaged.initEmpty(sfa, dev.server_graph.bundled_files.count()); + defer dev.server_graph.affected_by_trace.deinit(sfa); + + for (dev.incremental_result.client_components_affected.items) |index| { + try dev.server_graph.traceDependencies(index, .no_stop); + } + + // TODO: + // for (dev.incremental_result.routes_affected.items) |route| { + // // Free old bundles + // if (dev.routes[route.get()].client_bundle) |old| { + // dev.allocator.free(old); + // } + // dev.routes[route.get()].client_bundle = null; + // } + } + + // TODO: improve this visual feedback + if (dev.bundling_failures.count() == 0) { + const clear_terminal = !debug.isVisible(); + if (clear_terminal) { + Output.flush(); + Output.disableBuffering(); + Output.resetTerminalAll(); + } + + dev.bundles_since_last_error += 1; + if (dev.bundles_since_last_error > 1) { + Output.prettyError("[x{d}] ", .{dev.bundles_since_last_error}); + } + + Output.prettyError("Reloaded in {d}ms: {s}", .{ @divFloor(timer.read(), std.time.ns_per_ms), dev.relativePath(changed_file_paths[0]) }); + if (changed_file_paths.len > 1) { + Output.prettyError(" + {d} more", .{files.items.len - 1}); + } + Output.prettyError("\n", .{}); + Output.flush(); + } else {} +} + +fn markAllRouteChildren(router: *FrameworkRouter, bits: *DynamicBitSetUnmanaged, route_index: Route.Index) void { + var next = router.routePtr(route_index).first_child.unwrap(); + while (next) |child_index| { + const route = router.routePtr(child_index); + if (route.bundle.unwrap()) |index| bits.set(index.get()); + markAllRouteChildren(router, bits, child_index); + next = route.next_sibling.unwrap(); + } +} + +pub const HotReloadTask = struct { + /// Align to cache lines to reduce contention. + const Aligned = struct { aligned: HotReloadTask align(std.atomic.cache_line) }; + + dev: *DevServer, + concurrent_task: JSC.ConcurrentTask = undefined, + + files: bun.StringArrayHashMapUnmanaged(Watcher.Event.Op), + + /// I am sorry. + state: std.atomic.Value(u32), + + pub fn initEmpty(dev: *DevServer) HotReloadTask { + return .{ + .dev = dev, + .files = .{}, + .state = .{ .raw = 0 }, + }; + } + + pub fn append( + task: *HotReloadTask, + allocator: Allocator, + file_path: []const u8, + op: Watcher.Event.Op, + ) void { + const gop = task.files.getOrPut(allocator, file_path) catch bun.outOfMemory(); + if (gop.found_existing) { + gop.value_ptr.* = gop.value_ptr.merge(op); + } else { + gop.value_ptr.* = op; + } + } + + pub fn run(initial: *HotReloadTask) void { + debug.log("HMR Task start", .{}); + defer debug.log("HMR Task end", .{}); + + // TODO: audit the atomics with this reloading strategy + // It was not written by an expert. 
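For readers skimming the `HotReloadTask.append` logic above: repeated watcher events for the same path are coalesced by merging their operation flags, so one bundling pass sees at most one entry per file. A simplified TypeScript model of that idea follows; the `Op` shape and class name are stand-ins, not Bun's actual types.

```ts
// Simplified model of per-file watcher event coalescing.
type Op = { write?: boolean; delete?: boolean; rename?: boolean };

class PendingReload {
  files = new Map<string, Op>();

  append(filePath: string, op: Op) {
    const existing = this.files.get(filePath);
    if (existing) {
      // Merge flags; a later write does not erase an earlier delete, so the
      // consumer must decide how to interpret combined flags.
      this.files.set(filePath, {
        write: existing.write || op.write,
        delete: existing.delete || op.delete,
        rename: existing.rename || op.rename,
      });
    } else {
      this.files.set(filePath, { ...op });
    }
  }
}

// Two rapid saves of the same file produce a single pending entry.
const task = new PendingReload();
task.append("src/index.ts", { write: true });
task.append("src/index.ts", { write: true });
console.log(task.files.size); // 1
```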
+ + const dev = initial.dev; + if (Environment.allow_assert) { + assert(initial.state.load(.seq_cst) == 0); + } + + // const start_timestamp = std.time.nanoTimestamp(); + dev.reload(initial) catch bun.outOfMemory(); + + // if there was a pending run, do it now + if (dev.watch_state.swap(0, .seq_cst) > 1) { + // debug.log("dual event fire", .{}); + const current = if (initial == &dev.watch_events[0].aligned) + &dev.watch_events[1].aligned + else + &dev.watch_events[0].aligned; + if (current.state.swap(1, .seq_cst) == 0) { + // debug.log("case 1 (run now)", .{}); + dev.reload(current) catch bun.outOfMemory(); + current.state.store(0, .seq_cst); + } else { + // Watcher will emit an event since it reads watch_state 0 + // debug.log("case 2 (run later)", .{}); + } + } + } +}; + +/// Called on watcher's thread; Access to dev-server state restricted. +pub fn onFileUpdate(dev: *DevServer, events: []Watcher.Event, changed_files: []?[:0]u8, watchlist: Watcher.ItemList) void { + debug.log("onFileUpdate start", .{}); + defer debug.log("onFileUpdate end", .{}); + + _ = changed_files; + const slice = watchlist.slice(); + const file_paths = slice.items(.file_path); + const counts = slice.items(.count); + const kinds = slice.items(.kind); + + // TODO: audit the atomics with this reloading strategy + // It was not written by an expert. + + // Get a Hot reload task pointer + var ev: *HotReloadTask = &dev.watch_events[dev.watch_current].aligned; + if (ev.state.swap(1, .seq_cst) == 1) { + debug.log("work got stolen, must guarantee the other is free", .{}); + dev.watch_current +%= 1; + ev = &dev.watch_events[dev.watch_current].aligned; + bun.assert(ev.state.swap(1, .seq_cst) == 0); + } + defer { + // Submit the Hot reload task for bundling + if (ev.files.entries.len > 0) { + const prev_state = dev.watch_state.fetchAdd(1, .seq_cst); + ev.state.store(0, .seq_cst); + debug.log("prev_state={d}", .{prev_state}); + if (prev_state == 0) { + ev.concurrent_task = .{ .auto_delete = false, .next = null, .task = JSC.Task.init(ev) }; + dev.vm.event_loop.enqueueTaskConcurrent(&ev.concurrent_task); + dev.watch_current +%= 1; + } else { + // DevServer thread is notified. + } + } else { + ev.state.store(0, .seq_cst); + } + } + + defer dev.bun_watcher.flushEvictions(); + + // TODO: alot of code is missing + // TODO: story for busting resolution cache smartly? + for (events) |event| { + const file_path = file_paths[event.index]; + const update_count = counts[event.index] + 1; + counts[event.index] = update_count; + const kind = kinds[event.index]; + + debug.log("{s} change: {s} {}", .{ @tagName(kind), file_path, event.op }); + + switch (kind) { + .file => { + if (event.op.delete or event.op.rename) { + dev.bun_watcher.removeAtIndex(event.index, 0, &.{}, .file); + } + + ev.append(dev.allocator, file_path, event.op); + }, + .directory => { + // bust the directory cache since this directory has changed + // TODO: correctly solve https://github.com/oven-sh/bun/issues/14913 + _ = dev.server_bundler.resolver.bustDirCache(bun.strings.withoutTrailingSlash(file_path)); + + // if a directory watch exists for resolution + // failures, check those now. 
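The comment above refers to watchers registered on directories where an import previously failed to resolve: when such a directory changes, each pending specifier is retried, and any that now resolves causes its importing file to be queued for rebundling, while the rest stay watched. A conceptual TypeScript sketch follows; `tryResolve` and `queueForRebundle` are invented helpers, not real APIs.

```ts
// Conceptual sketch of retrying failed resolutions when a directory changes.
type PendingResolution = { sourceFile: string; specifier: string };

const watchedDirs = new Map<string, PendingResolution[]>();

function onDirectoryChanged(
  dir: string,
  tryResolve: (sourceFile: string, specifier: string) => string | null,
  queueForRebundle: (sourceFile: string) => void,
) {
  const pending = watchedDirs.get(dir);
  if (!pending) return;

  const stillPending: PendingResolution[] = [];
  for (const dep of pending) {
    if (tryResolve(dep.sourceFile, dep.specifier) !== null) {
      // The import resolves now; rebundle the file that wanted it.
      queueForRebundle(dep.sourceFile);
    } else {
      stillPending.push(dep);
    }
  }

  if (stillPending.length === 0) {
    watchedDirs.delete(dir); // nothing depends on this directory anymore
  } else {
    watchedDirs.set(dir, stillPending);
  }
}
```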
+ dev.directory_watchers.lock.lock(); + defer dev.directory_watchers.lock.unlock(); + if (dev.directory_watchers.watches.getIndex(file_path)) |watcher_index| { + const entry = &dev.directory_watchers.watches.values()[watcher_index]; + var new_chain: DirectoryWatchStore.Dep.Index.Optional = .none; + var it: ?DirectoryWatchStore.Dep.Index = entry.first_dep; + + while (it) |index| { + const dep = &dev.directory_watchers.dependencies.items[index.get()]; + it = dep.next.unwrap(); + if ((dev.server_bundler.resolver.resolve( + bun.path.dirname(dep.source_file_path, .auto), + dep.specifier, + .stmt, + ) catch null) != null) { + // the resolution result is not preserved as safely + // transferring it into BundleV2 is too complicated. the + // resolution is cached, anyways. + ev.append(dev.allocator, dep.source_file_path, .{ .write = true }); + dev.directory_watchers.freeDependencyIndex(dev.allocator, index) catch bun.outOfMemory(); + } else { + // rebuild a new linked list for unaffected files + dep.next = new_chain; + new_chain = index.toOptional(); + } + } + + if (new_chain.unwrap()) |new_first_dep| { + entry.first_dep = new_first_dep; + } else { + // without any files to depend on this watcher is freed + dev.directory_watchers.freeEntry(watcher_index); + } + } + }, + } + } +} + +pub fn onWatchError(_: *DevServer, err: bun.sys.Error) void { + // TODO: how to recover? the watcher can't just ... crash???????? + Output.err(@as(bun.C.E, @enumFromInt(err.errno)), "Watcher crashed", .{}); + if (bun.Environment.isDebug) { + bun.todoPanic(@src(), "Watcher crash", .{}); + } +} + +pub fn publish(dev: *DevServer, topic: []const u8, message: []const u8, opcode: uws.Opcode) void { + if (dev.server) |s| _ = s.publish(topic, message, opcode, false); +} + +pub fn numSubscribers(dev: *DevServer, topic: []const u8) u32 { + return if (dev.server) |s| s.numSubscribers(topic) else 0; +} + +const SafeFileId = packed struct(u32) { + side: bake.Side, + index: u30, + unused: enum(u1) { unused = 0 } = .unused, +}; + +/// Interface function for FrameworkRouter +pub fn getFileIdForRouter(dev: *DevServer, abs_path: []const u8, associated_route: Route.Index, file_kind: Route.FileKind) !OpaqueFileId { + const index = try dev.server_graph.insertStaleExtra(abs_path, false, true); + try dev.route_lookup.put(dev.allocator, index, .{ + .route_index = associated_route, + .should_recurse_when_visiting = file_kind == .layout, + }); + return toOpaqueFileId(.server, index); +} + +fn toOpaqueFileId(comptime side: bake.Side, index: IncrementalGraph(side).FileIndex) OpaqueFileId { + if (Environment.allow_assert) { + return OpaqueFileId.init(@bitCast(SafeFileId{ + .side = side, + .index = index.get(), + })); + } + + return OpaqueFileId.init(index.get()); +} + +fn fromOpaqueFileId(comptime side: bake.Side, id: OpaqueFileId) IncrementalGraph(side).FileIndex { + if (Environment.allow_assert) { + const safe: SafeFileId = @bitCast(id.get()); + assert(side == safe.side); + return IncrementalGraph(side).FileIndex.init(safe.index); + } + return IncrementalGraph(side).FileIndex.init(@intCast(id.get())); +} + +fn relativePath(dev: *const DevServer, path: []const u8) []const u8 { + // TODO: windows slash normalization + bun.assert(dev.root[dev.root.len - 1] != '/'); + if (path.len >= dev.root.len + 1 and + path[dev.root.len] == '/' and + bun.strings.startsWith(path, dev.root)) + { + return path[dev.root.len + 1 ..]; + } + return bun.path.relative(dev.root, path); +} + +fn dumpStateDueToCrash(dev: *DevServer) !void { + comptime 
assert(bun.FeatureFlags.bake_debugging_features); + + // being conservative about how much stuff is put on the stack. + var filepath_buf: [@min(4096, bun.MAX_PATH_BYTES)]u8 = undefined; + const filepath = std.fmt.bufPrintZ(&filepath_buf, "incremental-graph-crash-dump.{d}.html", .{std.time.timestamp()}) catch "incremental-graph-crash-dump.html"; + const file = std.fs.cwd().createFileZ(filepath, .{}) catch |err| { + bun.handleErrorReturnTrace(err, @errorReturnTrace()); + Output.warn("Could not open directory for dumping sources: {}", .{err}); + return; + }; + defer file.close(); + + const start, const end = comptime brk: { + const visualizer = @embedFile("incremental_visualizer.html"); + const i = (std.mem.indexOf(u8, visualizer, ""); + } catch { + // The chunk cannot be embedded as a UTF-8 string in the script tag. + // No data should have been written yet, so a base64 fallback can be used. + const base64 = btoa(String.fromCodePoint(...chunk)); + controller.write(`Uint8Array.from(atob(\"${base64}\"),m=>m.codePointAt(0))`); + } +} + +/** + * Attempts to combine RSC chunks together to minimize the number of chunks the + * client processes. + */ +function writeManyFlightScriptData( + chunks: Uint8Array[], + decoder: TextDecoder, + controller: { write: (str: string) => void }, +) { + if (chunks.length === 1) return writeSingleFlightScriptData(chunks[0], decoder, controller); + + let i = 0; + try { + // Combine all chunks into a single string if possible. + for (; i < chunks.length; i++) { + // `decode()` will throw on invalid UTF-8 sequences. + const str = toSingleQuote(decoder.decode(chunks[i], { stream: true })); + if (i === 0) controller.write("'"); + controller.write(str); + } + controller.write("')"); + } catch { + // The chunk cannot be embedded as a UTF-8 string in the script tag. + // Since this is rare, just make the rest of the chunks base64. + if (i > 0) controller.write("');__bun_f.push("); + controller.write('Uint8Array.from(atob("'); + for (; i < chunks.length; i++) { + const chunk = chunks[i]; + const base64 = btoa(String.fromCodePoint(...chunk)); + controller.write(base64.slice(1, -1)); + } + controller.write('"),m=>m.codePointAt(0))'); + } +} + +// Instead of using `JSON.stringify`, this uses a single quote variant of it, since +// the RSC payload includes a ton of " characters. This is slower, but an easy +// component to move into native code. +function toSingleQuote(str: string): string { + return ( + str // Escape single quotes, backslashes, and newlines + .replace(/\\/g, "\\\\") + .replace(/'/g, "\\'") + .replace(/\n/g, "\\n") + // Escape closing script tags and HTML comments in JS content. + .replace(/ `./some-asset.png` + /// ^--------------^|^------- .query.index + /// unique_key .query.kind + /// + /// An output piece is the concatenation of source code text and an output + /// path, in that order. An array of pieces makes up an entire file. 
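As the doc comment above describes, a file's final text is produced by walking its pieces in order: emit the piece's source text, then the output path its query refers to (unless the piece is the terminating data-only piece). A rough TypeScript model of that stitching step; the types here are illustrative, not the bundler's real data structures.

```ts
// Illustrative model of stitching output pieces back into one file.
type PieceKind = "none" | "asset" | "chunk" | "scb";
type Piece = { text: string; kind: PieceKind; index: number };

function joinPieces(
  pieces: Piece[],
  resolvePath: (kind: PieceKind, index: number) => string,
): string {
  let out = "";
  for (const piece of pieces) {
    out += piece.text;
    if (piece.kind !== "none") {
      // The placeholder that was cut out of the source text is replaced by
      // the final output path of the referenced asset or chunk.
      out += resolvePath(piece.kind, piece.index);
    }
  }
  return out;
}

// Example: an import statement split around an asset reference.
const text = joinPieces(
  [
    { text: 'import "', kind: "asset", index: 0 },
    { text: '";\n', kind: "none", index: 0 },
  ],
  () => "./assets/some-asset-abc123.png",
);
console.log(text); // import "./assets/some-asset-abc123.png";
```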
pub const OutputPiece = struct { - // laid out like this so it takes up the same amount of space as a []const u8 - data_ptr: [*]const u8 = undefined, - data_len: u32 = 0, + /// Pointer and length split to reduce struct size + data_ptr: [*]const u8, + data_len: u32, + query: Query, - index: OutputPieceIndex = .{}, - - pub inline fn data(this: OutputPiece) []const u8 { + pub fn data(this: OutputPiece) []const u8 { return this.data_ptr[0..this.data_len]; } + + pub const Query = packed struct(u32) { + index: u30, + kind: Kind, + + pub const Kind = enum(u2) { + /// The last piece in an array uses this to indicate it is just data + none, + /// Given a source index, print the asset's output + asset, + /// Given a chunk index, print the chunk's output path + chunk, + /// Given a server component boundary index, print the chunk's output path + scb, + }; + + pub const none: Query = .{ .index = 0, .kind = .none }; + }; + + pub fn init(data_slice: []const u8, query: Query) OutputPiece { + return .{ + .data_ptr = data_slice.ptr, + .data_len = @intCast(data_slice.len), + .query = query, + }; + } }; - pub const OutputPieceIndex = packed struct { - index: u30 = 0, - - kind: Kind = Kind.none, - - pub const Kind = enum(u2) { - /// The "kind" may be "none" in which case there is one piece - /// with data and no chunk index. For example, the chunk may not contain any - /// imports. - none, - - asset, - chunk, - }; - }; + pub const OutputPieceIndex = OutputPiece.Query; pub const EntryPoint = packed struct(u64) { /// Index into `Graph.input_files` - source_index: Index.Int = 0, - entry_point_id: ID = 0, + source_index: u32 = 0, + entry_point_id: u31 = 0, is_entry_point: bool = false, - // so it fits in a 64-bit integer + /// so `EntryPoint` can be a u64 pub const ID = u31; }; @@ -11682,12 +14453,107 @@ pub const Chunk = struct { imports_from_other_chunks: ImportsFromOtherChunks = .{}, cross_chunk_prefix_stmts: BabyList(Stmt) = .{}, cross_chunk_suffix_stmts: BabyList(Stmt) = .{}, + + /// Indexes to CSS chunks. Currently this will only ever be zero or one + /// items long, but smarter css chunking will allow multiple js entry points + /// share a css file, or have an entry point contain multiple css files. + /// + /// Mutated while sorting chunks in `computeChunks` + css_chunks: []u32 = &.{}, + }; + + pub const CssChunk = struct { + imports_in_chunk_in_order: BabyList(CssImportOrder), + /// When creating a chunk, this is to be an uninitialized slice with + /// length of `imports_in_chunk_in_order` + /// + /// Multiple imports may refer to the same file/stylesheet, but may need to + /// wrap them in conditions (e.g. a layer). + /// + /// When we go through the `prepareCssAstsForChunk()` step, each import will + /// create a shallow copy of the file's AST (just dereferencing the pointer). 
+ asts: []bun.css.BundlerStyleSheet, + }; + + const CssImportKind = enum { + source_index, + external_path, + import_layers, + }; + + pub const CssImportOrder = struct { + conditions: BabyList(bun.css.ImportConditions) = .{}, + // TODO: unfuck this + condition_import_records: BabyList(ImportRecord) = .{}, + + kind: union(enum) { + // kind == .import_layers + layers: [][]const u8, + // kind == .external_path + external_path: bun.fs.Path, + // kind == .source_idnex + source_index: Index, + }, + + pub fn hash(this: *const CssImportOrder, hasher: anytype) void { + // TODO: conditions, condition_import_records + + bun.writeAnyToHasher(hasher, std.meta.activeTag(this.kind)); + switch (this.kind) { + .layers => |layers| { + for (layers) |layer| hasher.update(layer); + hasher.update("\x00"); + }, + .external_path => |path| hasher.update(path.text), + .source_index => |idx| bun.writeAnyToHasher(hasher, idx), + } + } + + pub fn format(this: *const CssImportOrder, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { + try writer.print("{s} = ", .{@tagName(this.kind)}); + switch (this.kind) { + .layers => |layers| { + try writer.print("[", .{}); + for (layers, 0..) |layer, i| { + if (i > 0) try writer.print(", ", .{}); + try writer.print("\"{s}\"", .{layer}); + } + try writer.print("]", .{}); + }, + .external_path => |path| { + try writer.print("\"{s}\"", .{path.pretty}); + }, + .source_index => |source_index| { + try writer.print("{d}", .{source_index.get()}); + }, + } + } }; pub const ImportsFromOtherChunks = std.AutoArrayHashMapUnmanaged(Index.Int, CrossChunkImport.Item.List); - pub const Content = union(enum) { + pub const ContentKind = enum { + javascript, + css, + }; + + pub const Content = union(ContentKind) { javascript: JavaScriptChunk, + css: CssChunk, + + pub fn loader(this: *const Content) Loader { + return switch (this.*) { + .javascript => .js, + .css => .css, + }; + } + + pub fn ext(this: *const Content) string { + return switch (this.*) { + .javascript => "js", + .css => "css", + }; + } }; }; @@ -11754,11 +14620,17 @@ pub const CrossChunkImport = struct { } }; -const CompileResult = union(enum) { +pub const CompileResult = union(enum) { javascript: struct { source_index: Index.Int, result: js_printer.PrintResult, }, + css: struct { + source_index: Index.Int, + code: []const u8, + // TODO: we need to do this + source_map: ?bun.sourcemap.Chunk = null, + }, pub const empty = CompileResult{ .javascript = .{ @@ -11777,7 +14649,7 @@ const CompileResult = union(enum) { .result => |r2| r2.code, else => "", }, - // else => "", + .css => |*c| c.code, }; } @@ -11787,12 +14659,14 @@ const CompileResult = union(enum) { .result => |r2| r2.source_map, else => null, }, + .css => |*c| c.source_map, }; } pub fn sourceIndex(this: *const CompileResult) Index.Int { return switch (this.*) { .javascript => |r| r.source_index, + .css => |*c| c.source_index, // else => 0, }; } @@ -11836,7 +14710,7 @@ const ContentHasher = struct { // meant to be fast but not 100% thorough // users can correctly put in a trailing slash if they want // this is just being nice -fn cheapPrefixNormalizer(prefix: []const u8, suffix: []const u8) [2]string { +pub fn cheapPrefixNormalizer(prefix: []const u8, suffix: []const u8) [2]string { if (prefix.len == 0) { const suffix_no_slash = bun.strings.removeLeadingDotSlash(suffix); return .{ @@ -11870,113 +14744,6 @@ fn cheapPrefixNormalizer(prefix: []const u8, suffix: []const u8) [2]string { }; } -const components_manifest_path = "./components-manifest.blob"; - -// 
For Server Components, we generate an entry point which re-exports all client components -// This is a "shadow" of the server entry point. -// The client is expected to import this shadow entry point -const ShadowEntryPoint = struct { - from_source_index: Index.Int, - to_source_index: Index.Int, - - named_exports: bun.BabyList(NamedExport) = .{}, - - pub const NamedExport = struct { - // TODO: packed string - from: string, - to: string, - source_index: Index.Int, - }; - - pub const Builder = struct { - source_code_buffer: MutableString, - ctx: *BundleV2, - resolved_source_indices: std.ArrayList(Index.Int), - shadow: *ShadowEntryPoint, - - pub fn addClientComponent( - this: *ShadowEntryPoint.Builder, - source_index: usize, - ) void { - var writer = this.source_code_buffer.writer(); - const path = this.ctx.graph.input_files.items(.source)[source_index].path; - // TODO: tree-shaking to named imports only - writer.print( - \\// {s} - \\import {} from '${d}'; - \\export {}; - \\ - , - .{ - path.pretty, - ImportsFormatter{ .ctx = this.ctx, .source_index = @as(Index.Int, @intCast(source_index)), .pretty = path.pretty }, - bun.fmt.hexIntUpper(bun.hash(path.pretty)), - ExportsFormatter{ .ctx = this.ctx, .source_index = @as(Index.Int, @intCast(source_index)), .pretty = path.pretty, .shadow = this.shadow }, - }, - ) catch unreachable; - this.resolved_source_indices.append(@as(Index.Int, @truncate(source_index))) catch unreachable; - } - }; - const ImportsFormatter = struct { - ctx: *BundleV2, - pretty: string, - source_index: Index.Int, - pub fn format(self: ImportsFormatter, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { - var this = self.ctx; - const named_exports: *js_ast.Ast.NamedExports = &this.graph.ast.items(.named_exports)[self.source_index]; - try writer.writeAll("{"); - for (named_exports.keys()) |*named| { - named.* = try std.fmt.allocPrint( - this.graph.allocator, - "${}_{s}", - .{ - bun.fmt.hexIntLower(bun.hash(self.pretty)), - named.*, - }, - ); - } - try named_exports.reIndex(); - - for (named_exports.keys(), 0..) |name, i| { - try writer.writeAll(name); - if (i < named_exports.count() - 1) { - try writer.writeAll(" , "); - } - } - try writer.writeAll("}"); - } - }; - - const ExportsFormatter = struct { - ctx: *BundleV2, - pretty: string, - source_index: Index.Int, - shadow: *ShadowEntryPoint, - pub fn format(self: ExportsFormatter, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { - var this = self.ctx; - const named_exports: js_ast.Ast.NamedExports = this.graph.ast.items(.named_exports)[self.source_index]; - try writer.writeAll("{"); - var shadow = self.shadow; - try shadow.named_exports.ensureUnusedCapacity(this.graph.allocator, named_exports.count()); - const last = named_exports.count() - 1; - for (named_exports.keys(), 0..) |name, i| { - try shadow.named_exports.push(this.graph.allocator, .{ - .from = name, - .to = name, - .source_index = self.source_index, - }); - - try writer.writeAll(name); - - if (i < last) { - try writer.writeAll(" , "); - } - } - try writer.writeAll("}"); - } - }; -}; - fn getRedirectId(id: u32) ?u32 { if (id == std.math.maxInt(u32)) { return null; @@ -11998,3 +14765,379 @@ fn targetFromHashbang(buffer: []const u8) ?options.Target { return null; } + +/// Utility to construct `Ast`s intended for generated code, such as the +/// boundary modules when dealing with server components. 
This is a saner +/// alternative to building a string, then sending it through `js_parser` +/// +/// For in-depth details on the fields, most of these are documented +/// inside of `js_parser` +pub const AstBuilder = struct { + allocator: std.mem.Allocator, + source: *const Logger.Source, + source_index: u31, + stmts: std.ArrayListUnmanaged(Stmt), + scopes: std.ArrayListUnmanaged(*Scope), + symbols: std.ArrayListUnmanaged(Symbol), + import_records: std.ArrayListUnmanaged(ImportRecord), + named_imports: js_ast.Ast.NamedImports, + named_exports: js_ast.Ast.NamedExports, + import_records_for_current_part: std.ArrayListUnmanaged(u32), + export_star_import_records: std.ArrayListUnmanaged(u32), + current_scope: *Scope, + log: Logger.Log, + module_ref: Ref, + declared_symbols: js_ast.DeclaredSymbol.List, + /// When set, codegen is altered + hot_reloading: bool, + + // stub fields for ImportScanner duck typing + comptime options: js_parser.Parser.Options = .{ + .jsx = .{}, + .bundle = true, + }, + comptime import_items_for_namespace: struct { + pub fn get(_: @This(), _: Ref) ?js_parser.ImportItemForNamespaceMap { + return null; + } + } = .{}, + pub const parser_features = struct { + pub const typescript = false; + }; + + pub fn init(allocator: std.mem.Allocator, source: *const Logger.Source, hot_reloading: bool) !AstBuilder { + const scope = try allocator.create(Scope); + scope.* = .{ + .kind = .entry, + .label_ref = null, + .parent = null, + .generated = .{}, + }; + var ab: AstBuilder = .{ + .allocator = allocator, + .current_scope = scope, + .source = source, + .source_index = @intCast(source.index.get()), + .stmts = .{}, + .scopes = .{}, + .symbols = .{}, + .import_records = .{}, + .import_records_for_current_part = .{}, + .named_imports = .{}, + .named_exports = .{}, + .log = Logger.Log.init(allocator), + .export_star_import_records = .{}, + .module_ref = Ref.None, + .declared_symbols = .{}, + .hot_reloading = hot_reloading, + }; + ab.module_ref = try ab.newSymbol(.other, "module"); + return ab; + } + + pub fn pushScope(p: *AstBuilder, kind: Scope.Kind) *js_ast.Scope { + try p.scopes.ensureUnusedCapacity(p.allocator, 1); + try p.current_scope.children.ensureUnusedCapacity(p.allocator, 1); + const scope = try p.allocator.create(Scope); + scope.* = .{ + .kind = kind, + .label_ref = null, + .parent = p.current_scope, + .generated = .{}, + }; + p.current_scope.children.appendAssumeCapacity(scope); + p.scopes.appendAssumeCapacity(p.current_scope); + p.current_scope = scope; + return scope; + } + + pub fn popScope(p: *AstBuilder) void { + p.current_scope = p.scopes.pop(); + } + + pub fn newSymbol(p: *AstBuilder, kind: Symbol.Kind, identifier: []const u8) !Ref { + const inner_index: Ref.Int = @intCast(p.symbols.items.len); + try p.symbols.append(p.allocator, .{ + .kind = kind, + .original_name = identifier, + .debug_mode_source_index = if (Environment.allow_assert) @intCast(p.source_index) else 0, + }); + const ref: Ref = .{ + .inner_index = inner_index, + .source_index = p.source_index, + .tag = .symbol, + }; + try p.current_scope.generated.push(p.allocator, ref); + try p.declared_symbols.append(p.allocator, .{ + .ref = ref, + .is_top_level = p.scopes.items.len == 0 or p.current_scope == p.scopes.items[0], + }); + return ref; + } + + pub fn getSymbol(p: *AstBuilder, ref: Ref) *Symbol { + bun.assert(ref.source_index == p.source.index.get()); + return &p.symbols.items[ref.inner_index]; + } + + pub fn addImportRecord(p: *AstBuilder, path: []const u8, kind: ImportKind) !u32 { + const index = 
p.import_records.items.len; + try p.import_records.append(p.allocator, .{ + .path = bun.fs.Path.init(path), + .kind = kind, + .range = .{}, + }); + return @intCast(index); + } + + pub fn addImportStmt( + p: *AstBuilder, + path: []const u8, + identifiers_to_import: anytype, + ) ![identifiers_to_import.len]Expr { + var out: [identifiers_to_import.len]Expr = undefined; + + const record = try p.addImportRecord(path, .stmt); + + var path_name = bun.fs.PathName.init(path); + const name = try strings.append(p.allocator, "import_", try path_name.nonUniqueNameString(p.allocator)); + const namespace_ref = try p.newSymbol(.other, name); + + const clauses = try p.allocator.alloc(js_ast.ClauseItem, identifiers_to_import.len); + + inline for (identifiers_to_import, &out, clauses) |import_id_untyped, *out_ref, *clause| { + const import_id: []const u8 = import_id_untyped; // must be given '[N][]const u8' + const ref = try p.newSymbol(.import, import_id); + if (p.hot_reloading) { + p.getSymbol(ref).namespace_alias = .{ + .namespace_ref = namespace_ref, + .alias = import_id, + .import_record_index = record, + }; + } + out_ref.* = p.newExpr(E.ImportIdentifier{ .ref = ref }); + clause.* = .{ + .name = .{ .loc = Logger.Loc.Empty, .ref = ref }, + .original_name = import_id, + .alias = import_id, + }; + } + + try p.appendStmt(S.Import{ + .namespace_ref = namespace_ref, + .import_record_index = record, + .items = clauses, + .is_single_line = identifiers_to_import.len < 1, + }); + + return out; + } + + pub fn appendStmt(p: *AstBuilder, data: anytype) !void { + try p.stmts.ensureUnusedCapacity(p.allocator, 1); + p.stmts.appendAssumeCapacity(p.newStmt(data)); + } + + pub fn newStmt(p: *AstBuilder, data: anytype) Stmt { + _ = p; + return Stmt.alloc(@TypeOf(data), data, Logger.Loc.Empty); + } + + pub fn newExpr(p: *AstBuilder, data: anytype) Expr { + _ = p; + return Expr.init(@TypeOf(data), data, Logger.Loc.Empty); + } + + pub fn newExternalSymbol(p: *AstBuilder, name: []const u8) !Ref { + const ref = try p.newSymbol(.other, name); + const sym = p.getSymbol(ref); + sym.must_not_be_renamed = true; + return ref; + } + + pub fn toBundledAst(p: *AstBuilder, target: options.Target) !js_ast.BundledAst { + // TODO: missing import scanner + bun.assert(p.scopes.items.len == 0); + const module_scope = p.current_scope; + + var parts = try js_ast.Part.List.initCapacity(p.allocator, 2); + parts.len = 2; + parts.mut(0).* = .{}; + parts.mut(1).* = .{ + .stmts = p.stmts.items, + .can_be_removed_if_unused = false, + + // pretend that every symbol was used + .symbol_uses = uses: { + var map: js_ast.Part.SymbolUseMap = .{}; + try map.ensureTotalCapacity(p.allocator, p.symbols.items.len); + for (0..p.symbols.items.len) |i| { + map.putAssumeCapacity(Ref{ + .tag = .symbol, + .source_index = p.source_index, + .inner_index = @intCast(i), + }, .{ .count_estimate = 1 }); + } + break :uses map; + }, + }; + + const single_u32 = try BabyList(u32).fromSlice(p.allocator, &.{1}); + + var top_level_symbols_to_parts = js_ast.Ast.TopLevelSymbolToParts{}; + try top_level_symbols_to_parts.entries.setCapacity(p.allocator, module_scope.generated.len); + top_level_symbols_to_parts.entries.len = module_scope.generated.len; + const slice = top_level_symbols_to_parts.entries.slice(); + for ( + slice.items(.key), + slice.items(.value), + module_scope.generated.slice(), + ) |*k, *v, ref| { + k.* = ref; + v.* = single_u32; + } + try top_level_symbols_to_parts.reIndex(p.allocator); + + // For more details on this section, look at js_parser.toAST + // This is 
mimicking how it calls ImportScanner + if (p.hot_reloading) { + var hmr_transform_ctx = js_parser.ConvertESMExportsForHmr{ + .last_part = parts.last() orelse + unreachable, // was definitely allocated + }; + try hmr_transform_ctx.stmts.ensureTotalCapacity(p.allocator, prealloc_count: { + // get a estimate on how many statements there are going to be + const count = p.stmts.items.len; + break :prealloc_count count + 2; + }); + + _ = try js_parser.ImportScanner.scan(AstBuilder, p, p.stmts.items, false, true, &hmr_transform_ctx); + + const new_parts = try hmr_transform_ctx.finalize(p, parts.slice()); + // preserve original capacity + parts.len = @intCast(new_parts.len); + bun.assert(new_parts.ptr == parts.ptr); + } else { + const result = try js_parser.ImportScanner.scan(AstBuilder, p, p.stmts.items, false, false, {}); + parts.mut(1).stmts = result.stmts; + } + + parts.mut(1).declared_symbols = p.declared_symbols; + parts.mut(1).scopes = p.scopes.items; + parts.mut(1).import_record_indices = BabyList(u32).fromList(p.import_records_for_current_part); + + return .{ + .parts = parts, + .module_scope = module_scope.*, + .symbols = js_ast.Symbol.List.fromList(p.symbols), + .exports_ref = Ref.None, + .wrapper_ref = Ref.None, + .module_ref = p.module_ref, + .import_records = ImportRecord.List.fromList(p.import_records), + .export_star_import_records = &.{}, + .approximate_newline_count = 1, + .exports_kind = .esm, + .named_imports = p.named_imports, + .named_exports = p.named_exports, + .top_level_symbols_to_parts = top_level_symbols_to_parts, + .char_freq = .{}, + .flags = .{}, + .target = target, + .top_level_await_keyword = Logger.Range.None, + // .nested_scope_slot_counts = if (p.options.features.minify_identifiers) + // renamer.assignNestedScopeSlots(p.allocator, p.scopes.items[0], p.symbols.items) + // else + // js_ast.SlotCounts{}, + }; + } + + // stub methods for ImportScanner duck typing + + pub fn generateTempRef(ab: *AstBuilder, name: ?[]const u8) Ref { + return ab.newSymbol(.other, name orelse "temp") catch bun.outOfMemory(); + } + + pub fn recordExport(p: *AstBuilder, _: Logger.Loc, alias: []const u8, ref: Ref) !void { + if (p.named_exports.get(alias)) |_| { + // Duplicate exports are an error + Output.panic( + "In generated file, duplicate export \"{s}\"", + .{alias}, + ); + } else { + try p.named_exports.put(p.allocator, alias, .{ .alias_loc = Logger.Loc.Empty, .ref = ref }); + } + } + + pub fn recordExportedBinding(p: *AstBuilder, binding: Binding) void { + switch (binding.data) { + .b_missing => {}, + .b_identifier => |ident| { + p.recordExport(binding.loc, p.symbols.items[ident.ref.innerIndex()].original_name, ident.ref) catch unreachable; + }, + .b_array => |array| { + for (array.items) |prop| { + p.recordExportedBinding(prop.binding); + } + }, + .b_object => |obj| { + for (obj.properties) |prop| { + p.recordExportedBinding(prop.value); + } + }, + } + } + + pub fn ignoreUsage(p: *AstBuilder, ref: Ref) void { + _ = p; + _ = ref; + } + + pub fn panic(p: *AstBuilder, comptime fmt: []const u8, args: anytype) noreturn { + _ = p; + Output.panic(fmt, args); + } + + pub fn @"module.exports"(p: *AstBuilder, loc: Logger.Loc) Expr { + return p.newExpr(E.Dot{ .name = "exports", .name_loc = loc, .target = p.newExpr(E.Identifier{ .ref = p.module_ref }) }); + } +}; + +/// The lifetime of output pointers is tied to the bundler's arena +pub const BakeBundleOutput = struct { + chunks: []Chunk, + css_file_list: struct { + indexes: []const Index, + metas: []const CssEntryPointMeta, + }, + + pub const 
CssEntryPointMeta = struct { + /// When this is true, a stub file is added to the Server's IncrementalGraph + imported_on_server: bool, + }; + + pub fn jsPseudoChunk(out: BakeBundleOutput) *Chunk { + return &out.chunks[0]; + } + + pub fn cssChunks(out: BakeBundleOutput) []Chunk { + return out.chunks[1..]; + } +}; + +pub fn generateUniqueKey() u64 { + const key = std.crypto.random.int(u64) & @as(u64, 0x0FFFFFFF_FFFFFFFF); + // without this check, putting unique_key in an object key would + // sometimes get converted to an identifier. ensuring it starts + // with a number forces that optimization off. + if (Environment.isDebug) { + var buf: [16]u8 = undefined; + const hex = std.fmt.bufPrint(&buf, "{}", .{bun.fmt.hexIntLower(key)}) catch + unreachable; + switch (hex[0]) { + '0'...'9' => {}, + else => Output.panic("unique key is a valid identifier: {s}", .{hex}), + } + } + return key; +} diff --git a/src/bundler/entry_points.zig b/src/bundler/entry_points.zig index 1237f02fa4..ac4e4fbb70 100644 --- a/src/bundler/entry_points.zig +++ b/src/bundler/entry_points.zig @@ -172,28 +172,31 @@ pub const ServerEntryPoint = struct { break :brk try std.fmt.allocPrint( allocator, \\// @bun - \\var hmrSymbol = Symbol.for("BunServerHMR"); \\import * as start from '{}'; + \\var hmrSymbol = Symbol("BunServerHMR"); \\var entryNamespace = start; \\if (typeof entryNamespace?.then === 'function') {{ \\ entryNamespace = entryNamespace.then((entryNamespace) => {{ - \\ if(typeof entryNamespace?.default?.fetch === 'function') {{ + \\ var def = entryNamespace?.default; + \\ if (def && (typeof def.fetch === 'function' || def.app != undefined)) {{ \\ var server = globalThis[hmrSymbol]; \\ if (server) {{ - \\ server.reload(entryNamespace.default); + \\ server.reload(def); + \\ console.debug(`Reloaded ${{server.development ? 'development ' : ''}}server ${{server.protocol}}://${{server.hostname}}:${{server.port}}`); \\ }} else {{ - \\ server = globalThis[hmrSymbol] = Bun.serve(entryNamespace.default); - \\ console.debug(`Started server ${{server.protocol}}://${{server.hostname}}:${{server.port}}`); + \\ server = globalThis[hmrSymbol] = Bun.serve(def); + \\ console.debug(`Started ${{server.development ? 'development ' : ''}}server ${{server.protocol}}://${{server.hostname}}:${{server.port}}`); \\ }} \\ }} \\ }}, reportError); - \\}} else if (typeof entryNamespace?.default?.fetch === 'function') {{ + \\}} else if (typeof entryNamespace?.default?.fetch === 'function' || entryNamespace?.default?.app != undefined) {{ \\ var server = globalThis[hmrSymbol]; \\ if (server) {{ \\ server.reload(entryNamespace.default); + \\ console.debug(`Reloaded ${{server.development ? 'development ' : ''}}server ${{server.protocol}}://${{server.hostname}}:${{server.port}}`); \\ }} else {{ \\ server = globalThis[hmrSymbol] = Bun.serve(entryNamespace.default); - \\ console.debug(`Started server ${{server.protocol}}://${{server.hostname}}:${{server.port}}`); + \\ console.debug(`Started ${{server.development ? 
'development ' : ''}}server ${{server.protocol}}://${{server.hostname}}:${{server.port}}`); \\ }} \\}} \\ @@ -210,12 +213,14 @@ pub const ServerEntryPoint = struct { \\var entryNamespace = start; \\if (typeof entryNamespace?.then === 'function') {{ \\ entryNamespace = entryNamespace.then((entryNamespace) => {{ - \\ if(typeof entryNamespace?.default?.fetch === 'function') {{ - \\ Bun.serve(entryNamespace.default); + \\ if (typeof entryNamespace?.default?.fetch === 'function') {{ + \\ const server = Bun.serve(entryNamespace.default); + \\ console.debug(`Started ${{server.development ? 'development ' : ''}}server ${{server.protocol}}://${{server.hostname}}:${{server.port}}`); \\ }} \\ }}, reportError); - \\}} else if (typeof entryNamespace?.default?.fetch === 'function') {{ - \\ Bun.serve(entryNamespace.default); + \\}} else if (typeof entryNamespace?.default?.fetch === 'function' || entryNamespace?.default?.app != null) {{ + \\ const server = Bun.serve(entryNamespace.default); + \\ console.debug(`Started ${{server.development ? 'development ' : ''}}server ${{server.protocol}}://${{server.hostname}}:${{server.port}}`); \\}} \\ , diff --git a/src/bunfig.zig b/src/bunfig.zig index ede8389cde..4f1d73d68c 100644 --- a/src/bunfig.zig +++ b/src/bunfig.zig @@ -52,12 +52,20 @@ pub const Bunfig = struct { ctx: Command.Context, fn addError(this: *Parser, loc: logger.Loc, comptime text: string) !void { - this.log.addError(this.source, loc, text) catch unreachable; + this.log.addErrorOpts(text, .{ + .source = this.source, + .loc = loc, + .redact_sensitive_information = true, + }) catch unreachable; return error.@"Invalid Bunfig"; } fn addErrorFormat(this: *Parser, loc: logger.Loc, allocator: std.mem.Allocator, comptime text: string, args: anytype) !void { - this.log.addErrorFmt(this.source, loc, allocator, text, args) catch unreachable; + this.log.addErrorFmtOpts(allocator, text, args, .{ + .source = this.source, + .loc = loc, + .redact_sensitive_information = true, + }) catch unreachable; return error.@"Invalid Bunfig"; } @@ -336,15 +344,15 @@ pub const Bunfig = struct { } if (comptime cmd.isNPMRelated() or cmd == .RunCommand or cmd == .AutoCommand) { - if (json.get("install")) |_bun| { + if (json.getObject("install")) |install_obj| { var install: *Api.BunInstall = this.ctx.install orelse brk: { - const install_ = try this.allocator.create(Api.BunInstall); - install_.* = std.mem.zeroes(Api.BunInstall); - this.ctx.install = install_; - break :brk install_; + const install = try this.allocator.create(Api.BunInstall); + install.* = std.mem.zeroes(Api.BunInstall); + this.ctx.install = install; + break :brk install; }; - if (_bun.get("auto")) |auto_install_expr| { + if (install_obj.get("auto")) |auto_install_expr| { if (auto_install_expr.data == .e_string) { this.ctx.debug.global_cache = options.GlobalCache.Map.get(auto_install_expr.asString(this.allocator) orelse "") orelse { try this.addError(auto_install_expr.loc, "Invalid auto install setting, must be one of true, false, or \"force\" \"fallback\" \"disable\""); @@ -361,13 +369,46 @@ pub const Bunfig = struct { } } - if (_bun.get("exact")) |exact| { + if (install_obj.get("cafile")) |cafile| { + install.cafile = try cafile.asStringCloned(allocator) orelse { + try this.addError(cafile.loc, "Invalid cafile. Expected a string."); + return; + }; + } + + if (install_obj.get("ca")) |ca| { + switch (ca.data) { + .e_array => |arr| { + var list = try allocator.alloc([]const u8, arr.items.len); + for (arr.items.slice(), 0..) 
|item, i| { + list[i] = try item.asStringCloned(allocator) orelse { + try this.addError(item.loc, "Invalid CA. Expected a string."); + return; + }; + } + install.ca = .{ + .list = list, + }; + }, + .e_string => |str| { + install.ca = .{ + .str = try str.stringCloned(allocator), + }; + }, + else => { + try this.addError(ca.loc, "Invalid CA. Expected a string or an array of strings."); + return; + }, + } + } + + if (install_obj.get("exact")) |exact| { if (exact.asBool()) |value| { install.exact = value; } } - if (_bun.get("prefer")) |prefer_expr| { + if (install_obj.get("prefer")) |prefer_expr| { try this.expectString(prefer_expr); if (Prefer.get(prefer_expr.asString(bun.default_allocator) orelse "")) |setting| { @@ -377,11 +418,11 @@ pub const Bunfig = struct { } } - if (_bun.get("registry")) |registry| { + if (install_obj.get("registry")) |registry| { install.default_registry = try this.parseRegistry(registry); } - if (_bun.get("scopes")) |scopes| { + if (install_obj.get("scopes")) |scopes| { var registry_map = install.scoped orelse Api.NpmRegistryMap{}; try this.expect(scopes, .e_object); @@ -399,32 +440,32 @@ pub const Bunfig = struct { install.scoped = registry_map; } - if (_bun.get("dryRun")) |dry_run| { + if (install_obj.get("dryRun")) |dry_run| { if (dry_run.asBool()) |value| { install.dry_run = value; } } - if (_bun.get("production")) |production| { + if (install_obj.get("production")) |production| { if (production.asBool()) |value| { install.production = value; } } - if (_bun.get("frozenLockfile")) |frozen_lockfile| { + if (install_obj.get("frozenLockfile")) |frozen_lockfile| { if (frozen_lockfile.asBool()) |value| { install.frozen_lockfile = value; } } - if (_bun.get("concurrentScripts")) |jobs| { + if (install_obj.get("concurrentScripts")) |jobs| { if (jobs.data == .e_number) { install.concurrent_scripts = jobs.data.e_number.toU32(); if (install.concurrent_scripts.? 
== 0) install.concurrent_scripts = null; } } - if (_bun.get("lockfile")) |lockfile_expr| { + if (install_obj.get("lockfile")) |lockfile_expr| { if (lockfile_expr.get("print")) |lockfile| { try this.expectString(lockfile); if (lockfile.asString(this.allocator)) |value| { @@ -457,41 +498,41 @@ pub const Bunfig = struct { } } - if (_bun.get("optional")) |optional| { + if (install_obj.get("optional")) |optional| { if (optional.asBool()) |value| { install.save_optional = value; } } - if (_bun.get("peer")) |optional| { + if (install_obj.get("peer")) |optional| { if (optional.asBool()) |value| { install.save_peer = value; } } - if (_bun.get("dev")) |optional| { + if (install_obj.get("dev")) |optional| { if (optional.asBool()) |value| { install.save_dev = value; } } - if (_bun.get("globalDir")) |dir| { + if (install_obj.get("globalDir")) |dir| { if (dir.asString(allocator)) |value| { install.global_dir = value; } } - if (_bun.get("globalBinDir")) |dir| { + if (install_obj.get("globalBinDir")) |dir| { if (dir.asString(allocator)) |value| { install.global_bin_dir = value; } } - if (_bun.get("logLevel")) |expr| { + if (install_obj.get("logLevel")) |expr| { try this.loadLogLevel(expr); } - if (_bun.get("cache")) |cache| { + if (install_obj.get("cache")) |cache| { load: { if (cache.asBool()) |value| { if (!value) { @@ -684,20 +725,6 @@ pub const Bunfig = struct { jsx.development = jsx_dev; } - switch (comptime cmd) { - .AutoCommand, .BuildCommand => { - if (json.get("publicDir")) |public_dir| { - try this.expectString(public_dir); - this.bunfig.router = Api.RouteConfig{ - .extensions = &.{}, - .dir = &.{}, - .static_dir = try public_dir.data.e_string.string(allocator), - }; - } - }, - else => {}, - } - if (json.get("debug")) |expr| { if (expr.get("editor")) |editor| { if (editor.asString(allocator)) |value| { @@ -738,13 +765,6 @@ pub const Bunfig = struct { } } - if (json.get("framework")) |expr| { - try this.expectString(expr); - this.bunfig.framework = Api.FrameworkConfig{ - .package = expr.asString(allocator).?, - }; - } - if (json.get("loader")) |expr| { try this.expect(expr, .e_object); const properties = expr.data.e_object.properties.slice(); @@ -777,11 +797,20 @@ pub const Bunfig = struct { pub fn expectString(this: *Parser, expr: js_ast.Expr) !void { switch (expr.data) { - .e_string, .e_utf8_string => {}, + .e_string => {}, else => { - this.log.addErrorFmt(this.source, expr.loc, this.allocator, "expected string but received {}", .{ - @as(js_ast.Expr.Tag, expr.data), - }) catch unreachable; + this.log.addErrorFmtOpts( + this.allocator, + "expected string but received {}", + .{ + @as(js_ast.Expr.Tag, expr.data), + }, + .{ + .source = this.source, + .loc = expr.loc, + .redact_sensitive_information = true, + }, + ) catch unreachable; return error.@"Invalid Bunfig"; }, } @@ -789,10 +818,19 @@ pub const Bunfig = struct { pub fn expect(this: *Parser, expr: js_ast.Expr, token: js_ast.Expr.Tag) !void { if (@as(js_ast.Expr.Tag, expr.data) != token) { - this.log.addErrorFmt(this.source, expr.loc, this.allocator, "expected {} but received {}", .{ - token, - @as(js_ast.Expr.Tag, expr.data), - }) catch unreachable; + this.log.addErrorFmtOpts( + this.allocator, + "expected {} but received {}", + .{ + token, + @as(js_ast.Expr.Tag, expr.data), + }, + .{ + .source = this.source, + .loc = expr.loc, + .redact_sensitive_information = true, + }, + ) catch unreachable; return error.@"Invalid Bunfig"; } } @@ -801,14 +839,20 @@ pub const Bunfig = struct { pub fn parse(allocator: std.mem.Allocator, source: 
logger.Source, ctx: Command.Context, comptime cmd: Command.Tag) !void { const log_count = ctx.log.errors + ctx.log.warnings; - const expr = if (strings.eqlComptime(source.path.name.ext[1..], "toml")) TOML.parse(&source, ctx.log, allocator) catch |err| { + const expr = if (strings.eqlComptime(source.path.name.ext[1..], "toml")) TOML.parse(&source, ctx.log, allocator, true) catch |err| { if (ctx.log.errors + ctx.log.warnings == log_count) { - ctx.log.addErrorFmt(&source, logger.Loc.Empty, allocator, "Failed to parse", .{}) catch unreachable; + try ctx.log.addErrorOpts("Failed to parse", .{ + .source = &source, + .redact_sensitive_information = true, + }); } return err; - } else JSONParser.ParseTSConfig(&source, ctx.log, allocator) catch |err| { + } else JSONParser.parseTSConfig(&source, ctx.log, allocator, true) catch |err| { if (ctx.log.errors + ctx.log.warnings == log_count) { - ctx.log.addErrorFmt(&source, logger.Loc.Empty, allocator, "Failed to parse", .{}) catch unreachable; + try ctx.log.addErrorOpts("Failed to parse", .{ + .source = &source, + .redact_sensitive_information = true, + }); } return err; }; diff --git a/src/c.zig b/src/c.zig index ff9226a660..1579667ab9 100644 --- a/src/c.zig +++ b/src/c.zig @@ -31,12 +31,6 @@ pub extern "c" fn fchmodat(c_int, [*c]const u8, mode_t, c_int) c_int; pub extern "c" fn fchown(std.c.fd_t, std.c.uid_t, std.c.gid_t) c_int; pub extern "c" fn lchown(path: [*:0]const u8, std.c.uid_t, std.c.gid_t) c_int; pub extern "c" fn chown(path: [*:0]const u8, std.c.uid_t, std.c.gid_t) c_int; -// TODO: this is wrong on Windows -pub extern "c" fn lstat64([*c]const u8, [*c]libc_stat) c_int; -// TODO: this is wrong on Windows -pub extern "c" fn fstat64([*c]const u8, [*c]libc_stat) c_int; -// TODO: this is wrong on Windows -pub extern "c" fn stat64([*c]const u8, [*c]libc_stat) c_int; pub extern "c" fn lchmod(path: [*:0]const u8, mode: mode_t) c_int; pub extern "c" fn truncate([*:0]const u8, i64) c_int; // note: truncate64 is not a thing @@ -46,19 +40,31 @@ pub extern "c" fn mkdtemp(template: [*c]u8) ?[*:0]u8; pub extern "c" fn memcmp(s1: [*c]const u8, s2: [*c]const u8, n: usize) c_int; pub extern "c" fn memchr(s: [*]const u8, c: u8, n: usize) ?[*]const u8; -pub const lstat = lstat64; -pub const fstat = fstat64; -pub const stat = stat64; - pub extern "c" fn strchr(str: [*]const u8, char: u8) ?[*]const u8; +pub const lstat = blk: { + const T = *const fn ([*c]const u8, [*c]libc_stat) callconv(.C) c_int; // TODO: this is wrong on Windows + if (bun.Environment.isMusl) break :blk @extern(T, .{ .library_name = "c", .name = "lstat" }); + break :blk @extern(T, .{ .name = "lstat64" }); +}; +pub const fstat = blk: { + const T = *const fn (c_int, [*c]libc_stat) callconv(.C) c_int; // TODO: this is wrong on Windows + if (bun.Environment.isMusl) break :blk @extern(T, .{ .library_name = "c", .name = "fstat" }); + break :blk @extern(T, .{ .name = "fstat64" }); +}; +pub const stat = blk: { + const T = *const fn ([*c]const u8, [*c]libc_stat) callconv(.C) c_int; // TODO: this is wrong on Windows + if (bun.Environment.isMusl) break :blk @extern(T, .{ .library_name = "c", .name = "stat" }); + break :blk @extern(T, .{ .name = "stat64" }); +}; + pub fn lstat_absolute(path: [:0]const u8) !Stat { if (builtin.os.tag == .windows) { @compileError("Not implemented yet, conside using bun.sys.lstat()"); } var st = zeroes(libc_stat); - switch (errno(lstat64(path.ptr, &st))) { + switch (errno(lstat(path.ptr, &st))) { .SUCCESS => {}, .NOENT => return error.FileNotFound, // .EINVAL => unreachable, 
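For reference, the bunfig `install.cafile` option added above accepts a file path string, while `install.ca` accepts either a single certificate string or an array of strings. A small TypeScript sketch of that validation logic follows; the interface and function names are illustrative and independent of Bun's actual types, though the error messages mirror the ones emitted above.

```ts
// Mirrors the accepted shapes of the bunfig `install.ca` / `install.cafile` fields.
interface InstallCaConfig {
  ca?: string | string[];
  cafile?: string;
}

function readCaOptions(raw: Record<string, unknown>): InstallCaConfig {
  const out: InstallCaConfig = {};

  if (typeof raw.cafile === "string") {
    out.cafile = raw.cafile;
  } else if (raw.cafile !== undefined) {
    throw new Error("Invalid cafile. Expected a string.");
  }

  if (typeof raw.ca === "string") {
    out.ca = raw.ca;
  } else if (Array.isArray(raw.ca)) {
    if (!raw.ca.every(item => typeof item === "string")) {
      throw new Error("Invalid CA. Expected a string.");
    }
    out.ca = raw.ca as string[];
  } else if (raw.ca !== undefined) {
    throw new Error("Invalid CA. Expected a string or an array of strings.");
  }

  return out;
}
```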
diff --git a/src/cache.zig b/src/cache.zig index b82e2e10b7..4ea8616cac 100644 --- a/src/cache.zig +++ b/src/cache.zig @@ -294,12 +294,12 @@ pub const Json = struct { pub fn init(_: std.mem.Allocator) Json { return Json{}; } - fn parse(_: *@This(), log: *logger.Log, source: logger.Source, allocator: std.mem.Allocator, comptime func: anytype) anyerror!?js_ast.Expr { + fn parse(_: *@This(), log: *logger.Log, source: logger.Source, allocator: std.mem.Allocator, comptime func: anytype, comptime force_utf8: bool) anyerror!?js_ast.Expr { var temp_log = logger.Log.init(allocator); defer { temp_log.appendToMaybeRecycled(log, &source) catch {}; } - return func(&source, &temp_log, allocator) catch handler: { + return func(&source, &temp_log, allocator, force_utf8) catch handler: { break :handler null; }; } @@ -308,17 +308,17 @@ pub const Json = struct { // They are JSON files with comments and trailing commas. // Sometimes tooling expects this to work. if (source.path.isJSONCFile()) { - return try parse(cache, log, source, allocator, json_parser.ParseTSConfig); + return try parse(cache, log, source, allocator, json_parser.parseTSConfig, true); } - return try parse(cache, log, source, allocator, json_parser.ParseJSON); + return try parse(cache, log, source, allocator, json_parser.parse, false); } - pub fn parsePackageJSON(cache: *@This(), log: *logger.Log, source: logger.Source, allocator: std.mem.Allocator) anyerror!?js_ast.Expr { - return try parse(cache, log, source, allocator, json_parser.ParseTSConfig); + pub fn parsePackageJSON(cache: *@This(), log: *logger.Log, source: logger.Source, allocator: std.mem.Allocator, comptime force_utf8: bool) anyerror!?js_ast.Expr { + return try parse(cache, log, source, allocator, json_parser.parseTSConfig, force_utf8); } pub fn parseTSConfig(cache: *@This(), log: *logger.Log, source: logger.Source, allocator: std.mem.Allocator) anyerror!?js_ast.Expr { - return try parse(cache, log, source, allocator, json_parser.ParseTSConfig); + return try parse(cache, log, source, allocator, json_parser.parseTSConfig, true); } }; diff --git a/src/ci_info.zig b/src/ci_info.zig new file mode 100644 index 0000000000..ed88efc291 --- /dev/null +++ b/src/ci_info.zig @@ -0,0 +1,421 @@ +// A modified port of ci-info@4.0.0 (https://github.com/watson/ci-info) +// Only gets the CI name, `isPR` is not implemented. 
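The detection strategy ported below comes down to environment-variable checks: bail out when `CI=false`, special-case Heroku via the `NODE` path, and otherwise look for vendor-specific variables (some requiring an exact value). A condensed TypeScript sketch of the same approach, with the vendor table truncated to a few entries for brevity; it is a model of the idea, not the ported code.

```ts
// Condensed sketch of env-based CI detection (vendor table heavily truncated).
// Each vendor lists [envVar, requiredValue?] pairs that must all be present.
const vendors: Record<string, [name: string, value?: string][]> = {
  buildkite: [["BUILDKITE"]],
  circleci: [["CIRCLECI"]],
  codeship: [["CI_NAME", "codeship"]],
};

function detectCI(env: Record<string, string | undefined>): string | null {
  if (env.CI === "false") return null;

  // Heroku sets no dedicated variable; it is inferred from the NODE path.
  if (env.NODE?.includes("/app/.heroku/node/bin/node")) return "heroku";

  for (const [ci, pairs] of Object.entries(vendors)) {
    const matched = pairs.every(([name, value]) => {
      const actual = env[name];
      if (actual === undefined) return false;
      return value === undefined || actual === value;
    });
    if (matched) return ci;
  }
  return null;
}

console.log(detectCI({ BUILDKITE: "true" })); // "buildkite"
```

Note that some vendors in the real table use "any of these variables" semantics instead of "all of them"; that distinction is what the leading `bool` in each entry encodes.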
+ +// Names are changed to match what `npm publish` uses +// https://github.com/npm/cli/blob/63d6a732c3c0e9c19fd4d147eaa5cc27c29b168d/workspaces/config/lib/definitions/definitions.js#L2129 +// `name.toLowerCase().split(' ').join('-')` + +const std = @import("std"); +const bun = @import("root").bun; +const strings = bun.strings; + +var ci_name: ?[]const u8 = null; + +pub fn detectCI() ?[]const u8 { + const ci = ci_name orelse ci_name: { + CI.once.call(); + break :ci_name ci_name.?; + }; + + return if (ci.len == 0) null else ci; +} + +const CI = enum { + @"agola-ci", + appcircle, + appveyor, + @"aws-codebuild", + @"azure-pipelines", + bamboo, + @"bitbucket-pipelines", + bitrise, + buddy, + buildkite, + circleci, + @"cirrus-ci", + codefresh, + codemagic, + codeship, + drone, + dsari, + earthly, + @"expo-application-services", + gerrit, + @"gitea-actions", + @"github-actions", + @"gitlab-ci", + gocd, + @"google-cloud-build", + @"harness-ci", + // heroku, + hudson, + jenkins, + layerci, + @"magnum-ci", + @"netlify-ci", + nevercode, + prow, + releasehub, + render, + @"sail-ci", + screwdriver, + semaphore, + sourcehut, + @"strider-cd", + taskcluster, + teamcity, + @"travis-ci", + vela, + vercel, + @"visual-studio-app-center", + woodpecker, + @"xcode-cloud", + @"xcode-server", + + pub var once = std.once(struct { + pub fn once() void { + var name: []const u8 = ""; + defer ci_name = name; + + if (bun.getenvZ("CI")) |ci| { + if (strings.eqlComptime(ci, "false")) { + return; + } + } + + // Special case Heroku + if (bun.getenvZ("NODE")) |node| { + if (strings.containsComptime(node, "/app/.heroku/node/bin/node")) { + name = "heroku"; + return; + } + } + + ci: for (CI.array.values, 0..) |item, i| { + const any, const pairs = item; + + pairs: for (pairs) |pair| { + const key, const value = pair; + + if (bun.getenvZ(key)) |env| { + if (value.len == 0 or bun.strings.eqlLong(env, value, true)) { + if (!any) continue :pairs; + + name = @tagName(Array.Indexer.keyForIndex(i)); + return; + } + } + + if (!any) continue :ci; + } + + if (!any) { + name = @tagName(Array.Indexer.keyForIndex(i)); + return; + } + } + } + }.once); + + pub const Array = std.EnumArray(CI, struct { bool, []const [2][:0]const u8 }); + + pub const array = Array.init(.{ + .@"agola-ci" = .{ + false, + &.{ + .{ "AGOLA_GIT_REF", "" }, + }, + }, + .appcircle = .{ + false, + &.{ + .{ "AC_APPCIRCLE", "" }, + }, + }, + .appveyor = .{ + false, + &.{ + .{ "APPVEYOR", "" }, + }, + }, + .@"aws-codebuild" = .{ + false, + &.{ + .{ "CODEBUILD_BUILD_ARN", "" }, + }, + }, + .@"azure-pipelines" = .{ + false, + &.{ + .{ "TF_BUILD", "" }, + }, + }, + .bamboo = .{ + false, + &.{ + .{ "bamboo_planKey", "" }, + }, + }, + .@"bitbucket-pipelines" = .{ + false, + &.{ + .{ "BITBUCKET_COMMIT", "" }, + }, + }, + .bitrise = .{ + false, + &.{ + .{ "BITRISE_IO", "" }, + }, + }, + .buddy = .{ + false, + &.{ + .{ "BUDDY_WORKSPACE_ID", "" }, + }, + }, + .buildkite = .{ + false, + &.{ + .{ "BUILDKITE", "" }, + }, + }, + .circleci = .{ + false, + &.{ + .{ "CIRCLECI", "" }, + }, + }, + .@"cirrus-ci" = .{ + false, + &.{ + .{ "CIRRUS_CI", "" }, + }, + }, + .codefresh = .{ + false, + &.{ + .{ "CF_BUILD_ID", "" }, + }, + }, + .codemagic = .{ + false, + &.{ + .{ "CM_BUILD_ID", "" }, + }, + }, + .codeship = .{ + false, + &.{ + .{ "CI_NAME", "codeship" }, + }, + }, + .drone = .{ + false, + &.{ + .{ "DRONE", "" }, + }, + }, + .dsari = .{ + false, + &.{ + .{ "DSARI", "" }, + }, + }, + .earthly = .{ + false, + &.{ + .{ "EARTHLY_CI", "" }, + }, + }, + .@"expo-application-services" = .{ + 
false, + &.{ + .{ "EAS_BUILD", "" }, + }, + }, + .gerrit = .{ + false, + &.{ + .{ "GERRIT_PROJECT", "" }, + }, + }, + .@"gitea-actions" = .{ + false, + &.{ + .{ "GITEA_ACTIONS", "" }, + }, + }, + .@"github-actions" = .{ + false, + &.{ + .{ "GITHUB_ACTIONS", "" }, + }, + }, + .@"gitlab-ci" = .{ + false, + &.{ + .{ "GITLAB_CI", "" }, + }, + }, + .gocd = .{ + false, + &.{ + .{ "GO_PIPELINE_LABEL", "" }, + }, + }, + .@"google-cloud-build" = .{ + false, + &.{ + .{ "BUILDER_OUTPUT", "" }, + }, + }, + .@"harness-ci" = .{ + false, + &.{ + .{ "HARNESS_BUILD_ID", "" }, + }, + }, + .hudson = .{ + false, + &.{ + .{ "HUDSON_URL", "" }, + }, + }, + .jenkins = .{ + false, + &.{ + .{ "JENKINS_URL", "" }, + .{ "BUILD_ID", "" }, + }, + }, + .layerci = .{ + false, + &.{ + .{ "LAYERCI", "" }, + }, + }, + .@"magnum-ci" = .{ + false, + &.{ + .{ "MAGNUM", "" }, + }, + }, + .@"netlify-ci" = .{ + false, + &.{ + .{ "NETLIFY", "" }, + }, + }, + .nevercode = .{ + false, + &.{ + .{ "NEVERCODE", "" }, + }, + }, + .prow = .{ + false, + &.{ + .{ "PROW_JOB_ID", "" }, + }, + }, + .releasehub = .{ + false, + &.{ + .{ "RELEASE_BUILD_ID", "" }, + }, + }, + .render = .{ + false, + &.{ + .{ "RENDER", "" }, + }, + }, + .@"sail-ci" = .{ + false, + &.{ + .{ "SAILCI", "" }, + }, + }, + .screwdriver = .{ + false, + &.{ + .{ "SCREWDRIVER", "" }, + }, + }, + .semaphore = .{ + false, + &.{ + .{ "SEMAPHORE", "" }, + }, + }, + .sourcehut = .{ + false, + &.{ + .{ "CI_NAME", "sourcehut" }, + }, + }, + .@"strider-cd" = .{ + false, + &.{ + .{ "STRIDER", "" }, + }, + }, + .taskcluster = .{ + false, + &.{ + .{ "TASK_ID", "" }, + .{ "RUN_ID", "" }, + }, + }, + .teamcity = .{ + false, + &.{ + .{ "TEAMCITY_VERSION", "" }, + }, + }, + .@"travis-ci" = .{ + false, + &.{ + .{ "TRAVIS", "" }, + }, + }, + .vela = .{ + false, + &.{ + .{ "VELA", "" }, + }, + }, + .vercel = .{ + true, + &.{ + .{ "NOW_BUILDER", "" }, + .{ "VERCEL", "" }, + }, + }, + .@"visual-studio-app-center" = .{ + false, + &.{ + .{ "APPCENTER_BUILD_ID", "" }, + }, + }, + .woodpecker = .{ + false, + &.{ + .{ "CI", "woodpecker" }, + }, + }, + .@"xcode-cloud" = .{ + false, + &.{ + .{ "CI_XCODE_PROJECT", "" }, + }, + }, + .@"xcode-server" = .{ + false, + &.{ + .{ "XCS", "" }, + }, + }, + }); +}; diff --git a/src/cli.zig b/src/cli.zig index 31946329c6..f76416230c 100644 --- a/src/cli.zig +++ b/src/cli.zig @@ -44,6 +44,7 @@ const MacroMap = @import("./resolver/package_json.zig").MacroMap; const TestCommand = @import("./cli/test_command.zig").TestCommand; pub var start_time: i128 = undefined; const Bunfig = @import("./bunfig.zig").Bunfig; +const OOM = bun.OOM; pub const Cli = struct { pub const CompileTarget = @import("./compile_target.zig"); @@ -60,7 +61,7 @@ pub const Cli = struct { // var panicker = MainPanicHandler.init(log); // MainPanicHandler.Singleton = &panicker; Command.start(allocator, log) catch |err| { - log.printForLogLevel(Output.errorWriter()) catch {}; + log.print(Output.errorWriter()) catch {}; bun.crash_handler.handleRootError(err, @errorReturnTrace()); }; @@ -72,6 +73,7 @@ pub const Cli = struct { pub const debug_flags = if (Environment.isDebug) struct { var resolve_breakpoints: []const []const u8 = &.{}; + var print_breakpoints: []const []const u8 = &.{}; pub fn hasResolveBreakpoint(str: []const u8) bool { for (resolve_breakpoints) |bp| { @@ -81,6 +83,18 @@ pub const debug_flags = if (Environment.isDebug) struct { } return false; } + + pub fn hasPrintBreakpoint(path: fs.Path) bool { + for (print_breakpoints) |bp| { + if (strings.contains(path.pretty, bp)) { + return true; 
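Returning to the new `src/ci_info.zig` above: the `std.once` body walks the `EnumArray`, where each provider lists env-var/value pairs plus a leading bool that decides whether every pair must match (the common case, e.g. Jenkins needs both `JENKINS_URL` and `BUILD_ID`) or any single pair is enough (Vercel is the only `true` entry). An empty expected value just means "the variable is set", and `CI=false` opts out entirely. A simplified, self-contained sketch of that lookup, using `std.posix.getenv` and a plain struct array instead of `bun.getenvZ` and the `EnumArray` (POSIX-only, and it skips the Heroku special case):

```zig
const std = @import("std");

const Provider = struct {
    name: []const u8,
    /// false => every pair must match; true => any single match is enough.
    any: bool,
    pairs: []const [2][]const u8,
};

const providers = [_]Provider{
    .{ .name = "github-actions", .any = false, .pairs = &.{.{ "GITHUB_ACTIONS", "" }} },
    .{ .name = "jenkins", .any = false, .pairs = &.{ .{ "JENKINS_URL", "" }, .{ "BUILD_ID", "" } } },
    .{ .name = "codeship", .any = false, .pairs = &.{.{ "CI_NAME", "codeship" }} },
    .{ .name = "vercel", .any = true, .pairs = &.{ .{ "NOW_BUILDER", "" }, .{ "VERCEL", "" } } },
};

fn pairMatches(pair: [2][]const u8) bool {
    const env = std.posix.getenv(pair[0]) orelse return false;
    return pair[1].len == 0 or std.mem.eql(u8, env, pair[1]);
}

pub fn detect() ?[]const u8 {
    if (std.posix.getenv("CI")) |ci| {
        if (std.mem.eql(u8, ci, "false")) return null; // explicit opt-out
    }
    outer: for (providers) |p| {
        if (p.any) {
            for (p.pairs) |pair| {
                if (pairMatches(pair)) return p.name;
            }
        } else {
            for (p.pairs) |pair| {
                if (!pairMatches(pair)) continue :outer;
            }
            return p.name;
        }
    }
    return null;
}
```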
+ } + if (strings.contains(path.text, bp)) { + return true; + } + } + return false; + } } else @compileError("Do not access this namespace in a release build"); const LoaderMatcher = strings.ExactSizeMatcher(4); @@ -115,6 +129,10 @@ pub const BunxCommand = @import("./cli/bunx_command.zig").BunxCommand; pub const ExecCommand = @import("./cli/exec_command.zig").ExecCommand; pub const PatchCommand = @import("./cli/patch_command.zig").PatchCommand; pub const PatchCommitCommand = @import("./cli/patch_commit_command.zig").PatchCommitCommand; +pub const OutdatedCommand = @import("./cli/outdated_command.zig").OutdatedCommand; +pub const PublishCommand = @import("./cli/publish_command.zig").PublishCommand; +pub const PackCommand = @import("./cli/pack_command.zig").PackCommand; +pub const InitCommand = @import("./cli/init_command.zig").InitCommand; pub const Arguments = struct { pub fn loader_resolver(in: string) !Api.Loader { @@ -164,21 +182,24 @@ pub const Arguments = struct { clap.parseParam("--cwd Absolute path to resolve files & entry points from. This just changes the process' cwd.") catch unreachable, clap.parseParam("-c, --config ? Specify path to Bun config file. Default $cwd/bunfig.toml") catch unreachable, clap.parseParam("-h, --help Display this menu and exit") catch unreachable, - clap.parseParam("...") catch unreachable, - } ++ if (builtin.have_error_return_tracing) [_]ParamType{ + } ++ (if (builtin.have_error_return_tracing) [_]ParamType{ // This will print more error return traces, as a debug aid - clap.parseParam("--verbose-error-trace") catch unreachable, - } else [_]ParamType{}; + clap.parseParam("--verbose-error-trace Dump error return traces") catch unreachable, + } else [_]ParamType{}) ++ [_]ParamType{ + clap.parseParam("...") catch unreachable, + }; const debug_params = [_]ParamType{ clap.parseParam("--breakpoint-resolve ... DEBUG MODE: breakpoint when resolving something that includes this string") catch unreachable, + clap.parseParam("--breakpoint-print ... DEBUG MODE: breakpoint when printing something that includes this string") catch unreachable, }; const transpiler_params_ = [_]ParamType{ - clap.parseParam("--main-fields ... Main fields to lookup in package.json. Defaults to --target dependent") catch unreachable, + clap.parseParam("--main-fields ... Main fields to lookup in package.json. Defaults to --target dependent") catch unreachable, clap.parseParam("--extension-order ... Defaults to: .tsx,.ts,.jsx,.js,.json ") catch unreachable, - clap.parseParam("--tsconfig-override Specify custom tsconfig.json. Default $cwd/tsconfig.json") catch unreachable, - clap.parseParam("-d, --define ... Substitute K:V while parsing, e.g. --define process.env.NODE_ENV:\"development\". Values are parsed as JSON.") catch unreachable, + clap.parseParam("--tsconfig-override Specify custom tsconfig.json. Default $cwd/tsconfig.json") catch unreachable, + clap.parseParam("-d, --define ... Substitute K:V while parsing, e.g. --define process.env.NODE_ENV:\"development\". Values are parsed as JSON.") catch unreachable, + clap.parseParam("--drop ... Remove function calls, e.g. --drop=console removes all console.* calls.") catch unreachable, clap.parseParam("-l, --loader ... Parse files with .ext:loader, e.g. --loader .js:jsx. 
Valid loaders: js, jsx, ts, tsx, json, toml, text, file, wasm, napi") catch unreachable, clap.parseParam("--no-macros Disable macros from being executed in the bundler, transpiler and runtime") catch unreachable, clap.parseParam("--jsx-factory Changes the function called when compiling JSX elements using the classic JSX runtime") catch unreachable, @@ -208,6 +229,7 @@ pub const Arguments = struct { clap.parseParam("-u, --origin ") catch unreachable, clap.parseParam("--conditions ... Pass custom conditions to resolve") catch unreachable, clap.parseParam("--fetch-preconnect ... Preconnect to a URL while code is loading") catch unreachable, + clap.parseParam("--max-http-header-size Set the maximum size of HTTP headers in bytes. Default is 16KiB") catch unreachable, }; const auto_or_run_params = [_]ParamType{ @@ -235,12 +257,15 @@ pub const Arguments = struct { const build_only_params = [_]ParamType{ clap.parseParam("--compile Generate a standalone Bun executable containing your bundled code") catch unreachable, + clap.parseParam("--bytecode Use a bytecode cache") catch unreachable, clap.parseParam("--watch Automatically restart the process on file change") catch unreachable, clap.parseParam("--no-clear-screen Disable clearing the terminal screen on reload when --watch is enabled") catch unreachable, clap.parseParam("--target The intended execution environment for the bundle. \"browser\", \"bun\" or \"node\"") catch unreachable, clap.parseParam("--outdir Default to \"dist\" if multiple files") catch unreachable, clap.parseParam("--outfile Write to a file") catch unreachable, - clap.parseParam("--sourcemap ? Build with sourcemaps - 'inline', 'external', or 'none'") catch unreachable, + clap.parseParam("--sourcemap ? Build with sourcemaps - 'linked', 'inline', 'external', or 'none'") catch unreachable, + clap.parseParam("--banner Add a banner to the bundled output such as \"use client\"; for a bundle being used with RSCs") catch unreachable, + clap.parseParam("--footer Add a footer to the bundled output such as // built with bun!") catch unreachable, clap.parseParam("--format Specifies the module format to build to. Only \"esm\" is supported.") catch unreachable, clap.parseParam("--root Root directory used for multiple entry points") catch unreachable, clap.parseParam("--splitting Enable code splitting") catch unreachable, @@ -250,16 +275,23 @@ pub const Arguments = struct { clap.parseParam("--entry-naming Customize entry point filenames. Defaults to \"[dir]/[name].[ext]\"") catch unreachable, clap.parseParam("--chunk-naming Customize chunk filenames. Defaults to \"[name]-[hash].[ext]\"") catch unreachable, clap.parseParam("--asset-naming Customize asset filenames. Defaults to \"[name]-[hash].[ext]\"") catch unreachable, - clap.parseParam("--server-components Enable React Server Components (experimental)") catch unreachable, + clap.parseParam("--react-fast-refresh Enable React Fast Refresh transform (does not emit hot-module code, use this for testing)") catch unreachable, clap.parseParam("--no-bundle Transpile file only, do not bundle") catch unreachable, clap.parseParam("--emit-dce-annotations Re-emit DCE annotations in bundles. 
Enabled by default unless --minify-whitespace is passed.") catch unreachable, clap.parseParam("--minify Enable all minification flags") catch unreachable, clap.parseParam("--minify-syntax Minify syntax and inline data") catch unreachable, clap.parseParam("--minify-whitespace Minify whitespace") catch unreachable, clap.parseParam("--minify-identifiers Minify identifiers") catch unreachable, + clap.parseParam("--experimental-css Enabled experimental CSS bundling") catch unreachable, + clap.parseParam("--experimental-css-chunking Chunk CSS files together to reduce duplicated CSS loaded in a browser. Only has an affect when multiple entrypoints import CSS") catch unreachable, clap.parseParam("--dump-environment-variables") catch unreachable, clap.parseParam("--conditions ... Pass custom conditions to resolve") catch unreachable, - }; + } ++ if (FeatureFlags.bake) [_]ParamType{ + clap.parseParam("--app (EXPERIMENTAL) Build a web app for production using Bun Bake") catch unreachable, + clap.parseParam("--server-components (EXPERIMENTAL) Enable server components") catch unreachable, + clap.parseParam("--define-client ... When --server-components is set, these defines are applied to client components. Same format as --define") catch unreachable, + clap.parseParam("--debug-dump-server-files When --app is set, dump all server files to disk even when building statically") catch unreachable, + } else .{}; pub const build_params = build_only_params ++ transpiler_params_ ++ base_params_; // TODO: update test completions @@ -322,14 +354,21 @@ pub const Arguments = struct { return null; } - pub fn loadConfig(allocator: std.mem.Allocator, user_config_path_: ?string, ctx: Command.Context, comptime cmd: Command.Tag) !void { + pub fn loadConfig(allocator: std.mem.Allocator, user_config_path_: ?string, ctx: Command.Context, comptime cmd: Command.Tag) OOM!void { var config_buf: bun.PathBuffer = undefined; if (comptime cmd.readGlobalConfig()) { if (!ctx.has_loaded_global_config) { ctx.has_loaded_global_config = true; if (getHomeConfigPath(&config_buf)) |path| { - try loadConfigPath(allocator, true, path, ctx, comptime cmd); + loadConfigPath(allocator, true, path, ctx, comptime cmd) catch |err| { + if (ctx.log.hasAny()) { + ctx.log.print(Output.errorWriter()) catch {}; + } + if (ctx.log.hasAny()) Output.printError("\n", .{}); + Output.err(err, "failed to load bunfig", .{}); + Global.crash(); + }; } } } @@ -377,7 +416,14 @@ pub const Arguments = struct { config_path = config_buf[0..config_path_.len :0]; } - try loadConfigPath(allocator, auto_loaded, config_path, ctx, comptime cmd); + loadConfigPath(allocator, auto_loaded, config_path, ctx, comptime cmd) catch |err| { + if (ctx.log.hasAny()) { + ctx.log.print(Output.errorWriter()) catch {}; + } + if (ctx.log.hasAny()) Output.printError("\n", .{}); + Output.err(err, "failed to load bunfig", .{}); + Global.crash(); + }; } pub fn loadConfigWithCmdArgs( @@ -385,7 +431,7 @@ pub const Arguments = struct { allocator: std.mem.Allocator, args: clap.Args(clap.Help, cmd.params()), ctx: Command.Context, - ) !void { + ) OOM!void { return try loadConfig(allocator, args.option("--config"), ctx, comptime cmd); } @@ -542,6 +588,8 @@ pub const Arguments = struct { }; } + opts.drop = args.options("--drop"); + const loader_tuple = try LoaderColonList.resolve(allocator, args.options("--loader")); if (loader_tuple.keys.len > 0) { @@ -556,7 +604,6 @@ pub const Arguments = struct { else null; - opts.serve = false; // TODO opts.main_fields = args.options("--main-fields"); // we never 
actually supported inject. // opts.inject = args.options("--inject"); @@ -565,7 +612,7 @@ pub const Arguments = struct { ctx.passthrough = args.remaining(); - if (cmd == .AutoCommand or cmd == .RunCommand or cmd == .BuildCommand) { + if (cmd == .AutoCommand or cmd == .RunCommand or cmd == .BuildCommand or cmd == .TestCommand) { if (args.options("--conditions").len > 0) { opts.conditions = args.options("--conditions"); } @@ -604,13 +651,31 @@ pub const Arguments = struct { ctx.runtime_options.eval.eval_and_print = true; } else { opts.port = std.fmt.parseInt(u16, port_str, 10) catch { - Output.errGeneric("Invalid value for --port: \"{s}\". Must be a number\n", .{port_str}); + Output.errFmt( + bun.fmt.outOfRange(port_str, .{ + .field_name = "--port", + .min = 0, + .max = std.math.maxInt(u16), + }), + ); Output.note("To evaluate TypeScript here, use 'bun --print'", .{}); Global.exit(1); }; } } + if (args.option("--max-http-header-size")) |size_str| { + const size = std.fmt.parseInt(usize, size_str, 10) catch { + Output.errGeneric("Invalid value for --max-http-header-size: \"{s}\". Must be a positive integer\n", .{size_str}); + Global.exit(1); + }; + if (size == 0) { + bun.http.max_http_header_size = 1024 * 1024 * 1024; + } else { + bun.http.max_http_header_size = size; + } + } + ctx.debug.offline_mode_setting = if (args.flag("--prefer-offline")) Bunfig.OfflineMode.offline else if (args.flag("--prefer-latest")) @@ -703,11 +768,35 @@ pub const Arguments = struct { if (cmd == .BuildCommand) { ctx.bundler_options.transform_only = args.flag("--no-bundle"); + ctx.bundler_options.bytecode = args.flag("--bytecode"); + + if (FeatureFlags.bake and args.flag("--app")) { + ctx.bundler_options.bake = true; + ctx.bundler_options.bake_debug_dump_server = args.flag("--debug-dump-server-files"); + } + + // TODO: support --format=esm + if (ctx.bundler_options.bytecode) { + ctx.bundler_options.output_format = .cjs; + ctx.args.target = .bun; + } if (args.option("--public-path")) |public_path| { ctx.bundler_options.public_path = public_path; } + if (args.option("--banner")) |banner| { + ctx.bundler_options.banner = banner; + } + + if (args.option("--footer")) |footer| { + ctx.bundler_options.footer = footer; + } + + const experimental_css = args.flag("--experimental-css"); + ctx.bundler_options.experimental_css = experimental_css; + ctx.bundler_options.css_chunking = args.flag("--experimental-css-chunking"); + const minify_flag = args.flag("--minify"); ctx.bundler_options.minify_syntax = minify_flag or args.flag("--minify-syntax"); ctx.bundler_options.minify_whitespace = minify_flag or args.flag("--minify-whitespace"); @@ -717,9 +806,9 @@ pub const Arguments = struct { !ctx.bundler_options.minify_whitespace; if (args.options("--external").len > 0) { - var externals = try allocator.alloc([]u8, args.options("--external").len); + var externals = try allocator.alloc([]const u8, args.options("--external").len); for (args.options("--external"), 0..) |external, i| { - externals[i] = @constCast(external); + externals[i] = external; } opts.external = externals; } @@ -759,8 +848,18 @@ pub const Arguments = struct { else => invalidTarget(&diag, _target), }; - if (opts.target.? == .bun) + if (opts.target.? == .bun) { ctx.debug.run_in_bun = opts.target.? == .bun; + } else { + if (ctx.bundler_options.bytecode) { + Output.errGeneric("target must be 'bun' when bytecode is true. 
Received: {s}", .{@tagName(opts.target.?)}); + Global.exit(1); + } + + if (ctx.bundler_options.bake) { + Output.errGeneric("target must be 'bun' when using --app. Received: {s}", .{@tagName(opts.target.?)}); + } + } } if (args.flag("--watch")) { @@ -795,15 +894,27 @@ pub const Arguments = struct { if (args.option("--format")) |format_str| { const format = options.Format.fromString(format_str) orelse { - Output.prettyErrorln("error: Invalid format - must be esm, cjs, or iife", .{}); + Output.errGeneric("Invalid format - must be esm, cjs, or iife", .{}); Global.crash(); }; + switch (format) { - .esm => {}, - else => { - Output.prettyErrorln("error: Formats besides 'esm' are not implemented", .{}); - Global.crash(); + .internal_bake_dev => { + bun.Output.warn("--format={s} is for debugging only, and may experience breaking changes at any moment", .{format_str}); + bun.Output.flush(); }, + .cjs => { + if (ctx.args.target == null) { + ctx.args.target = .node; + } + }, + else => {}, + } + + ctx.bundler_options.output_format = format; + if (format != .cjs and ctx.bundler_options.bytecode) { + Output.errGeneric("format must be 'cjs' when bytecode is true. Eventually we'll add esm support as well.", .{}); + Global.exit(1); } } @@ -823,12 +934,22 @@ pub const Arguments = struct { ctx.bundler_options.asset_naming = try strings.concat(allocator, &.{ "./", bun.strings.removeLeadingDotSlash(asset_naming) }); } - if (comptime FeatureFlags.react_server_components) { - if (args.flag("--server-components")) { - ctx.bundler_options.react_server_components = true; + if (bun.FeatureFlags.bake and args.flag("--server-components")) { + ctx.bundler_options.server_components = true; + if (opts.target) |target| { + if (!bun.options.Target.from(target).isServerSide()) { + bun.Output.errGeneric("Cannot use client-side --target={s} with --server-components", .{@tagName(target)}); + Global.crash(); + } else { + opts.target = .bun; + } } } + if (args.flag("--react-fast-refresh")) { + ctx.bundler_options.react_fast_refresh = true; + } + if (args.option("--sourcemap")) |setting| { if (setting.len == 0) { // In the future, Bun is going to make this default to .linked @@ -848,6 +969,13 @@ pub const Arguments = struct { Output.prettyErrorln("error: Invalid sourcemap setting: \"{s}\"", .{setting}); Global.crash(); } + + // when using --compile, only `external` works, as we do not + // look at the source map comment. so after we validate the + // user's choice was in the list, we secretly override it + if (ctx.bundler_options.compile) { + opts.source_map = .external; + } } } @@ -945,9 +1073,9 @@ pub const Arguments = struct { } if (cmd == .BuildCommand) { - if (opts.entry_points.len == 0 and opts.framework == null) { - Output.prettyErrorln("bun build v" ++ Global.package_json_version_with_sha ++ "", .{}); - Output.prettyError("error: Missing entrypoints. What would you like to bundle?\n\n", .{}); + if (opts.entry_points.len == 0 and !ctx.bundler_options.bake) { + Output.prettyln("bun build v" ++ Global.package_json_version_with_sha ++ "", .{}); + Output.pretty("error: Missing entrypoints. 
What would you like to bundle?\n\n", .{}); Output.flush(); Output.pretty("Usage:\n $ bun build \\ [...\\] [...flags] \n", .{}); Output.pretty("\nTo see full documentation:\n $ bun build --help\n", .{}); @@ -989,6 +1117,7 @@ pub const Arguments = struct { if (Environment.isDebug) { debug_flags.resolve_breakpoints = args.options("--breakpoint-resolve"); + debug_flags.print_breakpoints = args.options("--breakpoint-print"); } return opts; @@ -1000,7 +1129,6 @@ const AutoCommand = struct { try HelpCommand.execWithReason(allocator, .invalid_command); } }; -const InitCommand = @import("./cli/init_command.zig").InitCommand; pub const HelpCommand = struct { pub fn exec(allocator: std.mem.Allocator) !void { @@ -1075,8 +1203,10 @@ pub const HelpCommand = struct { \\ add {s:<16} Add a dependency to package.json (bun a) \\ remove {s:<16} Remove a dependency from package.json (bun rm) \\ update {s:<16} Update outdated dependencies + \\ outdated Display latest versions of outdated dependencies \\ link [\] Register or link a local npm package \\ unlink Unregister a local npm package + \\ publish Publish a package to the npm registry \\ patch \ Prepare a package for patching \\ pm \ Additional package management utilities \\ @@ -1144,9 +1274,9 @@ pub const HelpCommand = struct { printWithReason(reason, false); if (reason == .invalid_command) { - std.process.exit(1); + Global.exit(1); } - std.process.exit(0); + Global.exit(0); } }; @@ -1181,6 +1311,10 @@ pub var is_bunx_exe = false; pub const Command = struct { var script_name_buf: bun.PathBuffer = undefined; + pub fn get() Context { + return global_cli_ctx; + } + pub const DebugOptions = struct { dump_environment_variables: bool = false, dump_limits: bool = false, @@ -1252,18 +1386,18 @@ pub const Command = struct { args: Api.TransformOptions, log: *logger.Log, allocator: std.mem.Allocator, - positionals: []const string = &[_]string{}, - passthrough: []const string = &[_]string{}, + positionals: []const string = &.{}, + passthrough: []const string = &.{}, install: ?*Api.BunInstall = null, - debug: DebugOptions = DebugOptions{}, - test_options: TestOptions = TestOptions{}, - bundler_options: BundlerOptions = BundlerOptions{}, - runtime_options: RuntimeOptions = RuntimeOptions{}, + debug: DebugOptions = .{}, + test_options: TestOptions = .{}, + bundler_options: BundlerOptions = .{}, + runtime_options: RuntimeOptions = .{}, - filters: []const []const u8 = &[_][]const u8{}, + filters: []const []const u8 = &.{}, - preloads: []const string = &[_]string{}, + preloads: []const string = &.{}, has_loaded_global_config: bool = false, pub const BundlerOptions = struct { @@ -1277,7 +1411,8 @@ pub const Command = struct { entry_naming: []const u8 = "[dir]/[name].[ext]", chunk_naming: []const u8 = "./[name]-[hash].[ext]", asset_naming: []const u8 = "./[name]-[hash].[ext]", - react_server_components: bool = false, + server_components: bool = false, + react_fast_refresh: bool = false, code_splitting: bool = false, transform_only: bool = false, inline_entrypoint_import_meta_main: bool = false, @@ -1286,6 +1421,15 @@ pub const Command = struct { minify_identifiers: bool = false, ignore_dce_annotations: bool = false, emit_dce_annotations: bool = true, + output_format: options.Format = .esm, + bytecode: bool = false, + banner: []const u8 = "", + footer: []const u8 = "", + experimental_css: bool = false, + css_chunking: bool = false, + + bake: bool = false, + bake_debug_dump_server: bool = false, }; pub fn create(allocator: std.mem.Allocator, log: *logger.Log, comptime command: 
Command.Tag) anyerror!Context { @@ -1320,7 +1464,7 @@ pub const Command = struct { // std.process.args allocates! const ArgsIterator = struct { - buf: [][:0]const u8 = undefined, + buf: [][:0]const u8, i: u32 = 0, pub fn next(this: *ArgsIterator) ?[]const u8 { @@ -1383,7 +1527,7 @@ pub const Command = struct { } const first_arg_name = next_arg; - const RootCommandMatcher = strings.ExactSizeMatcher(16); + const RootCommandMatcher = strings.ExactSizeMatcher(12); return switch (RootCommandMatcher.match(first_arg_name)) { RootCommandMatcher.case("init") => .InitCommand, @@ -1431,6 +1575,9 @@ pub const Command = struct { RootCommandMatcher.case("exec") => .ExecCommand, + RootCommandMatcher.case("outdated") => .OutdatedCommand, + RootCommandMatcher.case("publish") => .PublishCommand, + // These are reserved for future use by Bun, so that someone // doing `bun deploy` to run a script doesn't accidentally break // when we add our actual command @@ -1443,9 +1590,7 @@ pub const Command = struct { RootCommandMatcher.case("login") => .ReservedCommand, RootCommandMatcher.case("logout") => .ReservedCommand, RootCommandMatcher.case("whoami") => .ReservedCommand, - RootCommandMatcher.case("publish") => .ReservedCommand, RootCommandMatcher.case("prune") => .ReservedCommand, - RootCommandMatcher.case("outdated") => .ReservedCommand, RootCommandMatcher.case("list") => .ReservedCommand, RootCommandMatcher.case("why") => .ReservedCommand, @@ -1567,6 +1712,20 @@ pub const Command = struct { try PatchCommitCommand.exec(ctx); return; }, + .OutdatedCommand => { + if (comptime bun.fast_debug_build_mode and bun.fast_debug_build_cmd != .OutdatedCommand) unreachable; + const ctx = try Command.init(allocator, log, .OutdatedCommand); + + try OutdatedCommand.exec(ctx); + return; + }, + .PublishCommand => { + if (comptime bun.fast_debug_build_mode and bun.fast_debug_build_cmd != .PublishCommand) unreachable; + const ctx = try Command.init(allocator, log, .PublishCommand); + + try PublishCommand.exec(ctx); + return; + }, .BunxCommand => { if (comptime bun.fast_debug_build_mode and bun.fast_debug_build_cmd != .BunxCommand) unreachable; const ctx = try Command.init(allocator, log, .BunxCommand); @@ -1609,6 +1768,20 @@ pub const Command = struct { if (comptime bun.fast_debug_build_mode and bun.fast_debug_build_cmd != .PackageManagerCommand) unreachable; const ctx = try Command.init(allocator, log, .PackageManagerCommand); + // const maybe_subcommand, const maybe_arg = PackageManagerCommand.which(command_index); + // if (maybe_subcommand) |subcommand| { + // return switch (subcommand) { + // inline else => |tag| try PackageManagerCommand.exec(ctx, tag), + // }; + // } + + // PackageManagerCommand.printHelp(); + + // if (maybe_arg) |arg| { + // Output.errGeneric("\"{s}\" unknown command", .{arg}); + // Global.crash(); + // } + try PackageManagerCommand.exec(ctx); return; }, @@ -1659,7 +1832,7 @@ pub const Command = struct { const index = AddCompletions.index; outer: { - if (filter.len > 1) { + if (filter.len > 1 and filter[1].len > 0) { const first_letter: FirstLetter = switch (filter[1][0]) { 'a' => FirstLetter.a, 'b' => FirstLetter.b, @@ -2093,11 +2266,7 @@ pub const Command = struct { ) catch |err| { bun.handleErrorReturnTrace(err, @errorReturnTrace()); - if (Output.enable_ansi_colors) { - ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), true) catch {}; - } else { - ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), false) catch {}; - } + ctx.log.print(Output.errorWriter()) catch {}; 
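The root-command dispatch above shrinks `strings.ExactSizeMatcher(16)` to `ExactSizeMatcher(12)` and routes the new `outdated` and `publish` subcommands through it. The matcher's trick, re-sketched here from its usage rather than copied from Bun's implementation, is to zero-pad a short string into a fixed-width unsigned integer so the whole dispatch becomes a plain integer `switch`:

```zig
const std = @import("std");

fn ExactSizeMatcher(comptime max_len: usize) type {
    return struct {
        const Int = std.meta.Int(.unsigned, max_len * 8);
        const no_match: Int = std.math.maxInt(Int); // longer inputs can never equal a case

        fn hash(s: []const u8) Int {
            if (s.len > max_len) return no_match;
            var buf = [_]u8{0} ** max_len;
            @memcpy(buf[0..s.len], s);
            return std.mem.readInt(Int, &buf, .little);
        }

        fn case(comptime s: []const u8) Int {
            return comptime hash(s);
        }
    };
}

test "root command dispatch" {
    const M = ExactSizeMatcher(12);
    const tag: enum { outdated, publish, other } = switch (M.hash("publish")) {
        M.case("outdated") => .outdated,
        M.case("publish") => .publish,
        else => .other,
    };
    try std.testing.expect(tag == .publish);
}
```

Anything longer than 12 bytes maps to the sentinel value and falls through to the `else` prong, so over-long input can never collide with a real command.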
Output.prettyErrorln("error: Failed to run {s} due to error {s}", .{ std.fs.path.basename(file_path), @@ -2134,6 +2303,8 @@ pub const Command = struct { ExecCommand, PatchCommand, PatchCommitCommand, + OutdatedCommand, + PublishCommand, /// Used by crash reports. /// @@ -2165,6 +2336,8 @@ pub const Command = struct { .ExecCommand => 'e', .PatchCommand => 'x', .PatchCommitCommand => 'z', + .OutdatedCommand => 'o', + .PublishCommand => 'k', }; } @@ -2382,12 +2555,18 @@ pub const Command = struct { \\Note: If executing this from a shell, make sure to escape the string! \\ \\Examples: - \\ bunx exec "echo hi" - \\ bunx exec "echo \"hey friends\"!" + \\ bun exec "echo hi" + \\ bun exec "echo \"hey friends\"!" \\ , .{}); Output.flush(); }, + .OutdatedCommand, .PublishCommand => { + Install.PackageManager.CommandLineArguments.printHelp(switch (cmd) { + .OutdatedCommand => .outdated, + .PublishCommand => .publish, + }); + }, else => { HelpCommand.printWithReason(.explicit); }, @@ -2396,7 +2575,17 @@ pub const Command = struct { pub fn readGlobalConfig(this: Tag) bool { return switch (this) { - .BunxCommand, .PackageManagerCommand, .InstallCommand, .AddCommand, .RemoveCommand, .UpdateCommand, .PatchCommand, .PatchCommitCommand => true, + .BunxCommand, + .PackageManagerCommand, + .InstallCommand, + .AddCommand, + .RemoveCommand, + .UpdateCommand, + .PatchCommand, + .PatchCommitCommand, + .OutdatedCommand, + .PublishCommand, + => true, else => false, }; } @@ -2413,6 +2602,8 @@ pub const Command = struct { .UpdateCommand, .PatchCommand, .PatchCommitCommand, + .OutdatedCommand, + .PublishCommand, => true, else => false, }; @@ -2432,6 +2623,8 @@ pub const Command = struct { .AutoCommand = true, .RunCommand = true, .RunAsNodeCommand = true, + .OutdatedCommand = true, + .PublishCommand = true, }); pub const always_loads_config: std.EnumArray(Tag, bool) = std.EnumArray(Tag, bool).initDefault(false, .{ @@ -2445,6 +2638,8 @@ pub const Command = struct { .PatchCommitCommand = true, .PackageManagerCommand = true, .BunxCommand = true, + .OutdatedCommand = true, + .PublishCommand = true, }); pub const uses_global_options: std.EnumArray(Tag, bool) = std.EnumArray(Tag, bool).initDefault(true, .{ @@ -2459,6 +2654,8 @@ pub const Command = struct { .LinkCommand = false, .UnlinkCommand = false, .BunxCommand = false, + .OutdatedCommand = false, + .PublishCommand = false, }); }; }; diff --git a/src/cli/build_command.zig b/src/cli/build_command.zig index f3b51fb118..8b02cc40e6 100644 --- a/src/cli/build_command.zig +++ b/src/cli/build_command.zig @@ -30,38 +30,33 @@ const bundler = bun.bundler; const DotEnv = @import("../env_loader.zig"); const fs = @import("../fs.zig"); -const Router = @import("../router.zig"); const BundleV2 = @import("../bundler/bundle_v2.zig").BundleV2; -var estimated_input_lines_of_code_: usize = undefined; pub const BuildCommand = struct { const compile_define_keys = &.{ "process.platform", "process.arch", + "process.versions.bun", }; - pub fn exec( - ctx: Command.Context, - ) !void { + pub fn exec(ctx: Command.Context) !void { Global.configureAllocator(.{ .long_running = true }); const allocator = ctx.allocator; var log = ctx.log; - estimated_input_lines_of_code_ = 0; - if (ctx.bundler_options.compile) { + if (ctx.bundler_options.compile or ctx.bundler_options.bytecode) { // set this early so that externals are set up correctly and define is right ctx.args.target = .bun; } + if (ctx.bundler_options.bake) { + return bun.bake.production.buildCommand(ctx); + } + const compile_target = 
&ctx.bundler_options.compile_target; if (ctx.bundler_options.compile) { const compile_define_values = compile_target.defineValues(); - if (ctx.args.define == null) { - ctx.args.define = .{ - .keys = compile_define_keys, - .values = compile_define_values, - }; - } else if (ctx.args.define) |*define| { + if (ctx.args.define) |*define| { var keys = try std.ArrayList(string).initCapacity(bun.default_allocator, compile_define_keys.len + define.keys.len); keys.appendSliceAssumeCapacity(compile_define_keys); keys.appendSliceAssumeCapacity(define.keys); @@ -71,57 +66,60 @@ pub const BuildCommand = struct { define.keys = keys.items; define.values = values.items; + } else { + ctx.args.define = .{ + .keys = compile_define_keys, + .values = compile_define_values, + }; } } var this_bundler = try bundler.Bundler.init(allocator, log, ctx.args, null); this_bundler.options.source_map = options.SourceMapOption.fromApi(ctx.args.source_map); - this_bundler.resolver.opts.source_map = options.SourceMapOption.fromApi(ctx.args.source_map); this_bundler.options.compile = ctx.bundler_options.compile; - this_bundler.resolver.opts.compile = ctx.bundler_options.compile; if (this_bundler.options.source_map == .external and ctx.bundler_options.outdir.len == 0 and !ctx.bundler_options.compile) { Output.prettyErrorln("error: cannot use an external source map without --outdir", .{}); Global.exit(1); return; } + var outfile = ctx.bundler_options.outfile; + const output_to_stdout = !ctx.bundler_options.compile and outfile.len == 0 and ctx.bundler_options.outdir.len == 0; + + this_bundler.options.supports_multiple_outputs = !(output_to_stdout or outfile.len > 0); this_bundler.options.public_path = ctx.bundler_options.public_path; - this_bundler.resolver.opts.public_path = ctx.bundler_options.public_path; - this_bundler.options.entry_naming = ctx.bundler_options.entry_naming; this_bundler.options.chunk_naming = ctx.bundler_options.chunk_naming; this_bundler.options.asset_naming = ctx.bundler_options.asset_naming; - this_bundler.resolver.opts.entry_naming = ctx.bundler_options.entry_naming; - this_bundler.resolver.opts.chunk_naming = ctx.bundler_options.chunk_naming; - this_bundler.resolver.opts.asset_naming = ctx.bundler_options.asset_naming; - - this_bundler.options.react_server_components = ctx.bundler_options.react_server_components; - this_bundler.resolver.opts.react_server_components = ctx.bundler_options.react_server_components; - + this_bundler.options.server_components = ctx.bundler_options.server_components; + this_bundler.options.react_fast_refresh = ctx.bundler_options.react_fast_refresh; this_bundler.options.inline_entrypoint_import_meta_main = ctx.bundler_options.inline_entrypoint_import_meta_main; - this_bundler.resolver.opts.inline_entrypoint_import_meta_main = ctx.bundler_options.inline_entrypoint_import_meta_main; - this_bundler.options.code_splitting = ctx.bundler_options.code_splitting; - this_bundler.resolver.opts.code_splitting = ctx.bundler_options.code_splitting; - this_bundler.options.minify_syntax = ctx.bundler_options.minify_syntax; - this_bundler.resolver.opts.minify_syntax = ctx.bundler_options.minify_syntax; - this_bundler.options.minify_whitespace = ctx.bundler_options.minify_whitespace; - this_bundler.resolver.opts.minify_whitespace = ctx.bundler_options.minify_whitespace; - this_bundler.options.minify_identifiers = ctx.bundler_options.minify_identifiers; - this_bundler.resolver.opts.minify_identifiers = ctx.bundler_options.minify_identifiers; - this_bundler.options.emit_dce_annotations = 
ctx.bundler_options.emit_dce_annotations; - this_bundler.resolver.opts.emit_dce_annotations = ctx.bundler_options.emit_dce_annotations; - this_bundler.options.ignore_dce_annotations = ctx.bundler_options.ignore_dce_annotations; - this_bundler.resolver.opts.ignore_dce_annotations = ctx.bundler_options.ignore_dce_annotations; + + this_bundler.options.banner = ctx.bundler_options.banner; + this_bundler.options.footer = ctx.bundler_options.footer; + this_bundler.options.drop = ctx.args.drop; + + this_bundler.options.experimental_css = ctx.bundler_options.experimental_css; + this_bundler.options.css_chunking = ctx.bundler_options.css_chunking; + + this_bundler.options.output_dir = ctx.bundler_options.outdir; + this_bundler.options.output_format = ctx.bundler_options.output_format; + + if (ctx.bundler_options.output_format == .internal_bake_dev) { + this_bundler.options.tree_shaking = false; + } + + this_bundler.options.bytecode = ctx.bundler_options.bytecode; if (ctx.bundler_options.compile) { if (ctx.bundler_options.code_splitting) { @@ -130,12 +128,6 @@ pub const BuildCommand = struct { return; } - if (this_bundler.options.entry_points.len > 1) { - Output.prettyErrorln("error: multiple entry points are not supported with --compile", .{}); - Global.exit(1); - return; - } - if (ctx.bundler_options.outdir.len > 0) { Output.prettyErrorln("error: cannot use --compile with --outdir", .{}); Global.exit(1); @@ -145,7 +137,6 @@ pub const BuildCommand = struct { const base_public_path = bun.StandaloneModuleGraph.targetBasePublicPath(compile_target.os, "root/"); this_bundler.options.public_path = base_public_path; - this_bundler.resolver.opts.public_path = base_public_path; if (outfile.len == 0) { outfile = std.fs.path.basename(this_bundler.options.entry_points[0]); @@ -177,7 +168,7 @@ pub const BuildCommand = struct { } } - if (ctx.bundler_options.outdir.len == 0) { + if (ctx.bundler_options.outdir.len == 0 and !ctx.bundler_options.compile) { if (this_bundler.options.entry_points.len > 1) { Output.prettyErrorln("error: Must use --outdir when specifying more than one entry point.", .{}); Global.exit(1); @@ -190,9 +181,6 @@ pub const BuildCommand = struct { } } - this_bundler.options.output_dir = ctx.bundler_options.outdir; - this_bundler.resolver.opts.output_dir = ctx.bundler_options.outdir; - var src_root_dir_buf: bun.PathBuffer = undefined; const src_root_dir: string = brk1: { const path = brk2: { @@ -220,33 +208,13 @@ pub const BuildCommand = struct { }; this_bundler.options.root_dir = src_root_dir; - this_bundler.resolver.opts.root_dir = src_root_dir; - - this_bundler.options.react_server_components = ctx.bundler_options.react_server_components; - this_bundler.resolver.opts.react_server_components = ctx.bundler_options.react_server_components; this_bundler.options.code_splitting = ctx.bundler_options.code_splitting; - this_bundler.resolver.opts.code_splitting = ctx.bundler_options.code_splitting; this_bundler.options.transform_only = ctx.bundler_options.transform_only; - if (this_bundler.options.transform_only) { - this_bundler.options.resolve_mode = .disable; - } + try this_bundler.configureDefines(); this_bundler.configureLinker(); - // This step is optional - // If it fails for any reason, ignore it and continue bundling - // This is partially a workaround for the 'error.MissingRoutesDir' error - this_bundler.configureRouter(true) catch { - this_bundler.options.routes.routes_enabled = false; - this_bundler.options.framework = null; - if (this_bundler.router) |*router| { - 
router.config.routes_enabled = false; - router.config.single_page_app_routing = false; - router.config.static_dir_enabled = false; - this_bundler.router = null; - } - }; - + this_bundler.resolver.opts = this_bundler.options; this_bundler.options.jsx.development = !this_bundler.options.production; this_bundler.resolver.opts.jsx.development = this_bundler.options.jsx.development; @@ -260,6 +228,38 @@ pub const BuildCommand = struct { .unspecified => {}, } + var client_bundler: bundler.Bundler = undefined; + if (this_bundler.options.server_components) { + client_bundler = try bundler.Bundler.init(allocator, log, ctx.args, null); + client_bundler.options = this_bundler.options; + client_bundler.options.target = .browser; + client_bundler.options.server_components = true; + try this_bundler.options.conditions.appendSlice(&.{"react-server"}); + this_bundler.options.react_fast_refresh = false; + this_bundler.options.minify_syntax = true; + client_bundler.options.minify_syntax = true; + client_bundler.options.define = try options.Define.init( + allocator, + if (ctx.args.define) |user_defines| + try options.Define.Data.fromInput(try options.stringHashMapFromArrays( + options.defines.RawDefines, + allocator, + user_defines.keys, + user_defines.values, + ), ctx.args.drop, log, allocator) + else + null, + null, + this_bundler.options.define.drop_debugger, + ); + + try bun.bake.addImportMetaDefines(allocator, this_bundler.options.define, .development, .server); + try bun.bake.addImportMetaDefines(allocator, client_bundler.options.define, .development, .client); + + this_bundler.resolver.opts = this_bundler.options; + client_bundler.resolver.opts = client_bundler.options; + } + // var env_loader = this_bundler.env; if (ctx.debug.dump_environment_variables) { @@ -285,7 +285,7 @@ pub const BuildCommand = struct { ); if (log.hasErrors()) { - try log.printForLogLevel(Output.errorWriter()); + try log.print(Output.errorWriter()); if (result.errors.len > 0 or result.output_files.len == 0) { Output.flush(); @@ -301,21 +301,19 @@ pub const BuildCommand = struct { &this_bundler, allocator, bun.JSC.AnyEventLoop.init(ctx.allocator), - std.crypto.random.int(u64), ctx.debug.hot_reload == .watch, &reachable_file_count, &minify_duration, &input_code_length, ) catch |err| { if (log.msgs.items.len > 0) { - try log.printForLogLevel(Output.errorWriter()); + try log.print(Output.errorWriter()); } else { try Output.errorWriter().print("error: {s}", .{@errorName(err)}); } Output.flush(); exitOrWatch(1, ctx.debug.hot_reload == .watch); - unreachable; }).items; }; const bundled_end = std.time.nanoTimestamp(); @@ -358,7 +356,7 @@ pub const BuildCommand = struct { std.fs.cwd() else std.fs.cwd().makeOpenPath(root_path, .{}) catch |err| { - Output.prettyErrorln("{s} while attemping to open output directory {}", .{ @errorName(err), bun.fmt.quote(root_path) }); + Output.prettyErrorln("{s} while attempting to open output directory {}", .{ @errorName(err), bun.fmt.quote(root_path) }); exitOrWatch(1, ctx.debug.hot_reload == .watch); unreachable; }; @@ -408,6 +406,7 @@ pub const BuildCommand = struct { this_bundler.options.public_path, outfile, this_bundler.env, + this_bundler.options.output_format, ); const compiled_elapsed = @divTrunc(@as(i64, @truncate(std.time.nanoTimestamp() - bundled_end)), @as(i64, std.time.ns_per_ms)); const compiled_elapsed_digit_count: isize = switch (compiled_elapsed) { @@ -442,79 +441,11 @@ pub const BuildCommand = struct { // So don't do that unless we actually need to. 
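The hunk that follows replaces build_command.zig's hand-rolled `switch (f.value)` over saved/buffer/move/copy outputs with a single `f.writeToDisk(root_dir, from_path)` call. For the in-memory `.buffer` case, the work such a helper has to do is roughly the following (assumed names, not the actual OutputFile API; permissions handling for compiled executables is left out):

```zig
const std = @import("std");

// Sketch: persist one bundled artifact under the output root, creating any
// missing parent directories first. The real helper also handles the
// saved/move/copy variants and sets 0o755 on compiled executables.
fn writeOutputFile(root_dir: std.fs.Dir, rel_path: []const u8, bytes: []const u8) !void {
    if (std.fs.path.dirname(rel_path)) |parent| {
        try root_dir.makePath(parent);
    }
    const file = try root_dir.createFile(rel_path, .{});
    defer file.close();
    try file.writeAll(bytes);
}
```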
// const do_we_need_to_close = !FeatureFlags.store_file_descriptors or (@intCast(usize, root_dir.fd) + open_file_limit) < output_files.len; - var filepath_buf: bun.PathBuffer = undefined; - filepath_buf[0] = '.'; - filepath_buf[1] = '/'; - for (output_files) |f| { - var rel_path: []const u8 = undefined; - switch (f.value) { - // Nothing to do in this case - .saved => { - rel_path = f.dest_path; - if (f.dest_path.len > from_path.len) { - rel_path = resolve_path.relative(from_path, f.dest_path); - } - }, - - // easy mode: write the buffer - .buffer => |value| { - rel_path = f.dest_path; - if (f.dest_path.len > from_path.len) { - rel_path = resolve_path.relative(from_path, f.dest_path); - if (std.fs.path.dirname(rel_path)) |parent| { - if (parent.len > root_path.len) { - try root_dir.makePath(parent); - } - } - } - const JSC = bun.JSC; - var path_buf: bun.PathBuffer = undefined; - switch (JSC.Node.NodeFS.writeFileWithPathBuffer( - &path_buf, - JSC.Node.Arguments.WriteFile{ - .data = JSC.Node.StringOrBuffer{ - .buffer = JSC.Buffer{ - .buffer = .{ - .ptr = @constCast(value.bytes.ptr), - // TODO: handle > 4 GB files - .len = @as(u32, @truncate(value.bytes.len)), - .byte_len = @as(u32, @truncate(value.bytes.len)), - }, - }, - }, - .encoding = .buffer, - .mode = if (f.is_executable) 0o755 else 0o644, - .dirfd = bun.toFD(root_dir.fd), - .file = .{ - .path = JSC.Node.PathLike{ - .string = JSC.PathString.init(rel_path), - }, - }, - }, - )) { - .err => |err| { - Output.prettyErrorln("error: failed to write file {}\n{}", .{ bun.fmt.quote(rel_path), err }); - }, - .result => {}, - } - }, - .move => |value| { - const primary = f.dest_path[from_path.len..]; - bun.copy(u8, filepath_buf[2..], primary); - rel_path = filepath_buf[0 .. primary.len + 2]; - rel_path = value.pathname; - - try f.moveTo(root_path, @constCast(rel_path), bun.toFD(root_dir.fd)); - }, - .copy => |value| { - rel_path = value.pathname; - - try f.copyTo(root_path, @constCast(rel_path), bun.toFD(root_dir.fd)); - }, - .noop => {}, - .pending => unreachable, - } + const rel_path = f.writeToDisk(root_dir, from_path) catch |err| { + Output.err(err, "failed to write file '{}'", .{bun.fmt.quote(f.dest_path)}); + continue; + }; // Print summary _ = try writer.write("\n"); @@ -544,13 +475,13 @@ pub const BuildCommand = struct { } } - try log.printForLogLevel(Output.errorWriter()); + try log.print(Output.errorWriter()); exitOrWatch(0, ctx.debug.hot_reload == .watch); } } }; -fn exitOrWatch(code: u8, watch: bool) void { +fn exitOrWatch(code: u8, watch: bool) noreturn { if (watch) { // the watcher thread will exit the process std.time.sleep(std.math.maxInt(u64) - 1); @@ -595,14 +526,14 @@ fn printSummary(bundled_end: i128, minify_duration: u64, minified: bool, input_c Output.prettyln( " minify -{} (estimate)", .{ - bun.fmt.size(@as(usize, @intCast(delta))), + bun.fmt.size(@as(usize, @intCast(delta)), .{}), }, ); } else if (-delta > 1024) { Output.prettyln( " minify +{} (estimate)", .{ - bun.fmt.size(@as(usize, @intCast(-delta))), + bun.fmt.size(@as(usize, @intCast(-delta)), .{}), }, ); } else { diff --git a/src/cli/bunx_command.zig b/src/cli/bunx_command.zig index c0bb2fa7f7..21ce662274 100644 --- a/src/cli/bunx_command.zig +++ b/src/cli/bunx_command.zig @@ -82,7 +82,7 @@ pub const BunxCommand = struct { bun.JSAst.Expr.Data.Store.create(); bun.JSAst.Stmt.Data.Store.create(); - const expr = try bun.JSON.ParsePackageJSONUTF8(&source, bundler.log, bundler.allocator); + const expr = try bun.JSON.parsePackageJSONUTF8(&source, bundler.log, 
bundler.allocator); // choose the first package that fits if (expr.get("bin")) |bin_expr| { diff --git a/src/cli/create_command.zig b/src/cli/create_command.zig index 7b7bf50c7c..0c81ee3730 100644 --- a/src/cli/create_command.zig +++ b/src/cli/create_command.zig @@ -31,8 +31,8 @@ const fs = @import("../fs.zig"); const URL = @import("../url.zig").URL; const HTTP = bun.http; -const ParseJSON = @import("../json_parser.zig").ParseJSONUTF8; -const Archive = @import("../libarchive/libarchive.zig").Archive; +const JSON = bun.JSON; +const Archiver = bun.libarchive.Archiver; const Zlib = @import("../zlib.zig"); const JSPrinter = bun.js_printer; const DotEnv = @import("../env_loader.zig"); @@ -241,7 +241,7 @@ pub const CreateCommand = struct { @setCold(true); Global.configureAllocator(.{ .long_running = false }); - HTTP.HTTPThread.init(); + HTTP.HTTPThread.init(&.{}); var create_options = try CreateOptions.parse(ctx); const positionals = create_options.positionals; @@ -377,19 +377,19 @@ pub const CreateCommand = struct { progress.refresh(); - var pluckers: [1]Archive.Plucker = if (!create_options.skip_package_json) - [1]Archive.Plucker{try Archive.Plucker.init(comptime strings.literal(bun.OSPathChar, "package.json"), 2048, ctx.allocator)} + var pluckers: [1]Archiver.Plucker = if (!create_options.skip_package_json) + [1]Archiver.Plucker{try Archiver.Plucker.init(comptime strings.literal(bun.OSPathChar, "package.json"), 2048, ctx.allocator)} else - [1]Archive.Plucker{undefined}; + [1]Archiver.Plucker{undefined}; - var archive_context = Archive.Context{ + var archive_context = Archiver.Context{ .pluckers = pluckers[0..@as(usize, @intCast(@intFromBool(!create_options.skip_package_json)))], .all_files = undefined, .overwrite_list = bun.StringArrayHashMap(void).init(ctx.allocator), }; if (!create_options.overwrite) { - try Archive.getOverwritingFileList( + try Archiver.getOverwritingFileList( tarball_buf_list.items, destination, &archive_context, @@ -427,7 +427,7 @@ pub const CreateCommand = struct { } } - _ = try Archive.extractToDisk( + _ = try Archiver.extractToDisk( tarball_buf_list.items, destination, &archive_context, @@ -701,7 +701,7 @@ pub const CreateCommand = struct { var source = logger.Source.initPathString("package.json", package_json_contents.list.items); - var package_json_expr = ParseJSON(&source, ctx.log, ctx.allocator) catch { + var package_json_expr = JSON.parseUTF8(&source, ctx.log, ctx.allocator) catch { package_json_file = null; break :process_package_json; }; @@ -714,11 +714,7 @@ pub const CreateCommand = struct { const properties_list = std.ArrayList(js_ast.G.Property).fromOwnedSlice(default_allocator, package_json_expr.data.e_object.properties.slice()); if (ctx.log.errors > 0) { - if (Output.enable_ansi_colors) { - try ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), true); - } else { - try ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), false); - } + try ctx.log.print(Output.errorWriter()); package_json_file = null; break :process_package_json; @@ -1983,7 +1979,7 @@ pub const Example = struct { async_http.client.progress_node = progress; async_http.client.flags.reject_unauthorized = env_loader.getTLSRejectUnauthorized(); - const response = try async_http.sendSync(true); + const response = try async_http.sendSync(); switch (response.status_code) { 404 => return error.GitHubRepositoryNotFound, @@ -1996,7 +1992,7 @@ pub const Example = struct { var is_expected_content_type = false; var content_type: string = ""; - for (response.headers) |header| 
{ + for (response.headers.list) |header| { if (strings.eqlCaseInsensitiveASCII(header.name, "content-type", true)) { content_type = header.value; @@ -2060,7 +2056,7 @@ pub const Example = struct { async_http.client.progress_node = progress; async_http.client.flags.reject_unauthorized = env_loader.getTLSRejectUnauthorized(); - var response = try async_http.sendSync(true); + var response = try async_http.sendSync(); switch (response.status_code) { 404 => return error.ExampleNotFound, @@ -2075,16 +2071,12 @@ pub const Example = struct { refresher.refresh(); initializeStore(); var source = logger.Source.initPathString("package.json", mutable.list.items); - var expr = ParseJSON(&source, ctx.log, ctx.allocator) catch |err| { + var expr = JSON.parseUTF8(&source, ctx.log, ctx.allocator) catch |err| { progress.end(); refresher.refresh(); if (ctx.log.errors > 0) { - if (Output.enable_ansi_colors) { - try ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), true); - } else { - try ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), false); - } + try ctx.log.print(Output.errorWriter()); Global.exit(1); } else { Output.prettyErrorln("Error parsing package: {s}", .{@errorName(err)}); @@ -2096,11 +2088,7 @@ pub const Example = struct { progress.end(); refresher.refresh(); - if (Output.enable_ansi_colors) { - try ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), true); - } else { - try ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), false); - } + try ctx.log.print(Output.errorWriter()); Global.exit(1); } @@ -2151,7 +2139,7 @@ pub const Example = struct { refresher.maybeRefresh(); - response = try async_http.sendSync(true); + response = try async_http.sendSync(); refresher.maybeRefresh(); @@ -2194,7 +2182,7 @@ pub const Example = struct { async_http.client.progress_node = progress_node; } - const response = async_http.sendSync(true) catch |err| { + const response = async_http.sendSync() catch |err| { switch (err) { error.WouldBlock => { Output.prettyErrorln("Request timed out while trying to fetch examples list. 
Please try again", .{}); @@ -2214,13 +2202,9 @@ pub const Example = struct { initializeStore(); var source = logger.Source.initPathString("examples.json", mutable.list.items); - const examples_object = ParseJSON(&source, ctx.log, ctx.allocator) catch |err| { + const examples_object = JSON.parseUTF8(&source, ctx.log, ctx.allocator) catch |err| { if (ctx.log.errors > 0) { - if (Output.enable_ansi_colors) { - try ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), true); - } else { - try ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), false); - } + try ctx.log.print(Output.errorWriter()); Global.exit(1); } else { Output.prettyErrorln("Error parsing examples: {s}", .{@errorName(err)}); @@ -2229,11 +2213,7 @@ pub const Example = struct { }; if (ctx.log.errors > 0) { - if (Output.enable_ansi_colors) { - try ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), true); - } else { - try ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), false); - } + try ctx.log.print(Output.errorWriter()); Global.exit(1); } diff --git a/src/cli/filter_arg.zig b/src/cli/filter_arg.zig index d1b67fa6dd..99031058d5 100644 --- a/src/cli/filter_arg.zig +++ b/src/cli/filter_arg.zig @@ -5,7 +5,7 @@ const string = bun.string; const Output = bun.Output; const Global = bun.Global; const strings = bun.strings; -const json_parser = bun.JSON; +const JSON = bun.JSON; const Glob = @import("../glob.zig"); const Package = @import("../install/lockfile.zig").Package; @@ -65,7 +65,7 @@ pub fn getCandidatePackagePatterns(allocator: std.mem.Allocator, log: *bun.logge }; defer allocator.free(json_source.contents); - const json = try json_parser.ParsePackageJSONUTF8(&json_source, log, allocator); + const json = try JSON.parsePackageJSONUTF8(&json_source, log, allocator); const prop = json.asProperty("workspaces") orelse continue; @@ -187,7 +187,7 @@ pub const FilterSet = struct { pub fn matchesPath(self: *const FilterSet, path: []const u8) bool { for (self.filters) |filter| { - if (Glob.matchImpl(filter.codepoints, path)) { + if (Glob.matchImpl(filter.codepoints, path).matches()) { return true; } } @@ -200,7 +200,7 @@ pub const FilterSet = struct { .name => name, .path => path, }; - if (Glob.matchImpl(filter.codepoints, target)) { + if (Glob.matchImpl(filter.codepoints, target).matches()) { return true; } } diff --git a/src/cli/filter_run.zig b/src/cli/filter_run.zig index 0f404fde21..aad627d3d2 100644 --- a/src/cli/filter_run.zig +++ b/src/cli/filter_run.zig @@ -485,34 +485,32 @@ pub fn runScriptsWithFilter(ctx: Command.Context) !noreturn { const PATH = try RunCommand.configurePathForRunWithPackageJsonDir(ctx, dirpath, &this_bundler, null, dirpath, ctx.debug.run_in_bun); for (&[3][]const u8{ pre_script_name, script_name, post_script_name }) |name| { - const content = pkgscripts.get(name) orelse continue; + const original_content = pkgscripts.get(name) orelse continue; + var copy_script_capacity: usize = original_content.len; + for (ctx.passthrough) |part| copy_script_capacity += 1 + part.len; // we leak this - var copy_script = try std.ArrayList(u8).initCapacity(ctx.allocator, content.len); - try RunCommand.replacePackageManagerRun(©_script, content); + var copy_script = try std.ArrayList(u8).initCapacity(ctx.allocator, copy_script_capacity); - // and this, too - var combined_len = content.len; - for (ctx.passthrough) |p| { - combined_len += p.len + 1; - } - var combined = try ctx.allocator.allocSentinel(u8, combined_len, 0); - bun.copy(u8, combined, content); - var 
remaining_script_buf = combined[content.len..]; + try RunCommand.replacePackageManagerRun(©_script, original_content); + const len_command_only = copy_script.items.len; for (ctx.passthrough) |part| { - const p = part; - remaining_script_buf[0] = ' '; - bun.copy(u8, remaining_script_buf[1..], p); - remaining_script_buf = remaining_script_buf[p.len + 1 ..]; + try copy_script.append(' '); + if (bun.shell.needsEscapeUtf8AsciiLatin1(part)) { + try bun.shell.escape8Bit(part, ©_script, true); + } else { + try copy_script.appendSlice(part); + } } + try copy_script.append(0); try scripts.append(.{ .package_json_path = try ctx.allocator.dupe(u8, package_json_path), .package_name = pkgjson.name, .script_name = name, - .script_content = copy_script.items, - .combined = combined, + .script_content = copy_script.items[0..len_command_only], + .combined = copy_script.items[0 .. copy_script.items.len - 1 :0], .deps = pkgjson.dependencies, .PATH = PATH, }); diff --git a/src/cli/init_command.zig b/src/cli/init_command.zig index 35d22af2fe..86f6efd224 100644 --- a/src/cli/init_command.zig +++ b/src/cli/init_command.zig @@ -12,7 +12,7 @@ const std = @import("std"); const open = @import("../open.zig"); const CLI = @import("../cli.zig"); const Fs = @import("../fs.zig"); -const ParseJSON = @import("../json_parser.zig").ParsePackageJSONUTF8; +const JSON = bun.JSON; const js_parser = bun.js_parser; const js_ast = bun.JSAst; const linker = @import("../linker.zig"); @@ -21,16 +21,14 @@ const initializeStore = @import("./create_command.zig").initializeStore; const lex = bun.js_lexer; const logger = bun.logger; const JSPrinter = bun.js_printer; +const exists = bun.sys.exists; +const existsZ = bun.sys.existsZ; -fn exists(path: anytype) bool { - return bun.sys.exists(path); -} pub const InitCommand = struct { - fn prompt( + pub fn prompt( alloc: std.mem.Allocator, comptime label: string, default: []const u8, - _: bool, ) ![]const u8 { Output.pretty(label, .{}); if (default.len > 0) { @@ -171,7 +169,7 @@ pub const InitCommand = struct { process_package_json: { var source = logger.Source.initPathString("package.json", package_json_contents.list.items); var log = logger.Log.init(alloc); - var package_json_expr = ParseJSON(&source, &log, alloc) catch { + var package_json_expr = JSON.parsePackageJSONUTF8(&source, &log, alloc) catch { package_json_file = null; break :process_package_json; }; @@ -211,7 +209,7 @@ pub const InitCommand = struct { }; for (paths_to_try) |path| { - if (exists(path)) { + if (existsZ(path)) { fields.entry_point = bun.asByteSlice(path); break :infer; } @@ -248,7 +246,6 @@ pub const InitCommand = struct { alloc, "package name ", fields.name, - Output.enable_ansi_colors_stdout, ) catch |err| { if (err == error.EndOfStream) return; return err; @@ -260,7 +257,6 @@ pub const InitCommand = struct { alloc, "entry point ", fields.entry_point, - Output.enable_ansi_colors_stdout, ) catch |err| { if (err == error.EndOfStream) return; return err; @@ -282,16 +278,16 @@ pub const InitCommand = struct { var steps = Steps{}; - steps.write_gitignore = !exists(".gitignore"); + steps.write_gitignore = !existsZ(".gitignore"); - steps.write_readme = !exists("README.md") and !exists("README") and !exists("README.txt") and !exists("README.mdx"); + steps.write_readme = !existsZ("README.md") and !existsZ("README") and !existsZ("README.txt") and !existsZ("README.mdx"); steps.write_tsconfig = brk: { - if (exists("tsconfig.json")) { + if (existsZ("tsconfig.json")) { break :brk false; } - if (exists("jsconfig.json")) { + if 
(existsZ("jsconfig.json")) { break :brk false; } @@ -439,7 +435,7 @@ pub const InitCommand = struct { " \"'", fields.entry_point, )) { - Output.prettyln(" bun run {any}", .{JSPrinter.formatJSONString(fields.entry_point)}); + Output.prettyln(" bun run {any}", .{bun.fmt.formatJSONString(fields.entry_point)}); } else { Output.prettyln(" bun run {s}", .{fields.entry_point}); } @@ -447,7 +443,7 @@ pub const InitCommand = struct { Output.flush(); - if (exists("package.json")) { + if (existsZ("package.json")) { var process = std.process.Child.init( &.{ try bun.selfExePath(), diff --git a/src/cli/install_command.zig b/src/cli/install_command.zig index 6183d43d86..ca69c32e46 100644 --- a/src/cli/install_command.zig +++ b/src/cli/install_command.zig @@ -9,7 +9,7 @@ pub const InstallCommand = struct { error.InvalidPackageJSON, => { const log = &bun.CLI.Cli.log_; - log.printForLogLevel(bun.Output.errorWriter()) catch {}; + log.print(bun.Output.errorWriter()) catch {}; bun.Global.exit(1); }, else => |e| return e, diff --git a/src/cli/outdated_command.zig b/src/cli/outdated_command.zig new file mode 100644 index 0000000000..51d1e088b0 --- /dev/null +++ b/src/cli/outdated_command.zig @@ -0,0 +1,674 @@ +const std = @import("std"); +const bun = @import("root").bun; +const Global = bun.Global; +const Output = bun.Output; +const Command = bun.CLI.Command; +const Install = bun.install; +const PackageManager = Install.PackageManager; +const Lockfile = Install.Lockfile; +const PackageID = Install.PackageID; +const DependencyID = Install.DependencyID; +const Behavior = Install.Dependency.Behavior; +const invalid_package_id = Install.invalid_package_id; +const Resolution = Install.Resolution; +const string = bun.string; +const strings = bun.strings; +const PathBuffer = bun.PathBuffer; +const FileSystem = bun.fs.FileSystem; +const path = bun.path; +const glob = bun.glob; +const Table = bun.fmt.Table; + +pub const OutdatedCommand = struct { + pub fn exec(ctx: Command.Context) !void { + Output.prettyln("bun outdated v" ++ Global.package_json_version_with_sha ++ "", .{}); + Output.flush(); + + const cli = try PackageManager.CommandLineArguments.parse(ctx.allocator, .outdated); + + const manager, const original_cwd = PackageManager.init(ctx, cli, .outdated) catch |err| { + if (!cli.silent) { + if (err == error.MissingPackageJSON) { + Output.errGeneric("missing package.json, nothing outdated", .{}); + } + Output.errGeneric("failed to initialize bun install: {s}", .{@errorName(err)}); + } + + Global.crash(); + }; + defer ctx.allocator.free(original_cwd); + + return switch (manager.options.log_level) { + inline else => |log_level| outdated(ctx, original_cwd, manager, log_level), + }; + } + + fn outdated(ctx: Command.Context, original_cwd: string, manager: *PackageManager, comptime log_level: PackageManager.Options.LogLevel) !void { + const load_lockfile_result = manager.lockfile.loadFromDisk( + manager, + manager.allocator, + manager.log, + manager.options.lockfile_path, + true, + ); + + manager.lockfile = switch (load_lockfile_result) { + .not_found => { + if (log_level != .silent) { + Output.errGeneric("missing lockfile, nothing outdated", .{}); + } + Global.crash(); + }, + .err => |cause| { + if (log_level != .silent) { + switch (cause.step) { + .open_file => Output.errGeneric("failed to open lockfile: {s}", .{ + @errorName(cause.value), + }), + .parse_file => Output.errGeneric("failed to parse lockfile: {s}", .{ + @errorName(cause.value), + }), + .read_file => Output.errGeneric("failed to read lockfile: {s}", .{ + 
@errorName(cause.value), + }), + .migrating => Output.errGeneric("failed to migrate lockfile: {s}", .{ + @errorName(cause.value), + }), + } + + if (ctx.log.hasErrors()) { + try manager.log.print(Output.errorWriter()); + } + } + + Global.crash(); + }, + .ok => |ok| ok.lockfile, + }; + + switch (Output.enable_ansi_colors) { + inline else => |enable_ansi_colors| { + if (manager.options.filter_patterns.len > 0) { + const filters = manager.options.filter_patterns; + const workspace_pkg_ids = findMatchingWorkspaces( + bun.default_allocator, + original_cwd, + manager, + filters, + ) catch bun.outOfMemory(); + defer bun.default_allocator.free(workspace_pkg_ids); + + try updateManifestsIfNecessary(manager, log_level, workspace_pkg_ids); + try printOutdatedInfoTable(manager, workspace_pkg_ids, true, enable_ansi_colors); + } else { + // just the current workspace + const root_pkg_id = manager.root_package_id.get(manager.lockfile, manager.workspace_name_hash); + if (root_pkg_id == invalid_package_id) return; + + try updateManifestsIfNecessary(manager, log_level, &.{root_pkg_id}); + try printOutdatedInfoTable(manager, &.{root_pkg_id}, false, enable_ansi_colors); + } + }, + } + } + + // TODO: use in `bun pack, publish, run, ...` + const FilterType = union(enum) { + all, + name: []const u32, + path: []const u32, + + pub fn init(pattern: []const u32, is_path: bool) @This() { + return if (is_path) .{ + .path = pattern, + } else .{ + .name = pattern, + }; + } + + pub fn deinit(this: @This(), allocator: std.mem.Allocator) void { + switch (this) { + .path, .name => |pattern| allocator.free(pattern), + else => {}, + } + } + }; + + fn findMatchingWorkspaces( + allocator: std.mem.Allocator, + original_cwd: string, + manager: *PackageManager, + filters: []const string, + ) error{OutOfMemory}![]const PackageID { + const lockfile = manager.lockfile; + const packages = lockfile.packages.slice(); + const pkg_names = packages.items(.name); + const pkg_resolutions = packages.items(.resolution); + const string_buf = lockfile.buffers.string_bytes.items; + + var workspace_pkg_ids: std.ArrayListUnmanaged(PackageID) = .{}; + for (pkg_resolutions, 0..) 
|resolution, pkg_id| { + if (resolution.tag != .workspace and resolution.tag != .root) continue; + try workspace_pkg_ids.append(allocator, @intCast(pkg_id)); + } + + const converted_filters = converted_filters: { + const buf = try allocator.alloc(FilterType, filters.len); + for (filters, buf) |filter, *converted| { + if ((filter.len == 1 and filter[0] == '*') or strings.eqlComptime(filter, "**")) { + converted.* = .all; + continue; + } + + const is_path = filter.len > 0 and filter[0] == '.'; + + const joined_filter = if (is_path) + strings.withoutTrailingSlash(path.joinAbsString(original_cwd, &[_]string{filter}, .posix)) + else + filter; + + if (joined_filter.len == 0) { + converted.* = FilterType.init(&.{}, is_path); + continue; + } + + const length = bun.simdutf.length.utf32.from.utf8.le(joined_filter); + const convert_buf = try allocator.alloc(u32, length); + + const convert_result = bun.simdutf.convert.utf8.to.utf32.with_errors.le(joined_filter, convert_buf); + if (!convert_result.isSuccessful()) { + // nothing would match + converted.* = FilterType.init(&.{}, false); + continue; + } + + converted.* = FilterType.init(convert_buf[0..convert_result.count], is_path); + } + break :converted_filters buf; + }; + defer { + for (converted_filters) |filter| { + filter.deinit(allocator); + } + allocator.free(converted_filters); + } + + // move all matched workspaces to front of array + var i: usize = 0; + while (i < workspace_pkg_ids.items.len) { + const workspace_pkg_id = workspace_pkg_ids.items[i]; + + const matched = matched: { + for (converted_filters) |filter| { + switch (filter) { + .path => |pattern| { + if (pattern.len == 0) continue; + const res = pkg_resolutions[workspace_pkg_id]; + + const res_path = switch (res.tag) { + .workspace => res.value.workspace.slice(string_buf), + .root => FileSystem.instance.top_level_dir, + else => unreachable, + }; + + const abs_res_path = path.joinAbsString(FileSystem.instance.top_level_dir, &[_]string{res_path}, .posix); + + if (!glob.matchImpl(pattern, strings.withoutTrailingSlash(abs_res_path)).matches()) { + break :matched false; + } + }, + .name => |pattern| { + const name = pkg_names[workspace_pkg_id].slice(string_buf); + + if (!glob.matchImpl(pattern, name).matches()) { + break :matched false; + } + }, + .all => {}, + } + } + + break :matched true; + }; + + if (matched) { + i += 1; + } else { + _ = workspace_pkg_ids.swapRemove(i); + } + } + + return workspace_pkg_ids.items; + } + + fn printOutdatedInfoTable( + manager: *PackageManager, + workspace_pkg_ids: []const PackageID, + was_filtered: bool, + comptime enable_ansi_colors: bool, + ) !void { + const package_patterns = package_patterns: { + const args = manager.options.positionals[1..]; + if (args.len == 0) break :package_patterns null; + + var at_least_one_greater_than_zero = false; + + const patterns_buf = bun.default_allocator.alloc(FilterType, args.len) catch bun.outOfMemory(); + for (args, patterns_buf) |arg, *converted| { + if (arg.len == 0) { + converted.* = FilterType.init(&.{}, false); + continue; + } + + if ((arg.len == 1 and arg[0] == '*') or strings.eqlComptime(arg, "**")) { + converted.* = .all; + at_least_one_greater_than_zero = true; + continue; + } + + const length = bun.simdutf.length.utf32.from.utf8.le(arg); + const convert_buf = bun.default_allocator.alloc(u32, length) catch bun.outOfMemory(); + + const convert_result = bun.simdutf.convert.utf8.to.utf32.with_errors.le(arg, convert_buf); + if (!convert_result.isSuccessful()) { + converted.* = FilterType.init(&.{}, false); + 
continue; + } + + converted.* = FilterType.init(convert_buf[0..convert_result.count], false); + at_least_one_greater_than_zero = at_least_one_greater_than_zero or convert_result.count > 0; + } + + // nothing will match + if (!at_least_one_greater_than_zero) return; + + break :package_patterns patterns_buf; + }; + defer { + if (package_patterns) |patterns| { + for (patterns) |pattern| { + pattern.deinit(bun.default_allocator); + } + bun.default_allocator.free(patterns); + } + } + + var max_name: usize = 0; + var max_current: usize = 0; + var max_update: usize = 0; + var max_latest: usize = 0; + var max_workspace: usize = 0; + + const lockfile = manager.lockfile; + const string_buf = lockfile.buffers.string_bytes.items; + const dependencies = lockfile.buffers.dependencies.items; + const packages = lockfile.packages.slice(); + const pkg_names = packages.items(.name); + const pkg_resolutions = packages.items(.resolution); + const pkg_dependencies = packages.items(.dependencies); + + var version_buf = std.ArrayList(u8).init(bun.default_allocator); + defer version_buf.deinit(); + const version_writer = version_buf.writer(); + + var outdated_ids: std.ArrayListUnmanaged(struct { package_id: PackageID, dep_id: DependencyID, workspace_pkg_id: PackageID }) = .{}; + defer outdated_ids.deinit(manager.allocator); + + for (workspace_pkg_ids) |workspace_pkg_id| { + const pkg_deps = pkg_dependencies[workspace_pkg_id]; + for (pkg_deps.begin()..pkg_deps.end()) |dep_id| { + const package_id = lockfile.buffers.resolutions.items[dep_id]; + if (package_id == invalid_package_id) continue; + const dep = lockfile.buffers.dependencies.items[dep_id]; + if (dep.version.tag != .npm and dep.version.tag != .dist_tag) continue; + const resolution = pkg_resolutions[package_id]; + if (resolution.tag != .npm) continue; + + // package patterns match against dependency name (name in package.json) + if (package_patterns) |patterns| { + const match = match: { + for (patterns) |pattern| { + switch (pattern) { + .path => unreachable, + .name => |name_pattern| { + if (name_pattern.len == 0) continue; + if (!glob.matchImpl(name_pattern, dep.name.slice(string_buf)).matches()) { + break :match false; + } + }, + .all => {}, + } + } + + break :match true; + }; + if (!match) { + continue; + } + } + + const package_name = pkg_names[package_id].slice(string_buf); + var expired = false; + const manifest = manager.manifests.byNameAllowExpired( + manager.scopeForPackageName(package_name), + package_name, + &expired, + ) orelse continue; + + const latest = manifest.findByDistTag("latest") orelse continue; + + const update_version = if (dep.version.tag == .npm) + manifest.findBestVersion(dep.version.value.npm.version, string_buf) orelse continue + else + manifest.findByDistTag(dep.version.value.dist_tag.tag.slice(string_buf)) orelse continue; + + if (resolution.value.npm.version.order(latest.version, string_buf, manifest.string_buf) != .lt) continue; + + const package_name_len = package_name.len + + if (dep.behavior.dev) + " (dev)".len + else if (dep.behavior.peer) + " (peer)".len + else if (dep.behavior.optional) + " (optional)".len + else + 0; + + if (package_name_len > max_name) max_name = package_name_len; + + version_writer.print("{}", .{resolution.value.npm.version.fmt(string_buf)}) catch bun.outOfMemory(); + if (version_buf.items.len > max_current) max_current = version_buf.items.len; + version_buf.clearRetainingCapacity(); + + version_writer.print("{}", .{update_version.version.fmt(manifest.string_buf)}) catch bun.outOfMemory(); + if 
(version_buf.items.len > max_update) max_update = version_buf.items.len; + version_buf.clearRetainingCapacity(); + + version_writer.print("{}", .{latest.version.fmt(manifest.string_buf)}) catch bun.outOfMemory(); + if (version_buf.items.len > max_latest) max_latest = version_buf.items.len; + version_buf.clearRetainingCapacity(); + + const workspace_name = pkg_names[workspace_pkg_id].slice(string_buf); + if (workspace_name.len > max_workspace) max_workspace = workspace_name.len; + + outdated_ids.append( + bun.default_allocator, + .{ + .package_id = package_id, + .dep_id = @intCast(dep_id), + .workspace_pkg_id = workspace_pkg_id, + }, + ) catch bun.outOfMemory(); + } + } + + if (outdated_ids.items.len == 0) return; + + const package_column_inside_length = @max("Packages".len, max_name); + const current_column_inside_length = @max("Current".len, max_current); + const update_column_inside_length = @max("Update".len, max_update); + const latest_column_inside_length = @max("Latest".len, max_latest); + const workspace_column_inside_length = @max("Workspace".len, max_workspace); + + const column_left_pad = 1; + const column_right_pad = 1; + + const table = Table("blue", column_left_pad, column_right_pad, enable_ansi_colors).init( + &if (was_filtered) + [_][]const u8{ + "Package", + "Current", + "Update", + "Latest", + "Workspace", + } + else + [_][]const u8{ + "Package", + "Current", + "Update", + "Latest", + }, + &if (was_filtered) + [_]usize{ + package_column_inside_length, + current_column_inside_length, + update_column_inside_length, + latest_column_inside_length, + workspace_column_inside_length, + } + else + [_]usize{ + package_column_inside_length, + current_column_inside_length, + update_column_inside_length, + latest_column_inside_length, + }, + ); + + table.printTopLineSeparator(); + table.printColumnNames(); + + for (workspace_pkg_ids) |workspace_pkg_id| { + inline for ( + .{ + Behavior{ .normal = true }, + Behavior{ .dev = true }, + Behavior{ .peer = true }, + Behavior{ .optional = true }, + }, + ) |group_behavior| { + for (outdated_ids.items) |ids| { + if (workspace_pkg_id != ids.workspace_pkg_id) continue; + const package_id = ids.package_id; + const dep_id = ids.dep_id; + + const dep = dependencies[dep_id]; + if (@as(u8, @bitCast(group_behavior)) & @as(u8, @bitCast(dep.behavior)) == 0) continue; + + const package_name = pkg_names[package_id].slice(string_buf); + const resolution = pkg_resolutions[package_id]; + + var expired = false; + const manifest = manager.manifests.byNameAllowExpired( + manager.scopeForPackageName(package_name), + package_name, + &expired, + ) orelse continue; + + const latest = manifest.findByDistTag("latest") orelse continue; + const update = if (dep.version.tag == .npm) + manifest.findBestVersion(dep.version.value.npm.version, string_buf) orelse continue + else + manifest.findByDistTag(dep.version.value.dist_tag.tag.slice(string_buf)) orelse continue; + + table.printLineSeparator(); + + { + // package name + const behavior_str = if (dep.behavior.dev) + " (dev)" + else if (dep.behavior.peer) + " (peer)" + else if (dep.behavior.optional) + " (optional)" + else + ""; + + Output.pretty("{s}", .{table.verticalEdge()}); + for (0..column_left_pad) |_| Output.pretty(" ", .{}); + + Output.pretty("{s}{s}", .{ package_name, behavior_str }); + for (package_name.len + behavior_str.len..package_column_inside_length + column_right_pad) |_| Output.pretty(" ", .{}); + } + + { + // current version + Output.pretty("{s}", .{table.verticalEdge()}); + for (0..column_left_pad) |_| 
Output.pretty(" ", .{}); + + version_writer.print("{}", .{resolution.value.npm.version.fmt(string_buf)}) catch bun.outOfMemory(); + Output.pretty("{s}", .{version_buf.items}); + for (version_buf.items.len..current_column_inside_length + column_right_pad) |_| Output.pretty(" ", .{}); + version_buf.clearRetainingCapacity(); + } + + { + // update version + Output.pretty("{s}", .{table.verticalEdge()}); + for (0..column_left_pad) |_| Output.pretty(" ", .{}); + + version_writer.print("{}", .{update.version.fmt(manifest.string_buf)}) catch bun.outOfMemory(); + Output.pretty("{s}", .{update.version.diffFmt(resolution.value.npm.version, manifest.string_buf, string_buf)}); + for (version_buf.items.len..update_column_inside_length + column_right_pad) |_| Output.pretty(" ", .{}); + version_buf.clearRetainingCapacity(); + } + + { + // latest version + Output.pretty("{s}", .{table.verticalEdge()}); + for (0..column_left_pad) |_| Output.pretty(" ", .{}); + + version_writer.print("{}", .{latest.version.fmt(manifest.string_buf)}) catch bun.outOfMemory(); + Output.pretty("{s}", .{latest.version.diffFmt(resolution.value.npm.version, manifest.string_buf, string_buf)}); + for (version_buf.items.len..latest_column_inside_length + column_right_pad) |_| Output.pretty(" ", .{}); + version_buf.clearRetainingCapacity(); + } + + if (was_filtered) { + Output.pretty("{s}", .{table.verticalEdge()}); + for (0..column_left_pad) |_| Output.pretty(" ", .{}); + + const workspace_name = pkg_names[workspace_pkg_id].slice(string_buf); + Output.pretty("{s}", .{workspace_name}); + + for (workspace_name.len..workspace_column_inside_length + column_right_pad) |_| Output.pretty(" ", .{}); + } + + Output.pretty("{s}\n", .{table.verticalEdge()}); + } + } + } + + table.printBottomLineSeparator(); + } + + fn updateManifestsIfNecessary( + manager: *PackageManager, + comptime log_level: PackageManager.Options.LogLevel, + workspace_pkg_ids: []const PackageID, + ) !void { + const lockfile = manager.lockfile; + const resolutions = lockfile.buffers.resolutions.items; + const dependencies = lockfile.buffers.dependencies.items; + const string_buf = lockfile.buffers.string_bytes.items; + const packages = lockfile.packages.slice(); + const pkg_resolutions = packages.items(.resolution); + const pkg_names = packages.items(.name); + const pkg_dependencies = packages.items(.dependencies); + + for (workspace_pkg_ids) |workspace_pkg_id| { + const pkg_deps = pkg_dependencies[workspace_pkg_id]; + for (pkg_deps.begin()..pkg_deps.end()) |dep_id| { + if (dep_id >= dependencies.len) continue; + const package_id = resolutions[dep_id]; + if (package_id == invalid_package_id) continue; + const dep = dependencies[dep_id]; + if (dep.version.tag != .npm and dep.version.tag != .dist_tag) continue; + const resolution: Install.Resolution = pkg_resolutions[package_id]; + if (resolution.tag != .npm) continue; + + const package_name = pkg_names[package_id].slice(string_buf); + _ = manager.manifests.byName( + manager.scopeForPackageName(package_name), + package_name, + ) orelse { + const task_id = Install.Task.Id.forManifest(package_name); + if (manager.hasCreatedNetworkTask(task_id, dep.behavior.optional)) continue; + + manager.startProgressBarIfNone(); + + var task = manager.getNetworkTask(); + task.* = .{ + .package_manager = PackageManager.get(), + .callback = undefined, + .task_id = task_id, + .allocator = manager.allocator, + }; + try task.forManifest( + package_name, + manager.allocator, + manager.scopeForPackageName(package_name), + null, + 
dep.behavior.optional, + ); + + manager.enqueueNetworkTask(task); + }; + } + + manager.flushNetworkQueue(); + _ = manager.scheduleTasks(); + + if (manager.pendingTaskCount() > 1) { + try manager.runTasks( + *PackageManager, + manager, + .{ + .onExtract = {}, + .onResolve = {}, + .onPackageManifestError = {}, + .onPackageDownloadError = {}, + .progress_bar = true, + .manifests_only = true, + }, + true, + log_level, + ); + } + } + + manager.flushNetworkQueue(); + _ = manager.scheduleTasks(); + + const RunClosure = struct { + manager: *PackageManager, + err: ?anyerror = null, + pub fn isDone(closure: *@This()) bool { + if (closure.manager.pendingTaskCount() > 0) { + closure.manager.runTasks( + *PackageManager, + closure.manager, + .{ + .onExtract = {}, + .onResolve = {}, + .onPackageManifestError = {}, + .onPackageDownloadError = {}, + .progress_bar = true, + .manifests_only = true, + }, + true, + log_level, + ) catch |err| { + closure.err = err; + return true; + }; + } + + return closure.manager.pendingTaskCount() == 0; + } + }; + + var run_closure: RunClosure = .{ .manager = manager }; + manager.sleepUntil(&run_closure, &RunClosure.isDone); + + if (comptime log_level.showProgress()) { + manager.endProgressBar(); + Output.flush(); + } + + if (run_closure.err) |err| { + return err; + } + } +}; diff --git a/src/cli/pack_command.zig b/src/cli/pack_command.zig new file mode 100644 index 0000000000..02d43702a1 --- /dev/null +++ b/src/cli/pack_command.zig @@ -0,0 +1,2474 @@ +const std = @import("std"); +const bun = @import("root").bun; +const Global = bun.Global; +const Output = bun.Output; +const Command = bun.CLI.Command; +const Install = bun.install; +const Bin = Install.Bin; +const PackageManager = Install.PackageManager; +const Lockfile = Install.Lockfile; +const PackageID = Install.PackageID; +const DependencyID = Install.DependencyID; +const Behavior = Install.Dependency.Behavior; +const string = bun.string; +const stringZ = bun.stringZ; +const libarchive = @import("../libarchive/libarchive.zig").lib; +const Archive = libarchive.Archive; +const Expr = bun.js_parser.Expr; +const Semver = @import("../install/semver.zig"); +const File = bun.sys.File; +const FD = bun.FD; +const strings = bun.strings; +const glob = bun.glob; +const PathBuffer = bun.PathBuffer; +const DirIterator = bun.DirIterator; +const Environment = bun.Environment; +const RunCommand = bun.RunCommand; +const FileSystem = bun.fs.FileSystem; +const OOM = bun.OOM; +const js_printer = bun.js_printer; +const E = bun.js_parser.E; +const Progress = bun.Progress; +const JSON = bun.JSON; +const BoringSSL = bun.BoringSSL; +const sha = bun.sha; +const LogLevel = PackageManager.Options.LogLevel; +const FileDescriptor = bun.FileDescriptor; +const Publish = bun.CLI.PublishCommand; +const Dependency = Install.Dependency; + +pub const PackCommand = struct { + pub const Context = struct { + manager: *PackageManager, + allocator: std.mem.Allocator, + command_ctx: Command.Context, + + // `bun pack` does not require a lockfile, but + // it's possible we will need it for finding + // workspace versions. This is the only valid lockfile + // pointer in this file. 
`manager.lockfile` is incorrect + lockfile: ?*Lockfile, + + bundled_deps: std.ArrayListUnmanaged(BundledDep) = .{}, + + stats: Stats = .{}, + + const Stats = struct { + unpacked_size: usize = 0, + total_files: usize = 0, + ignored_files: usize = 0, + ignored_directories: usize = 0, + packed_size: usize = 0, + bundled_deps: usize = 0, + }; + + pub fn printSummary( + stats: Stats, + maybe_shasum: ?[sha.SHA1.digest]u8, + maybe_integrity: ?[sha.SHA512.digest]u8, + log_level: LogLevel, + ) void { + if (log_level != .silent) { + Output.prettyln("\nTotal files: {d}", .{stats.total_files}); + if (maybe_shasum) |shasum| { + Output.prettyln("Shasum: {s}", .{bun.fmt.bytesToHex(shasum, .lower)}); + } + if (maybe_integrity) |integrity| { + Output.prettyln("Integrity: {}", .{bun.fmt.integrity(integrity, .short)}); + } + Output.prettyln("Unpacked size: {}", .{ + bun.fmt.size(stats.unpacked_size, .{ .space_between_number_and_unit = false }), + }); + if (stats.packed_size > 0) { + Output.pretty("Packed size: {}\n", .{ + bun.fmt.size(stats.packed_size, .{ .space_between_number_and_unit = false }), + }); + } + if (stats.bundled_deps > 0) { + Output.pretty("Bundled deps: {d}\n", .{stats.bundled_deps}); + } + } + } + }; + + pub const BundledDep = struct { + name: string, + was_packed: bool = false, + from_root_package_json: bool, + }; + + pub fn execWithManager(ctx: Command.Context, manager: *PackageManager) !void { + Output.prettyln("bun pack v" ++ Global.package_json_version_with_sha ++ "", .{}); + Output.flush(); + + var lockfile: Lockfile = undefined; + const load_from_disk_result = lockfile.loadFromDisk( + manager, + manager.allocator, + manager.log, + manager.options.lockfile_path, + false, + ); + + var pack_ctx: Context = .{ + .manager = manager, + .allocator = ctx.allocator, + .command_ctx = ctx, + .lockfile = switch (load_from_disk_result) { + .ok => |ok| ok.lockfile, + .err => |cause| err: { + switch (cause.step) { + .open_file => { + if (cause.value == error.ENOENT) break :err null; + Output.errGeneric("failed to open lockfile: {s}", .{ + @errorName(cause.value), + }); + }, + .parse_file => Output.errGeneric("failed to parse lockfile: {s}", .{ + @errorName(cause.value), + }), + .read_file => Output.errGeneric("failed to read lockfile: {s}", .{ + @errorName(cause.value), + }), + .migrating => Output.errGeneric("failed to migrate lockfile: {s}", .{ + @errorName(cause.value), + }), + } + + if (manager.log.hasErrors()) { + try manager.log.print(Output.errorWriter()); + } + + Global.crash(); + }, + else => null, + }, + }; + + switch (manager.options.log_level) { + inline else => |log_level| { + // var arena = std.heap.ArenaAllocator.init(ctx.allocator); + // defer arena.deinit(); + + // if (manager.options.filter_patterns.len > 0) { + // // TODO: --filter + // // loop, convert, find matching workspaces, then pack each + // return; + // } + + // just pack the current workspace + pack(&pack_ctx, manager.original_package_json_path, log_level, false) catch |err| { + switch (err) { + error.OutOfMemory => bun.outOfMemory(), + error.MissingPackageName, error.MissingPackageVersion => { + Output.errGeneric("package.json must have `name` and `version` fields", .{}); + Global.crash(); + }, + error.InvalidPackageName, error.InvalidPackageVersion => { + Output.errGeneric("package.json `name` and `version` fields must be non-empty strings", .{}); + Global.crash(); + }, + error.MissingPackageJSON => { + Output.errGeneric("failed to find a package.json in: \"{s}\"", .{manager.original_package_json_path}); + 
Global.crash(); + }, + } + }; + }, + } + } + + pub fn exec(ctx: Command.Context) !void { + const cli = try PackageManager.CommandLineArguments.parse(ctx.allocator, .pack); + + const manager, const original_cwd = PackageManager.init(ctx, cli, .pack) catch |err| { + if (!cli.silent) { + switch (err) { + error.MissingPackageJSON => { + var cwd_buf: bun.PathBuffer = undefined; + const cwd = bun.getcwd(&cwd_buf) catch { + Output.errGeneric("failed to find project package.json", .{}); + Global.crash(); + }; + Output.errGeneric("failed to find project package.json from: \"{s}\"", .{cwd}); + }, + else => Output.errGeneric("failed to initialize bun install: {s}", .{@errorName(err)}), + } + } + + Global.crash(); + }; + defer ctx.allocator.free(original_cwd); + + return execWithManager(ctx, manager); + } + + pub fn PackError(comptime for_publish: bool) type { + return OOM || error{ + MissingPackageName, + InvalidPackageName, + MissingPackageVersion, + InvalidPackageVersion, + MissingPackageJSON, + } || + if (for_publish) error{ + RestrictedUnscopedPackage, + PrivatePackage, + } else error{}; + } + + const package_prefix = "package/"; + + const root_default_ignore_patterns = [_][]const u32{ + &.{ 112, 97, 99, 107, 97, 103, 101, 45, 108, 111, 99, 107, 46, 106, 115, 111, 110 }, // package-lock.json + &.{ 121, 97, 114, 110, 46, 108, 111, 99, 107 }, // yarn.lock + &.{ 112, 110, 112, 109, 45, 108, 111, 99, 107, 46, 121, 97, 109, 108 }, // pnpm-lock.yaml + &.{ 'b', 'u', 'n', '.', 'l', 'o', 'c', 'k', 'b' }, // bun.lockb + &.{ 'b', 'u', 'n', '.', 'l', 'o', 'c', 'k' }, + }; + + // pattern, can override + const default_ignore_patterns = [_]struct { []const u32, bool }{ + .{ &.{ '.', '*', '.', 's', 'w', 'p' }, true }, + .{ &.{ 46, 95, 42 }, true }, // "._*", + .{ &.{ 46, 68, 83, 95, 83, 116, 111, 114, 101 }, true }, // ".DS_Store", + .{ &.{ 46, 103, 105, 116 }, false }, // ".git", + .{ &.{ 46, 103, 105, 116, 105, 103, 110, 111, 114, 101 }, true }, // ".gitignore", + .{ &.{ 46, 104, 103 }, false }, // ".hg", + .{ &.{ 46, 110, 112, 109, 105, 103, 110, 111, 114, 101 }, true }, // ".npmignore", + .{ &.{ 46, 110, 112, 109, 114, 99 }, false }, // ".npmrc", + .{ &.{ 46, 108, 111, 99, 107, 45, 119, 115, 99, 114, 105, 112, 116 }, true }, // ".lock-wscript", + .{ &.{ 46, 115, 118, 110 }, true }, // ".svn", + .{ &.{ 46, 119, 97, 102, 112, 105, 99, 107, 108, 101, 45, 42 }, true }, // ".wafpickle-*", + .{ &.{ 67, 86, 83 }, true }, // "CVS", + .{ &.{ 110, 112, 109, 45, 100, 101, 98, 117, 103, 46, 108, 111, 103 }, true }, // "npm-debug.log", + // mentioned in the docs but does not appear to be ignored by default + // .{ &.{ 99, 111, 110, 102, 105, 103, 46, 103, 121, 112, 105 }, false }, // "config.gypi", + + .{ &.{ '.', 'e', 'n', 'v', '.', 'p', 'r', 'o', 'd', 'u', 'c', 't', 'i', 'o', 'n' }, true }, + .{ &.{ 'b', 'u', 'n', 'f', 'i', 'g', '.', 't', 'o', 'm', 'l' }, true }, + }; + + const PackListEntry = struct { + subpath: stringZ, + size: usize = 0, + }; + const PackList = std.ArrayListUnmanaged(PackListEntry); + + const PackQueueContext = struct { + pub fn lessThan(_: void, a: string, b: string) std.math.Order { + return strings.order(a, b); + } + }; + + const PackQueue = std.PriorityQueue(stringZ, void, PackQueueContext.lessThan); + + const DirInfo = struct { + std.fs.Dir, // the dir + string, // the dir subpath + usize, // dir depth. 
used to shrink ignore stack + }; + + fn iterateIncludedProjectTree( + allocator: std.mem.Allocator, + includes: []const Pattern, + root_dir: std.fs.Dir, + comptime log_level: LogLevel, + ) OOM!PackQueue { + var pack_queue = PackQueue.init(allocator, {}); + + var ignores: std.ArrayListUnmanaged(IgnorePatterns) = .{}; + defer ignores.deinit(allocator); + + var dirs: std.ArrayListUnmanaged(DirInfo) = .{}; + defer dirs.deinit(allocator); + + try dirs.append(allocator, .{ root_dir, "", 1 }); + + var included_dirs: std.ArrayListUnmanaged(DirInfo) = .{}; + defer included_dirs.deinit(allocator); + + var subpath_dedupe = bun.StringHashMap(void).init(allocator); + defer subpath_dedupe.deinit(); + + // first find included dirs and files + while (dirs.popOrNull()) |dir_info| { + var dir, const dir_subpath, const dir_depth = dir_info; + defer { + if (dir_depth != 1) { + dir.close(); + } + } + + var dir_iter = DirIterator.iterate(dir, .u8); + while (dir_iter.next().unwrap() catch null) |entry| { + if (entry.kind != .file and entry.kind != .directory) continue; + + const entry_name = entry.name.slice(); + const entry_subpath = try entrySubpath(allocator, dir_subpath, entry_name); + + var included = false; + + if (dir_depth == 1) { + if (strings.eqlComptime(entry_name, "package.json")) continue; + if (strings.eqlComptime(entry_name, "node_modules")) continue; + + // TODO: should this be case insensitive on all platforms? + const eql = if (comptime Environment.isLinux) + strings.eqlComptime + else + strings.eqlCaseInsensitiveASCIIICheckLength; + + if (entry.kind == .file and + (eql(entry_name, "package.json") or + eql(entry_name, "LICENSE") or + eql(entry_name, "LICENCE") or + eql(entry_name, "README") or + entry_name.len > "README.".len and eql(entry_name[0.."README.".len], "README."))) + included = true; + } + + if (!included) { + for (includes) |include| { + if (include.dirs_only and entry.kind != .directory) continue; + + // include patterns are not recursive unless they start with `**/` + // normally the behavior of `index.js` and `**/index.js` are the same, + // but includes require `**/` + const match_path = if (include.@"leading **/") entry_name else entry_subpath; + switch (glob.matchImpl(include.glob, match_path)) { + .match => included = true, + .negate_no_match => included = false, + + else => {}, + } + } + } + + if (!included) { + if (entry.kind == .directory) { + const subdir = openSubdir(dir, entry_name, entry_subpath); + try dirs.append(allocator, .{ subdir, entry_subpath, dir_depth + 1 }); + } + + continue; + } + + switch (entry.kind) { + .directory => { + const subdir = openSubdir(dir, entry_name, entry_subpath); + try included_dirs.append(allocator, .{ subdir, entry_subpath, dir_depth + 1 }); + }, + .file => { + const dedupe_entry = try subpath_dedupe.getOrPut(entry_subpath); + bun.assertWithLocation(!dedupe_entry.found_existing, @src()); + if (dedupe_entry.found_existing) continue; + + try pack_queue.add(entry_subpath); + }, + else => unreachable, + } + } + } + + // for each included dir, traverse its entries, exclude any with `negate_no_match`.
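+ // Note: `addEntireTree` walks each included directory depth-first; the default ignore
+ // list and any nested .npmignore/.gitignore files still apply, and `subpath_dedupe`
+ // keeps a file from being queued twice when include patterns overlap.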
+ for (included_dirs.items) |included_dir_info| { + try addEntireTree(allocator, included_dir_info, &pack_queue, &subpath_dedupe, log_level); + } + + return pack_queue; + } + + /// Adds all files in a directory tree to `pack_list` (default ignores still apply) + fn addEntireTree( + allocator: std.mem.Allocator, + root_dir_info: DirInfo, + pack_queue: *PackQueue, + maybe_dedupe: ?*bun.StringHashMap(void), + comptime log_level: LogLevel, + ) OOM!void { + var dirs: std.ArrayListUnmanaged(DirInfo) = .{}; + defer dirs.deinit(allocator); + + try dirs.append(allocator, root_dir_info); + + var ignores: std.ArrayListUnmanaged(IgnorePatterns) = .{}; + defer ignores.deinit(allocator); + + while (dirs.popOrNull()) |dir_info| { + var dir, const dir_subpath, const dir_depth = dir_info; + defer dir.close(); + + while (ignores.getLastOrNull()) |last| { + if (last.depth < dir_depth) break; + + last.deinit(allocator); + ignores.items.len -= 1; + } + + if (try IgnorePatterns.readFromDisk(allocator, dir, dir_depth)) |patterns| { + try ignores.append(allocator, patterns); + } + + if (comptime Environment.isDebug) { + // make sure depths are in order + if (ignores.items.len > 0) { + for (1..ignores.items.len) |i| { + bun.assertWithLocation(ignores.items[i - 1].depth < ignores.items[i].depth, @src()); + } + } + } + + var iter = DirIterator.iterate(dir, .u8); + while (iter.next().unwrap() catch null) |entry| { + if (entry.kind != .file and entry.kind != .directory) continue; + + const entry_name = entry.name.slice(); + const entry_subpath = try entrySubpath(allocator, dir_subpath, entry_name); + + if (dir_depth == root_dir_info[2]) { + if (entry.kind == .directory and strings.eqlComptime(entry_name, "node_modules")) continue; + } + + if (isExcluded(entry, entry_subpath, dir_depth, ignores.items)) |used_pattern_info| { + if (comptime log_level.isVerbose()) { + const pattern, const kind = used_pattern_info; + Output.prettyln("ignore [{s}:{}] {s}{s}", .{ + @tagName(kind), + bun.fmt.debugUtf32PathFormatter(pattern), + entry_subpath, + if (entry.kind == .directory) "/" else "", + }); + Output.flush(); + } + continue; + } + + switch (entry.kind) { + .file => { + if (maybe_dedupe) |dedupe| { + const dedupe_entry = try dedupe.getOrPut(entry_subpath); + if (dedupe_entry.found_existing) continue; + } + try pack_queue.add(entry_subpath); + }, + .directory => { + const subdir = openSubdir(dir, entry_name, entry_subpath); + + try dirs.append(allocator, .{ + subdir, + entry_subpath, + dir_depth + 1, + }); + }, + else => unreachable, + } + } + } + } + + fn openSubdir( + dir: std.fs.Dir, + entry_name: string, + entry_subpath: stringZ, + ) std.fs.Dir { + return dir.openDirZ( + entryNameZ(entry_name, entry_subpath), + .{ .iterate = true }, + ) catch |err| { + Output.err(err, "failed to open directory \"{s}\" for packing", .{entry_subpath}); + Global.crash(); + }; + } + + fn entrySubpath( + allocator: std.mem.Allocator, + dir_subpath: string, + entry_name: string, + ) OOM!stringZ { + return std.fmt.allocPrintZ(allocator, "{s}{s}{s}", .{ + dir_subpath, + if (dir_subpath.len == 0) "" else "/", + entry_name, + }); + } + + fn entryNameZ( + entry_name: string, + entry_subpath: stringZ, + ) stringZ { + // doing this because `entry_subpath` has a sentinel and I don't trust `entry.name.sliceAssumeZ()` + return entry_subpath[entry_subpath.len - entry_name.len ..][0..entry_name.len :0]; + } + + fn iterateBundledDeps( + ctx: *Context, + root_dir: std.fs.Dir, + comptime log_level: LogLevel, + ) OOM!PackQueue { + var bundled_pack_queue = 
PackQueue.init(ctx.allocator, {}); + if (ctx.bundled_deps.items.len == 0) return bundled_pack_queue; + + const dir = root_dir.openDirZ("node_modules", .{ .iterate = true }) catch |err| { + switch (err) { + // ignore node_modules if it isn't a directory + error.NotDir => return bundled_pack_queue, + + else => { + Output.err(err, "failed to open \"node_modules\" to pack bundled dependencies", .{}); + Global.crash(); + }, + } + }; + + // A set of bundled dependency locations + // - node_modules/is-even + // - node_modules/is-even/node_modules/is-odd + // - node_modules/is-odd + // - ... + var dedupe = bun.StringHashMap(void).init(ctx.allocator); + defer dedupe.deinit(); + + var additional_bundled_deps: std.ArrayListUnmanaged(DirInfo) = .{}; + defer additional_bundled_deps.deinit(ctx.allocator); + + var iter = DirIterator.iterate(dir, .u8); + while (iter.next().unwrap() catch null) |entry| { + if (entry.kind != .directory) continue; + + const entry_name = entry.name.slice(); + + for (ctx.bundled_deps.items) |*dep| { + bun.assertWithLocation(dep.from_root_package_json, @src()); + if (!strings.eqlLong(entry_name, dep.name, true)) continue; + + const entry_subpath = try entrySubpath(ctx.allocator, "node_modules", entry_name); + + const dedupe_entry = try dedupe.getOrPut(entry_subpath); + if (dedupe_entry.found_existing) { + // already got to it in `addBundledDep` below + dep.was_packed = true; + break; + } + + const subdir = openSubdir(dir, entry_name, entry_subpath); + dep.was_packed = true; + try addBundledDep( + ctx, + root_dir, + .{ subdir, entry_subpath, 2 }, + &bundled_pack_queue, + &dedupe, + &additional_bundled_deps, + log_level, + ); + + break; + } + } + + while (additional_bundled_deps.popOrNull()) |bundled_dir_info| { + const dir_subpath = bundled_dir_info[1]; + const maybe_slash = strings.lastIndexOfChar(dir_subpath, '/'); + bun.assertWithLocation(maybe_slash != null, @src()); + const dep_name: string = if (maybe_slash) |slash| dir_subpath[slash + 1 ..] 
else dir_subpath; + + try ctx.bundled_deps.append(ctx.allocator, .{ + .name = dep_name, + .from_root_package_json = false, + .was_packed = true, + }); + + try addBundledDep( + ctx, + root_dir, + bundled_dir_info, + &bundled_pack_queue, + &dedupe, + &additional_bundled_deps, + log_level, + ); + } + + return bundled_pack_queue; + } + + fn addBundledDep( + ctx: *Context, + root_dir: std.fs.Dir, + bundled_dir_info: DirInfo, + bundled_pack_queue: *PackQueue, + dedupe: *bun.StringHashMap(void), + additional_bundled_deps: *std.ArrayListUnmanaged(DirInfo), + comptime log_level: LogLevel, + ) OOM!void { + ctx.stats.bundled_deps += 1; + + var dirs: std.ArrayListUnmanaged(DirInfo) = .{}; + defer dirs.deinit(ctx.allocator); + + try dirs.append(ctx.allocator, bundled_dir_info); + + while (dirs.popOrNull()) |dir_info| { + var dir, const dir_subpath, const dir_depth = dir_info; + defer dir.close(); + + var iter = DirIterator.iterate(dir, .u8); + while (iter.next().unwrap() catch null) |entry| { + if (entry.kind != .file and entry.kind != .directory) continue; + + const entry_name = entry.name.slice(); + const entry_subpath = try entrySubpath(ctx.allocator, dir_subpath, entry_name); + + if (dir_depth == bundled_dir_info[2]) root_depth: { + if (strings.eqlComptime(entry_name, "package.json")) { + if (entry.kind != .file) break :root_depth; + // find more dependencies to bundle + const source = File.toSourceAt(dir, entryNameZ(entry_name, entry_subpath), ctx.allocator).unwrap() catch |err| { + Output.err(err, "failed to read package.json: \"{s}\"", .{entry_subpath}); + Global.crash(); + }; + + const json = JSON.parsePackageJSONUTF8(&source, ctx.manager.log, ctx.allocator) catch + break :root_depth; + + // for each dependency in `dependencies` find the closest node_modules folder + // with the dependency name as a dir entry, starting from the node_modules of the + // current bundled dependency + + for ([_]string{ "dependencies", "optionalDependencies" }) |dependency_group| { + const dependencies_expr = json.get(dependency_group) orelse continue; + if (dependencies_expr.data != .e_object) continue; + + const dependencies = dependencies_expr.data.e_object; + next_dep: for (dependencies.properties.slice()) |dep| { + if (dep.key == null) continue; + if (dep.value == null) continue; + + const dep_name = dep.key.?.asString(ctx.allocator) orelse continue; + + const dep_subpath = try std.fmt.allocPrintZ(ctx.allocator, "{s}/node_modules/{s}", .{ + dir_subpath, + dep_name, + }); + + // starting at `node_modules/is-even/node_modules/is-odd` + var dep_dir_depth: usize = bundled_dir_info[2] + 2; + + if (root_dir.openDirZ(dep_subpath, .{ .iterate = true })) |dep_dir| { + const dedupe_entry = try dedupe.getOrPut(dep_subpath); + if (dedupe_entry.found_existing) continue; + + try additional_bundled_deps.append(ctx.allocator, .{ dep_dir, dep_subpath, dep_dir_depth }); + } else |_| { + // keep searching + + // slice off the `node_modules` from above + var remain: []u8 = dep_subpath[0..dir_subpath.len]; + + while (strings.lastIndexOf(remain, "node_modules")) |node_modules_start| { + dep_dir_depth -= 2; + const node_modules_end = node_modules_start + "node_modules".len; + dep_subpath[node_modules_end] = '/'; + @memcpy(dep_subpath[node_modules_end + 1 ..][0..dep_name.len], dep_name); + dep_subpath[node_modules_end + 1 + dep_name.len] = 0; + const parent_dep_subpath = dep_subpath[0 .. 
node_modules_end + 1 + dep_name.len :0]; + remain = remain[0..node_modules_start]; + + const parent_dep_dir = root_dir.openDirZ(parent_dep_subpath, .{ .iterate = true }) catch continue; + + const dedupe_entry = try dedupe.getOrPut(parent_dep_subpath); + if (dedupe_entry.found_existing) continue :next_dep; + + try additional_bundled_deps.append(ctx.allocator, .{ parent_dep_dir, parent_dep_subpath, dep_dir_depth }); + continue :next_dep; + } + } + } + } + + break :root_depth; + } + + if (strings.eqlComptime(entry_name, "node_modules")) continue; + } + + if (isExcluded(entry, entry_subpath, dir_depth, &.{})) |used_pattern_info| { + if (comptime log_level.isVerbose()) { + const pattern, const kind = used_pattern_info; + Output.prettyln("ignore [{s}:{}] {s}{s}", .{ + @tagName(kind), + bun.fmt.debugUtf32PathFormatter(pattern), + entry_subpath, + if (entry.kind == .directory) "/" else "", + }); + Output.flush(); + } + continue; + } + + switch (entry.kind) { + .file => { + try bundled_pack_queue.add(entry_subpath); + }, + .directory => { + const subdir = openSubdir(dir, entry_name, entry_subpath); + + try dirs.append(ctx.allocator, .{ + subdir, + entry_subpath, + dir_depth + 1, + }); + }, + else => unreachable, + } + } + } + } + + /// Returns a list of files to pack and another list of files from bundled dependencies + fn iterateProjectTree( + allocator: std.mem.Allocator, + root_dir: std.fs.Dir, + comptime log_level: LogLevel, + ) OOM!PackQueue { + var pack_queue = PackQueue.init(allocator, {}); + + var ignores: std.ArrayListUnmanaged(IgnorePatterns) = .{}; + defer ignores.deinit(allocator); + + // Stacks and depth-first traversal. Doing so means we can push and pop from + // ignore patterns without needing to clone the entire list for future use. + var dirs: std.ArrayListUnmanaged(DirInfo) = .{}; + defer dirs.deinit(allocator); + + try dirs.append(allocator, .{ root_dir, "", 1 }); + + while (dirs.popOrNull()) |dir_info| { + var dir, const dir_subpath, const dir_depth = dir_info; + defer { + if (dir_depth != 1) { + dir.close(); + } + } + + while (ignores.getLastOrNull()) |last| { + if (last.depth < dir_depth) break; + + // pop patterns from files greater than or equal to the current depth. + last.deinit(allocator); + ignores.items.len -= 1; + } + + if (try IgnorePatterns.readFromDisk(allocator, dir, dir_depth)) |patterns| { + try ignores.append(allocator, patterns); + } + + if (comptime Environment.isDebug) { + // make sure depths are in order + if (ignores.items.len > 0) { + for (1..ignores.items.len) |i| { + bun.assertWithLocation(ignores.items[i - 1].depth < ignores.items[i].depth, @src()); + } + } + } + + var dir_iter = DirIterator.iterate(dir, .u8); + while (dir_iter.next().unwrap() catch null) |entry| { + if (entry.kind != .file and entry.kind != .directory) continue; + + const entry_name = entry.name.slice(); + const entry_subpath = try entrySubpath(allocator, dir_subpath, entry_name); + + if (dir_depth == 1) { + // Special case root package.json. It is always included + // and is possibly edited, so it's easier to handle it + // separately + if (strings.eqlComptime(entry_name, "package.json")) continue; + + // bundled dependencies are included only if they exist on disk. 
+ // handled later for simplicity + if (strings.eqlComptime(entry_name, "node_modules")) continue; + } + + if (isExcluded(entry, entry_subpath, dir_depth, ignores.items)) |used_pattern_info| { + if (comptime log_level.isVerbose()) { + const pattern, const kind = used_pattern_info; + Output.prettyln("ignore [{s}:{}] {s}{s}", .{ + @tagName(kind), + bun.fmt.debugUtf32PathFormatter(pattern), + entry_subpath, + if (entry.kind == .directory) "/" else "", + }); + Output.flush(); + } + continue; + } + + switch (entry.kind) { + .file => { + bun.assertWithLocation(entry_subpath.len > 0, @src()); + try pack_queue.add(entry_subpath); + }, + .directory => { + const subdir = openSubdir(dir, entry_name, entry_subpath); + + try dirs.append(allocator, .{ + subdir, + entry_subpath, + dir_depth + 1, + }); + }, + else => unreachable, + } + } + } + + return pack_queue; + } + + fn getBundledDeps( + allocator: std.mem.Allocator, + json: Expr, + comptime field: string, + ) OOM!?std.ArrayListUnmanaged(BundledDep) { + var deps: std.ArrayListUnmanaged(BundledDep) = .{}; + const bundled_deps = json.get(field) orelse return null; + + invalid_field: { + var iter = bundled_deps.asArray() orelse switch (bundled_deps.data) { + .e_array => return .{}, + else => break :invalid_field, + }; + while (iter.next()) |bundled_dep_item| { + const bundled_dep = try bundled_dep_item.asStringCloned(allocator) orelse break :invalid_field; + try deps.append(allocator, .{ + .name = bundled_dep, + .from_root_package_json = true, + }); + } + + return deps; + } + + Output.errGeneric("expected `{s}` to be an array of strings", .{field}); + Global.crash(); + } + + const BinInfo = struct { + path: string, + type: Type, + + const Type = enum { + file, + dir, + }; + }; + + fn getPackageBins( + allocator: std.mem.Allocator, + json: Expr, + ) OOM![]const BinInfo { + var bins: std.ArrayListUnmanaged(BinInfo) = .{}; + + var path_buf: PathBuffer = undefined; + + if (json.asProperty("bin")) |bin| { + if (bin.expr.asString(allocator)) |bin_str| { + const normalized = bun.path.normalizeBuf(bin_str, &path_buf, .posix); + try bins.append(allocator, .{ + .path = try allocator.dupe(u8, normalized), + .type = .file, + }); + return bins.items; + } + + switch (bin.expr.data) { + .e_object => |bin_obj| { + if (bin_obj.properties.len == 0) return &.{}; + + for (bin_obj.properties.slice()) |bin_prop| { + if (bin_prop.value) |bin_prop_value| { + if (bin_prop_value.asString(allocator)) |bin_str| { + const normalized = bun.path.normalizeBuf(bin_str, &path_buf, .posix); + try bins.append(allocator, .{ + .path = try allocator.dupe(u8, normalized), + .type = .file, + }); + } + } + } + }, + else => {}, + } + + return bins.items; + } + + if (json.asProperty("directories")) |directories| { + switch (directories.expr.data) { + .e_object => |directories_obj| { + if (directories_obj.asProperty("bin")) |bin| { + if (bin.expr.asString(allocator)) |bin_str| { + const normalized = bun.path.normalizeBuf(bin_str, &path_buf, .posix); + try bins.append(allocator, .{ + .path = try allocator.dupe(u8, normalized), + .type = .dir, + }); + } + } + }, + else => {}, + } + } + + return bins.items; + } + + fn isPackageBin(bins: []const BinInfo, maybe_bin_path: string) bool { + for (bins) |bin| { + switch (bin.type) { + .file => { + if (strings.eqlLong(bin.path, maybe_bin_path, true)) { + return true; + } + }, + .dir => { + const bin_without_trailing = strings.withoutTrailingSlash(bin.path); + if (strings.hasPrefix(maybe_bin_path, bin_without_trailing)) { + const remain = 
maybe_bin_path[bin_without_trailing.len..]; + if (remain.len > 1 and remain[0] == '/' and !strings.containsChar(remain[1..], '/')) { + return true; + } + } + }, + } + } + + return false; + } + + fn isExcluded( + entry: DirIterator.IteratorResult, + entry_subpath: stringZ, + dir_depth: usize, + ignores: []const IgnorePatterns, + ) ?struct { []const u32, IgnorePatterns.Kind } { + const entry_name = entry.name.slice(); + + if (dir_depth == 1) { + + // TODO: should this be case insensitive on all platforms? + const eql = if (comptime Environment.isLinux) + strings.eqlComptime + else + strings.eqlCaseInsensitiveASCIIICheckLength; + + // first, check files that can never be ignored. project root directory only + if (entry.kind == .file and + (eql(entry_name, "package.json") or + eql(entry_name, "LICENSE") or + eql(entry_name, "LICENCE") or + eql(entry_name, "README") or + entry_name.len > "README.".len and eql(entry_name[0.."README.".len], "README.") or + eql(entry_name, "CHANGELOG") or + entry_name.len > "CHANGELOG.".len and eql(entry_name[0.."CHANGELOG.".len], "CHANGELOG."))) + return null; + + // check default ignores that only apply to the root project directory + for (root_default_ignore_patterns) |pattern| { + switch (glob.matchImpl(pattern, entry_name)) { + .match => { + // cannot be reversed + return .{ + pattern, + .default, + }; + }, + + .no_match => {}, + + // default patterns don't use `!` + .negate_no_match => unreachable, + .negate_match => unreachable, + } + } + } + + var ignore_pattern: []const u32 = &.{}; + var ignore_kind: IgnorePatterns.Kind = .@".npmignore"; + + // then check default ignore list. None of the defaults contain slashes + // so just match against entry name + var ignored = false; + + for (default_ignore_patterns) |pattern_info| { + const pattern, const can_override = pattern_info; + switch (glob.matchImpl(pattern, entry_name)) { + .match => { + if (can_override) { + ignored = true; + ignore_pattern = pattern; + ignore_kind = .default; + + // break. doesn't matter if more default patterns + // match this path + break; + } + + return .{ + pattern, + .default, + }; + }, + .no_match => {}, + + // default patterns don't use `!` + .negate_no_match => unreachable, + .negate_match => unreachable, + } + } + + // lastly, check each .npmignore/.gitignore from root directory to + // the current directory.
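+ // Note: ignore files are applied from the shallowest directory to the deepest, and the
+ // last matching pattern wins: `.match` marks the entry as ignored, while `.negate_no_match`
+ // (only produced by `!` patterns) un-ignores it again.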
+ for (ignores) |ignore| { + var rel = entry_subpath; + if (ignore.has_rel_path) { + // trim parent directories up to the directory + // containing this ignore file + for (1..ignore.depth) |_| { + if (strings.indexOfChar(rel, '/')) |sep| { + rel = rel[sep + 1 ..]; + } + } + } + for (ignore.list) |pattern| { + if (pattern.dirs_only and entry.kind != .directory) continue; + + const match_path = if (pattern.rel_path) rel else entry_name; + switch (glob.matchImpl(pattern.glob, match_path)) { + .match => { + ignored = true; + ignore_pattern = pattern.glob; + ignore_kind = ignore.kind; + }, + .negate_no_match => ignored = false, + else => {}, + } + } + } + + return if (!ignored) + null + else + .{ + ignore_pattern, + ignore_kind, + }; + } + + const BufferedFileReader = std.io.BufferedReader(1024 * 512, File.Reader); + + pub fn pack( + ctx: *Context, + abs_package_json_path: stringZ, + comptime log_level: LogLevel, + comptime for_publish: bool, + ) PackError(for_publish)!if (for_publish) Publish.Context(true) else void { + const manager = ctx.manager; + const json = switch (manager.workspace_package_json_cache.getWithPath(manager.allocator, manager.log, abs_package_json_path, .{ + .guess_indentation = true, + })) { + .read_err => |err| { + Output.err(err, "failed to read package.json: {s}", .{abs_package_json_path}); + Global.crash(); + }, + .parse_err => |err| { + Output.err(err, "failed to parse package.json: {s}", .{abs_package_json_path}); + manager.log.print(Output.errorWriter()) catch {}; + Global.crash(); + }, + .entry => |entry| entry, + }; + + if (comptime for_publish) { + if (json.root.get("publishConfig")) |config| { + if (manager.options.publish_config.tag.len == 0) { + if (try config.getStringCloned(ctx.allocator, "tag")) |tag| { + manager.options.publish_config.tag = tag; + } + } + if (manager.options.publish_config.access == null) { + if (try config.getString(ctx.allocator, "access")) |access| { + manager.options.publish_config.access = PackageManager.Options.Access.fromStr(access[0]) orelse { + Output.errGeneric("invalid `access` value: '{s}'", .{access[0]}); + Global.crash(); + }; + } + } + } + + // maybe otp + } + + const package_name_expr: Expr = json.root.get("name") orelse return error.MissingPackageName; + const package_name = try package_name_expr.asStringCloned(ctx.allocator) orelse return error.InvalidPackageName; + if (comptime for_publish) { + const is_scoped = try Dependency.isScopedPackageName(package_name); + if (manager.options.publish_config.access) |access| { + if (access == .restricted and !is_scoped) { + return error.RestrictedUnscopedPackage; + } + } + } + defer if (comptime !for_publish) ctx.allocator.free(package_name); + if (package_name.len == 0) return error.InvalidPackageName; + + const package_version_expr: Expr = json.root.get("version") orelse return error.MissingPackageVersion; + const package_version = try package_version_expr.asStringCloned(ctx.allocator) orelse return error.InvalidPackageVersion; + defer if (comptime !for_publish) ctx.allocator.free(package_version); + if (package_version.len == 0) return error.InvalidPackageVersion; + + if (comptime for_publish) { + if (json.root.get("private")) |private| { + if (private.asBool()) |is_private| { + if (is_private) { + return error.PrivatePackage; + } + } + } + } + + const edited_package_json = try editRootPackageJSON(ctx.allocator, ctx.lockfile, json); + + var this_bundler: bun.bundler.Bundler = undefined; + + _ = RunCommand.configureEnvForRun( + ctx.command_ctx, + &this_bundler, + manager.env, + 
manager.options.log_level != .silent, + false, + ) catch |err| { + switch (err) { + error.OutOfMemory => |oom| return oom, + else => { + Output.errGeneric("failed to run pack scripts due to error: {s}\n", .{@errorName(err)}); + Global.crash(); + }, + } + }; + + const abs_workspace_path: string = strings.withoutTrailingSlash(strings.withoutSuffixComptime(abs_package_json_path, "package.json")); + + const postpack_script, const publish_script: ?[]const u8, const postpublish_script: ?[]const u8 = post_scripts: { + // --ignore-scripts + if (!manager.options.do.run_scripts) break :post_scripts .{ null, null, null }; + + const scripts = json.root.asProperty("scripts") orelse break :post_scripts .{ null, null, null }; + if (scripts.expr.data != .e_object) break :post_scripts .{ null, null, null }; + + if (comptime for_publish) { + if (scripts.expr.get("prepublishOnly")) |prepublish_only_script_str| { + if (prepublish_only_script_str.asString(ctx.allocator)) |prepublish_only| { + _ = RunCommand.runPackageScriptForeground( + ctx.command_ctx, + ctx.allocator, + prepublish_only, + "prepublishOnly", + abs_workspace_path, + this_bundler.env, + &.{}, + manager.options.log_level == .silent, + ctx.command_ctx.debug.use_system_shell, + ) catch |err| { + switch (err) { + error.MissingShell => { + Output.errGeneric("failed to find shell executable to run prepublishOnly script", .{}); + Global.crash(); + }, + error.OutOfMemory => |oom| return oom, + } + }; + } + } + } + + if (scripts.expr.get("prepack")) |prepack_script| { + if (prepack_script.asString(ctx.allocator)) |prepack_script_str| { + _ = RunCommand.runPackageScriptForeground( + ctx.command_ctx, + ctx.allocator, + prepack_script_str, + "prepack", + abs_workspace_path, + this_bundler.env, + &.{}, + manager.options.log_level == .silent, + ctx.command_ctx.debug.use_system_shell, + ) catch |err| { + switch (err) { + error.MissingShell => { + Output.errGeneric("failed to find shell executable to run prepack script", .{}); + Global.crash(); + }, + error.OutOfMemory => |oom| return oom, + } + }; + } + } + + if (scripts.expr.get("prepare")) |prepare_script| { + if (prepare_script.asString(ctx.allocator)) |prepare_script_str| { + _ = RunCommand.runPackageScriptForeground( + ctx.command_ctx, + ctx.allocator, + prepare_script_str, + "prepare", + abs_workspace_path, + this_bundler.env, + &.{}, + manager.options.log_level == .silent, + ctx.command_ctx.debug.use_system_shell, + ) catch |err| { + switch (err) { + error.MissingShell => { + Output.errGeneric("failed to find shell executable to run prepare script", .{}); + Global.crash(); + }, + error.OutOfMemory => |oom| return oom, + } + }; + } + } + + var postpack_script: ?[]const u8 = null; + if (scripts.expr.get("postpack")) |postpack| { + postpack_script = postpack.asString(ctx.allocator); + } + + if (comptime for_publish) { + var publish_script: ?[]const u8 = null; + var postpublish_script: ?[]const u8 = null; + if (scripts.expr.get("publish")) |publish| { + publish_script = try publish.asStringCloned(ctx.allocator); + } + if (scripts.expr.get("postpublish")) |postpublish| { + postpublish_script = try postpublish.asStringCloned(ctx.allocator); + } + + break :post_scripts .{ postpack_script, publish_script, postpublish_script }; + } + + break :post_scripts .{ postpack_script, null, null }; + }; + + var root_dir = root_dir: { + var path_buf: PathBuffer = undefined; + @memcpy(path_buf[0..abs_workspace_path.len], abs_workspace_path); + path_buf[abs_workspace_path.len] = 0; + break :root_dir 
std.fs.openDirAbsoluteZ(path_buf[0..abs_workspace_path.len :0], .{ + .iterate = true, + }) catch |err| { + Output.err(err, "failed to open root directory: {s}\n", .{abs_workspace_path}); + Global.crash(); + }; + }; + defer root_dir.close(); + + ctx.bundled_deps = try getBundledDeps(ctx.allocator, json.root, "bundledDependencies") orelse + try getBundledDeps(ctx.allocator, json.root, "bundleDependencies") orelse + .{}; + + var pack_queue = pack_queue: { + if (json.root.get("files")) |files| { + files_error: { + if (files.asArray()) |_files_array| { + var includes: std.ArrayListUnmanaged(Pattern) = .{}; + defer includes.deinit(ctx.allocator); + + var files_array = _files_array; + while (files_array.next()) |files_entry| { + if (files_entry.asString(ctx.allocator)) |file_entry_str| { + const parsed = try Pattern.fromUTF8(ctx.allocator, file_entry_str) orelse continue; + try includes.append(ctx.allocator, parsed); + continue; + } + + break :files_error; + } + + break :pack_queue try iterateIncludedProjectTree( + ctx.allocator, + includes.items, + root_dir, + log_level, + ); + } + } + + Output.errGeneric("expected `files` to be an array of string values", .{}); + Global.crash(); + } + + // pack from project root + break :pack_queue try iterateProjectTree( + ctx.allocator, + root_dir, + log_level, + ); + }; + defer pack_queue.deinit(); + + var bundled_pack_queue = try iterateBundledDeps(ctx, root_dir, log_level); + defer bundled_pack_queue.deinit(); + + // +1 for package.json + ctx.stats.total_files = pack_queue.count() + bundled_pack_queue.count() + 1; + + if (manager.options.dry_run) { + // don't create the tarball, but run scripts if they exists + + printArchivedFilesAndPackages(ctx, root_dir, true, &pack_queue, 0); + + if (comptime !for_publish) { + if (manager.options.pack_destination.len == 0) { + Output.pretty("\n{}\n", .{fmtTarballFilename(package_name, package_version)}); + } else { + var dest_buf: PathBuffer = undefined; + const abs_tarball_dest, _ = absTarballDestination( + ctx.manager.options.pack_destination, + abs_workspace_path, + package_name, + package_version, + &dest_buf, + ); + Output.pretty("\n{s}\n", .{abs_tarball_dest}); + } + } + + Context.printSummary(ctx.stats, null, null, log_level); + + if (postpack_script) |postpack_script_str| { + _ = RunCommand.runPackageScriptForeground( + ctx.command_ctx, + ctx.allocator, + postpack_script_str, + "postpack", + abs_workspace_path, + manager.env, + &.{}, + manager.options.log_level == .silent, + ctx.command_ctx.debug.use_system_shell, + ) catch |err| { + switch (err) { + error.MissingShell => { + Output.errGeneric("failed to find shell executable to run postpack script", .{}); + Global.crash(); + }, + error.OutOfMemory => |oom| return oom, + } + }; + } + + if (comptime for_publish) { + var dest_buf: bun.PathBuffer = undefined; + const abs_tarball_dest, _ = absTarballDestination( + ctx.manager.options.pack_destination, + abs_workspace_path, + package_name, + package_version, + &dest_buf, + ); + return .{ + .allocator = ctx.allocator, + .command_ctx = ctx.command_ctx, + .manager = manager, + .package_name = package_name, + .package_version = package_version, + .abs_tarball_path = try ctx.allocator.dupeZ(u8, abs_tarball_dest), + .tarball_bytes = "", + .shasum = undefined, + .integrity = undefined, + .uses_workspaces = false, + .publish_script = publish_script, + .postpublish_script = postpublish_script, + .script_env = this_bundler.env, + .normalized_pkg_info = "", + }; + } + + return; + } + + const bins = try 
getPackageBins(ctx.allocator, json.root); + defer for (bins) |bin| ctx.allocator.free(bin.path); + + var print_buf = std.ArrayList(u8).init(ctx.allocator); + defer print_buf.deinit(); + const print_buf_writer = print_buf.writer(); + + var archive = Archive.writeNew(); + + switch (archive.writeSetFormatPaxRestricted()) { + .failed, .fatal, .warn => { + Output.errGeneric("failed to set archive format: {s}", .{archive.errorString()}); + Global.crash(); + }, + else => {}, + } + switch (archive.writeAddFilterGzip()) { + .failed, .fatal, .warn => { + Output.errGeneric("failed to set archive compression to gzip: {s}", .{archive.errorString()}); + Global.crash(); + }, + else => {}, + } + + // default is 9 + // https://github.com/npm/cli/blob/ec105f400281a5bfd17885de1ea3d54d0c231b27/node_modules/pacote/lib/util/tar-create-options.js#L12 + const compression_level = manager.options.pack_gzip_level orelse "9"; + try print_buf_writer.print("{s}\x00", .{compression_level}); + switch (archive.writeSetFilterOption(null, "compression-level", print_buf.items[0..compression_level.len :0])) { + .failed, .fatal, .warn => { + Output.errGeneric("compression level must be between 0 and 9, received {s}", .{compression_level}); + Global.crash(); + }, + else => {}, + } + print_buf.clearRetainingCapacity(); + + switch (archive.writeSetFilterOption(null, "os", "Unknown")) { + .failed, .fatal, .warn => { + Output.errGeneric("failed to set os to `Unknown`: {s}", .{archive.errorString()}); + Global.crash(); + }, + else => {}, + } + + switch (archive.writeSetOptions("gzip:!timestamp")) { + .failed, .fatal, .warn => { + Output.errGeneric("failed to unset gzip timestamp option: {s}", .{archive.errorString()}); + Global.crash(); + }, + else => {}, + } + + var dest_buf: PathBuffer = undefined; + const abs_tarball_dest, const abs_tarball_dest_dir_end = absTarballDestination( + ctx.manager.options.pack_destination, + abs_workspace_path, + package_name, + package_version, + &dest_buf, + ); + + { + // create the directory if it doesn't exist + const most_likely_a_slash = dest_buf[abs_tarball_dest_dir_end]; + dest_buf[abs_tarball_dest_dir_end] = 0; + const abs_tarball_dest_dir = dest_buf[0..abs_tarball_dest_dir_end :0]; + bun.makePath(std.fs.cwd(), abs_tarball_dest_dir) catch {}; + dest_buf[abs_tarball_dest_dir_end] = most_likely_a_slash; + } + + // TODO: experiment with `archive.writeOpenMemory()` + switch (archive.writeOpenFilename(abs_tarball_dest)) { + .failed, .fatal, .warn => { + Output.errGeneric("failed to open tarball file destination: \"{s}\"", .{abs_tarball_dest}); + Global.crash(); + }, + else => {}, + } + + // append removed items from `pack_queue` with their file size + var pack_list: PackList = .{}; + defer pack_list.deinit(ctx.allocator); + + var read_buf: [8192]u8 = undefined; + const file_reader = try ctx.allocator.create(BufferedFileReader); + defer ctx.allocator.destroy(file_reader); + file_reader.* = .{ + .unbuffered_reader = undefined, + }; + + var entry = Archive.Entry.new2(archive); + + { + var progress: if (log_level == .silent) void else Progress = if (comptime log_level == .silent) {} else .{}; + var node = if (comptime log_level == .silent) {} else node: { + progress.supports_ansi_escape_codes = Output.enable_ansi_colors; + var node: *Progress.Node = progress.start("", pack_queue.count() + bundled_pack_queue.count() + 1); + node.unit = " files"; + break :node node; + }; + defer if (comptime log_level != .silent) node.end(); + + entry = try archivePackageJSON(ctx, archive, entry, root_dir, 
edited_package_json); + if (comptime log_level != .silent) node.completeOne(); + + while (pack_queue.removeOrNull()) |pathname| { + defer if (comptime log_level != .silent) node.completeOne(); + + const file = bun.sys.openat(bun.toFD(root_dir.fd), pathname, bun.O.RDONLY, 0).unwrap() catch |err| { + Output.err(err, "failed to open file: \"{s}\"", .{pathname}); + Global.crash(); + }; + + const fd = bun.sys.toLibUVOwnedFD(file, .open, .close_on_fail).unwrap() catch |err| { + Output.err(err, "failed to open file: \"{s}\"", .{pathname}); + Global.crash(); + }; + + defer _ = bun.sys.close(fd); + + const stat = bun.sys.sys_uv.fstat(fd).unwrap() catch |err| { + Output.err(err, "failed to stat file: \"{s}\"", .{pathname}); + Global.crash(); + }; + + try pack_list.append(ctx.allocator, .{ .subpath = pathname, .size = @intCast(stat.size) }); + + entry = try addArchiveEntry( + ctx, + fd, + stat, + pathname, + &read_buf, + file_reader, + archive, + entry, + &print_buf, + bins, + ); + } + + while (bundled_pack_queue.removeOrNull()) |pathname| { + defer if (comptime log_level != .silent) node.completeOne(); + + const file = File.openat(root_dir, pathname, bun.O.RDONLY, 0).unwrap() catch |err| { + Output.err(err, "failed to open file: \"{s}\"", .{pathname}); + Global.crash(); + }; + defer file.close(); + const stat = file.stat().unwrap() catch |err| { + Output.err(err, "failed to stat file: \"{}\"", .{file.handle}); + Global.crash(); + }; + + entry = try addArchiveEntry( + ctx, + file.handle, + stat, + pathname, + &read_buf, + file_reader, + archive, + entry, + &print_buf, + bins, + ); + } + } + + entry.free(); + + switch (archive.writeClose()) { + .failed, .fatal, .warn => { + Output.errGeneric("failed to close archive: {s}", .{archive.errorString()}); + Global.crash(); + }, + else => {}, + } + + switch (archive.writeFree()) { + .failed, .fatal, .warn => { + Output.errGeneric("failed to free archive: {s}", .{archive.errorString()}); + Global.crash(); + }, + else => {}, + } + + var shasum: sha.SHA1.Digest = undefined; + var integrity: sha.SHA512.Digest = undefined; + + const tarball_bytes = tarball_bytes: { + const tarball_file = File.open(abs_tarball_dest, bun.O.RDONLY, 0).unwrap() catch |err| { + Output.err(err, "failed to open tarball at: \"{s}\"", .{abs_tarball_dest}); + Global.crash(); + }; + defer tarball_file.close(); + + var sha1 = sha.SHA1.init(); + defer sha1.deinit(); + + var sha512 = sha.SHA512.init(); + defer sha512.deinit(); + + if (comptime for_publish) { + const tarball_bytes = tarball_file.readToEnd(ctx.allocator).unwrap() catch |err| { + Output.err(err, "failed to read tarball: \"{s}\"", .{abs_tarball_dest}); + Global.crash(); + }; + + sha1.update(tarball_bytes); + sha512.update(tarball_bytes); + + sha1.final(&shasum); + sha512.final(&integrity); + + ctx.stats.packed_size = tarball_bytes.len; + + break :tarball_bytes tarball_bytes; + } + + file_reader.* = .{ + .unbuffered_reader = tarball_file.reader(), + }; + + var size: usize = 0; + var read = file_reader.read(&read_buf) catch |err| { + Output.err(err, "failed to read tarball: \"{s}\"", .{abs_tarball_dest}); + Global.crash(); + }; + while (read > 0) { + sha1.update(read_buf[0..read]); + sha512.update(read_buf[0..read]); + size += read; + read = file_reader.read(&read_buf) catch |err| { + Output.err(err, "failed to read tarball: \"{s}\"", .{abs_tarball_dest}); + Global.crash(); + }; + } + + sha1.final(&shasum); + sha512.final(&integrity); + + ctx.stats.packed_size = size; + }; + + const normalized_pkg_info: if (for_publish) string else 
void = if (comptime for_publish) + try Publish.normalizedPackage( + ctx.allocator, + manager, + package_name, + package_version, + &json.root, + json.source, + shasum, + integrity, + abs_tarball_dest, + ); + + printArchivedFilesAndPackages( + ctx, + root_dir, + false, + pack_list, + edited_package_json.len, + ); + + if (comptime !for_publish) { + if (manager.options.pack_destination.len == 0) { + Output.pretty("\n{}\n", .{fmtTarballFilename(package_name, package_version)}); + } else { + Output.pretty("\n{s}\n", .{abs_tarball_dest}); + } + } + + Context.printSummary(ctx.stats, shasum, integrity, log_level); + + if (comptime for_publish) { + Output.flush(); + } + + if (postpack_script) |postpack_script_str| { + Output.pretty("\n", .{}); + _ = RunCommand.runPackageScriptForeground( + ctx.command_ctx, + ctx.allocator, + postpack_script_str, + "postpack", + abs_workspace_path, + manager.env, + &.{}, + manager.options.log_level == .silent, + ctx.command_ctx.debug.use_system_shell, + ) catch |err| { + switch (err) { + error.MissingShell => { + Output.errGeneric("failed to find shell executable to run postpack script", .{}); + Global.crash(); + }, + error.OutOfMemory => |oom| return oom, + } + }; + } + + if (comptime for_publish) { + return .{ + .allocator = ctx.allocator, + .command_ctx = ctx.command_ctx, + .manager = manager, + .package_name = package_name, + .package_version = package_version, + .abs_tarball_path = try ctx.allocator.dupeZ(u8, abs_tarball_dest), + .tarball_bytes = tarball_bytes, + .shasum = shasum, + .integrity = integrity, + .uses_workspaces = false, + .publish_script = publish_script, + .postpublish_script = postpublish_script, + .script_env = this_bundler.env, + .normalized_pkg_info = normalized_pkg_info, + }; + } + } + + fn absTarballDestination( + pack_destination: string, + abs_workspace_path: string, + package_name: string, + package_version: string, + dest_buf: []u8, + ) struct { stringZ, usize } { + const tarball_destination_dir = bun.path.joinAbsStringBuf( + abs_workspace_path, + dest_buf, + &.{pack_destination}, + .auto, + ); + + const tarball_name = std.fmt.bufPrint(dest_buf[strings.withoutTrailingSlash(tarball_destination_dir).len..], "/{}\x00", .{ + fmtTarballFilename(package_name, package_version), + }) catch { + Output.errGeneric("archive destination name too long: \"{s}/{}\"", .{ + strings.withoutTrailingSlash(tarball_destination_dir), + fmtTarballFilename(package_name, package_version), + }); + Global.crash(); + }; + + return .{ + dest_buf[0 .. strings.withoutTrailingSlash(tarball_destination_dir).len + tarball_name.len - 1 :0], + tarball_destination_dir.len, + }; + } + + fn fmtTarballFilename(package_name: string, package_version: string) TarballNameFormatter { + return .{ + .package_name = package_name, + .package_version = package_version, + }; + } + + const TarballNameFormatter = struct { + package_name: string, + package_version: string, + + pub fn format(this: TarballNameFormatter, comptime _: string, _: std.fmt.FormatOptions, writer: anytype) !void { + if (this.package_name[0] == '@') { + if (this.package_name.len > 1) { + if (strings.indexOfChar(this.package_name, '/')) |slash| { + return writer.print("{s}-{s}-{s}.tgz", .{ + this.package_name[1..][0 .. 
slash - 1], + this.package_name[slash + 1 ..], + this.package_version, + }); + } + } + + return writer.print("{s}-{s}.tgz", .{ + this.package_name[1..], + this.package_version, + }); + } + + return writer.print("{s}-{s}.tgz", .{ + this.package_name, + this.package_version, + }); + } + }; + + fn archivePackageJSON( + ctx: *Context, + archive: *Archive, + entry: *Archive.Entry, + root_dir: std.fs.Dir, + edited_package_json: string, + ) OOM!*Archive.Entry { + const stat = bun.sys.fstatat(bun.toFD(root_dir), "package.json").unwrap() catch |err| { + Output.err(err, "failed to stat package.json", .{}); + Global.crash(); + }; + + entry.setPathname(package_prefix ++ "package.json"); + entry.setSize(@intCast(edited_package_json.len)); + // https://github.com/libarchive/libarchive/blob/898dc8319355b7e985f68a9819f182aaed61b53a/libarchive/archive_entry.h#L185 + entry.setFiletype(0o100000); + entry.setPerm(@intCast(stat.mode)); + // '1985-10-26T08:15:00.000Z' + // https://github.com/npm/cli/blob/ec105f400281a5bfd17885de1ea3d54d0c231b27/node_modules/pacote/lib/util/tar-create-options.js#L28 + entry.setMtime(499162500, 0); + + switch (archive.writeHeader(entry)) { + .failed, .fatal, .warn => { + Output.errGeneric("failed to write tarball header: {s}", .{archive.errorString()}); + Global.crash(); + }, + else => {}, + } + + ctx.stats.unpacked_size += @intCast(archive.writeData(edited_package_json)); + + return entry.clear(); + } + + fn addArchiveEntry( + ctx: *Context, + file: FileDescriptor, + stat: bun.Stat, + filename: stringZ, + read_buf: []u8, + file_reader: *BufferedFileReader, + archive: *Archive, + entry: *Archive.Entry, + print_buf: *std.ArrayList(u8), + bins: []const BinInfo, + ) OOM!*Archive.Entry { + const print_buf_writer = print_buf.writer(); + + try print_buf_writer.print("{s}{s}\x00", .{ package_prefix, filename }); + const pathname = print_buf.items[0 .. 
package_prefix.len + filename.len :0]; + if (comptime Environment.isWindows) + entry.setPathnameUtf8(pathname) + else + entry.setPathname(pathname); + print_buf_writer.context.clearRetainingCapacity(); + + entry.setSize(@intCast(stat.size)); + + // https://github.com/libarchive/libarchive/blob/898dc8319355b7e985f68a9819f182aaed61b53a/libarchive/archive_entry.h#L185 + entry.setFiletype(0o100000); + + var perm: bun.Mode = @intCast(stat.mode); + // https://github.com/npm/cli/blob/ec105f400281a5bfd17885de1ea3d54d0c231b27/node_modules/pacote/lib/util/tar-create-options.js#L20 + if (isPackageBin(bins, filename)) perm |= 0o111; + entry.setPerm(@intCast(perm)); + + // '1985-10-26T08:15:00.000Z' + // https://github.com/npm/cli/blob/ec105f400281a5bfd17885de1ea3d54d0c231b27/node_modules/pacote/lib/util/tar-create-options.js#L28 + entry.setMtime(499162500, 0); + + switch (archive.writeHeader(entry)) { + .failed, .fatal => { + Output.errGeneric("failed to write tarball header: {s}", .{archive.errorString()}); + Global.crash(); + }, + else => {}, + } + + file_reader.* = .{ + .unbuffered_reader = File.from(file).reader(), + }; + + var read = file_reader.read(read_buf) catch |err| { + Output.err(err, "failed to read file: \"{s}\"", .{filename}); + Global.crash(); + }; + while (read > 0) { + ctx.stats.unpacked_size += @intCast(archive.writeData(read_buf[0..read])); + read = file_reader.read(read_buf) catch |err| { + Output.err(err, "failed to read file: \"{s}\"", .{filename}); + Global.crash(); + }; + } + + return entry.clear(); + } + + /// Strip workspace protocols from dependency versions then + /// returns the printed json + fn editRootPackageJSON( + allocator: std.mem.Allocator, + maybe_lockfile: ?*Lockfile, + json: *PackageManager.WorkspacePackageJSONCache.MapEntry, + ) OOM!string { + for ([_]string{ + "dependencies", + "devDependencies", + "peerDependencies", + "optionalDependencies", + }) |dependency_group| { + if (json.root.get(dependency_group)) |dependencies_expr| { + switch (dependencies_expr.data) { + .e_object => |dependencies| { + for (dependencies.properties.slice()) |*dependency| { + if (dependency.key == null) continue; + if (dependency.value == null) continue; + + const package_spec = dependency.value.?.asString(allocator) orelse continue; + if (strings.withoutPrefixIfPossibleComptime(package_spec, "workspace:")) |without_workspace_protocol| { + + // TODO: make semver parsing more strict. 
`^`, `~` are not valid + // const parsed = Semver.Version.parseUTF8(without_workspace_protocol); + // if (parsed.valid) { + // dependency.value = Expr.allocate( + // ctx.manager.allocator, + // E.String, + // .{ + // .data = without_workspace_protocol, + // }, + // .{}, + // ); + // continue; + // } + + if (without_workspace_protocol.len == 1) { + // TODO: this might be too strict + const c = without_workspace_protocol[0]; + if (c == '^' or c == '~' or c == '*') { + const dependency_name = dependency.key.?.asString(allocator) orelse { + Output.errGeneric("expected string value for dependency name in \"{s}\"", .{ + dependency_group, + }); + Global.crash(); + }; + + failed_to_resolve: { + // find the current workspace version and append to package spec without `workspace:` + const lockfile = maybe_lockfile orelse break :failed_to_resolve; + + const workspace_version = lockfile.workspace_versions.get(Semver.String.Builder.stringHash(dependency_name)) orelse break :failed_to_resolve; + + dependency.value = Expr.allocate( + allocator, + E.String, + .{ + .data = try std.fmt.allocPrint(allocator, "{s}{}", .{ + switch (c) { + '^' => "^", + '~' => "~", + '*' => "", + else => unreachable, + }, + workspace_version.fmt(lockfile.buffers.string_bytes.items), + }), + }, + .{}, + ); + + continue; + } + + // only produce this error only when we need to get the workspace version + Output.errGeneric("Failed to resolve workspace version for \"{s}\" in `{s}`. Run `bun install` and try again.", .{ + dependency_name, + dependency_group, + }); + Global.crash(); + } + } + + dependency.value = Expr.allocate( + allocator, + E.String, + .{ + .data = try allocator.dupe(u8, without_workspace_protocol), + }, + .{}, + ); + } + } + }, + else => {}, + } + } + } + + const has_trailing_newline = json.source.contents.len > 0 and json.source.contents[json.source.contents.len - 1] == '\n'; + var buffer_writer = try js_printer.BufferWriter.init(allocator); + try buffer_writer.buffer.list.ensureTotalCapacity(allocator, json.source.contents.len + 1); + buffer_writer.append_newline = has_trailing_newline; + var package_json_writer = js_printer.BufferPrinter.init(buffer_writer); + + const written = js_printer.printJSON( + @TypeOf(&package_json_writer), + &package_json_writer, + json.root, + + // shouldn't be used + &json.source, + .{ + .indent = json.indentation, + }, + ) catch |err| { + return switch (err) { + error.OutOfMemory => |oom| oom, + else => { + Output.errGeneric("failed to print edited package.json: {s}", .{@errorName(err)}); + Global.crash(); + }, + }; + }; + _ = written; + + return package_json_writer.ctx.writtenWithoutTrailingZero(); + } + + /// A pattern used to ignore or include + /// files in the project tree. 
Might come + /// from .npmignore, .gitignore, or `files` + /// in package.json + const Pattern = struct { + glob: []const u32, + /// beginning or middle slash (leading slash was trimmed) + rel_path: bool, + // can only match directories (had an ending slash, also trimmed) + dirs_only: bool, + + @"leading **/": bool, + + pub fn fromUTF8(allocator: std.mem.Allocator, pattern: string) OOM!?Pattern { + var remain = pattern; + var @"has leading **/, (could start with '!')" = false; + const has_leading_or_middle_slash, const has_trailing_slash, const add_negate = check_slashes: { + const before_length = remain.len; + + // strip `!` and add one if any existed + while (remain.len > 0 and remain[0] == '!') remain = remain[1..]; + + const skipped_negate = before_length != remain.len; + + if (remain.len == 0) return null; + + // `**/foo` matches the same as `foo` + if (strings.hasPrefixComptime(remain, "**/")) { + remain = remain["**/".len..]; + if (remain.len == 0) return null; + @"has leading **/, (could start with '!')" = true; + } + + const trailing_slash = remain[remain.len - 1] == '/'; + if (trailing_slash) { + // trim trailing slash + remain = remain[0 .. remain.len - 1]; + if (remain.len == 0) return null; + } + + var leading_or_middle_slash = remain[0] == '/'; + if (!leading_or_middle_slash) { + // check for middle slash + if (strings.indexOfChar(remain, '/')) |slash_index| { + leading_or_middle_slash = slash_index != remain.len - 1; + } + } else { + // trim leading slash + remain = remain[1..]; + if (remain.len == 0) return null; + } + + break :check_slashes .{ leading_or_middle_slash, trailing_slash, skipped_negate }; + }; + + const length = bun.simdutf.length.utf32.from.utf8.le(remain) + @intFromBool(add_negate); + const buf = try allocator.alloc(u32, length); + const result = bun.simdutf.convert.utf8.to.utf32.with_errors.le(remain, buf[@intFromBool(add_negate)..]); + if (!result.isSuccessful()) { + allocator.free(buf); + return null; + } + + if (add_negate) { + buf[0] = '!'; + } + + return .{ + .glob = buf[0 .. result.count + @intFromBool(add_negate)], + .rel_path = has_leading_or_middle_slash, + .@"leading **/" = @"has leading **/, (could start with '!')", + .dirs_only = has_trailing_slash, + }; + } + + pub fn deinit(this: Pattern, allocator: std.mem.Allocator) void { + allocator.free(this.glob); + } + }; + + pub const IgnorePatterns = struct { + list: []const Pattern, + kind: Kind, + depth: usize, + + // At least one of the patterns has a leading + // or middle slash. 
A relative path will need to + // be created + has_rel_path: bool, + + pub const Kind = enum { + default, + @".npmignore", + @".gitignore", + }; + + pub const List = std.ArrayListUnmanaged(IgnorePatterns); + + fn ignoreFileFail(dir: std.fs.Dir, ignore_kind: Kind, reason: enum { read, open }, err: anyerror) noreturn { + var buf: PathBuffer = undefined; + const dir_path = bun.getFdPath(dir, &buf) catch ""; + Output.err(err, "failed to {s} {s} at: \"{s}{s}{s}\"", .{ + @tagName(reason), + @tagName(ignore_kind), + strings.withoutTrailingSlash(dir_path), + std.fs.path.sep_str, + @tagName(ignore_kind), + }); + Global.crash(); + } + + fn trimTrailingSpaces(line: string) string { + // TODO: copy this function + // https://github.com/git/git/blob/17d4b10aea6bda2027047a0e3548a6f8ad667dde/dir.c#L986 + return line; + } + + fn maybeTrimLeadingSpaces(line: string) string { + // npm will trim, git will not + return line; + } + + // ignore files are always ignored, don't need to worry about opening or reading twice + pub fn readFromDisk(allocator: std.mem.Allocator, dir: std.fs.Dir, dir_depth: usize) OOM!?IgnorePatterns { + var patterns: std.ArrayListUnmanaged(Pattern) = .{}; + errdefer patterns.deinit(allocator); + + var ignore_kind: Kind = .@".npmignore"; + + const ignore_file = dir.openFileZ(".npmignore", .{}) catch |err| ignore_file: { + if (err != error.FileNotFound) { + // Crash if the file exists and fails to open. Don't want to create a tarball + // with files you want to ignore. + ignoreFileFail(dir, ignore_kind, .open, err); + } + ignore_kind = .@".gitignore"; + break :ignore_file dir.openFileZ(".gitignore", .{}) catch |err2| { + if (err2 != error.FileNotFound) { + ignoreFileFail(dir, ignore_kind, .open, err2); + } + + return null; + }; + }; + defer ignore_file.close(); + + const contents = File.from(ignore_file).readToEnd(allocator).unwrap() catch |err| { + ignoreFileFail(dir, ignore_kind, .read, err); + }; + defer allocator.free(contents); + + var has_rel_path = false; + + var iter = std.mem.tokenizeScalar(u8, contents, '\n'); + while (iter.next()) |line| { + if (line.len == 0) continue; + + // comment + if (line[0] == '#') continue; + + const trimmed = trimmed: { + var remain = line; + if (remain[remain.len - 1] == '\r') { + remain = remain[0 .. 
remain.len - 1]; + } + + break :trimmed trimTrailingSpaces(remain); + }; + + if (trimmed.len == 0) continue; + + const parsed = try Pattern.fromUTF8(allocator, trimmed) orelse continue; + try patterns.append(allocator, parsed); + + has_rel_path = has_rel_path or parsed.rel_path; + } + + if (patterns.items.len == 0) return null; + + return .{ + .list = patterns.items, + .kind = ignore_kind, + .depth = dir_depth, + .has_rel_path = has_rel_path, + }; + } + + pub fn deinit(this: *const IgnorePatterns, allocator: std.mem.Allocator) void { + for (this.list) |pattern_info| { + allocator.free(pattern_info.glob); + } + allocator.free(this.list); + } + }; + + fn printArchivedFilesAndPackages( + ctx: *Context, + root_dir: std.fs.Dir, + comptime is_dry_run: bool, + pack_list: if (is_dry_run) *PackQueue else PackList, + package_json_len: usize, + ) void { + if (ctx.manager.options.log_level == .silent) return; + const packed_fmt = "packed {} {s}"; + + if (comptime is_dry_run) { + const package_json_stat = bun.sys.fstatat(bun.toFD(root_dir), "package.json").unwrap() catch |err| { + Output.err(err, "failed to stat package.json", .{}); + Global.crash(); + }; + + ctx.stats.unpacked_size += @intCast(package_json_stat.size); + + Output.prettyln("\n" ++ packed_fmt, .{ + bun.fmt.size(package_json_stat.size, .{ .space_between_number_and_unit = false }), + "package.json", + }); + + while (pack_list.removeOrNull()) |filename| { + const stat = bun.sys.fstatat(bun.toFD(root_dir), filename).unwrap() catch |err| { + Output.err(err, "failed to stat file: \"{s}\"", .{filename}); + Global.crash(); + }; + + ctx.stats.unpacked_size += @intCast(stat.size); + + Output.prettyln(packed_fmt, .{ + bun.fmt.size(stat.size, .{ .space_between_number_and_unit = false }), + filename, + }); + } + + for (ctx.bundled_deps.items) |dep| { + if (!dep.was_packed) continue; + Output.prettyln("bundled {s}", .{dep.name}); + } + + Output.flush(); + return; + } + + Output.prettyln("\n" ++ packed_fmt, .{ + bun.fmt.size(package_json_len, .{ .space_between_number_and_unit = false }), + "package.json", + }); + + for (pack_list.items) |entry| { + Output.prettyln(packed_fmt, .{ + bun.fmt.size(entry.size, .{ .space_between_number_and_unit = false }), + entry.subpath, + }); + } + + for (ctx.bundled_deps.items) |dep| { + if (!dep.was_packed) continue; + Output.prettyln("bundled {s}", .{dep.name}); + } + + Output.flush(); + } +}; + +pub const bindings = struct { + const JSC = bun.JSC; + const JSValue = JSC.JSValue; + const JSGlobalObject = JSC.JSGlobalObject; + const CallFrame = JSC.CallFrame; + const ZigString = JSC.ZigString; + const String = bun.String; + const JSArray = JSC.JSArray; + const JSObject = JSC.JSObject; + + // pub fn generate(global: *JSGlobalObject) JSValue { + // const obj = JSValue.createEmptyObject(global, 1); + + // const readTarEntries = ZigString.static("readTarEntries"); + // obj.put(global, readTarEntries, JSC.createCallback(global, readTarEntries, 1, jsReadTarEntries)); + // return obj; + // } + + pub fn jsReadTarball(global: *JSGlobalObject, callFrame: *CallFrame) bun.JSError!JSValue { + const args = callFrame.arguments(1).slice(); + if (args.len < 1 or !args[0].isString()) { + global.throw("expected tarball path string argument", .{}); + return .zero; + } + + const tarball_path_str = args[0].toBunString(global); + defer tarball_path_str.deref(); + + const tarball_path = tarball_path_str.toUTF8(bun.default_allocator); + defer tarball_path.deinit(); + + const tarball_file = File.from(std.fs.openFileAbsolute(tarball_path.slice(), 
.{}) catch |err| { + global.throw("failed to open tarball file \"{s}\": {s}", .{ tarball_path.slice(), @errorName(err) }); + return .zero; + }); + defer tarball_file.close(); + + const tarball = tarball_file.readToEnd(bun.default_allocator).unwrap() catch |err| { + global.throw("failed to read tarball contents \"{s}\": {s}", .{ tarball_path.slice(), @errorName(err) }); + return .zero; + }; + defer bun.default_allocator.free(tarball); + + var sha1_digest: sha.SHA1.Digest = undefined; + var sha1 = sha.SHA1.init(); + defer sha1.deinit(); + sha1.update(tarball); + sha1.final(&sha1_digest); + const shasum_str = String.createFormat("{s}", .{bun.fmt.bytesToHex(sha1_digest, .lower)}) catch bun.outOfMemory(); + + var sha512_digest: sha.SHA512.Digest = undefined; + var sha512 = sha.SHA512.init(); + defer sha512.deinit(); + sha512.update(tarball); + sha512.final(&sha512_digest); + var base64_buf: [std.base64.standard.Encoder.calcSize(sha.SHA512.digest)]u8 = undefined; + const encode_count = bun.simdutf.base64.encode(&sha512_digest, &base64_buf, false); + const integrity_str = String.createUTF8(base64_buf[0..encode_count]); + + const EntryInfo = struct { + pathname: String, + kind: String, + perm: bun.Mode, + size: ?usize = null, + contents: ?String = null, + }; + var entries_info = std.ArrayList(EntryInfo).init(bun.default_allocator); + defer entries_info.deinit(); + + const archive = Archive.readNew(); + + switch (archive.readSupportFormatTar()) { + .failed, .fatal, .warn => { + global.throw("failed to support tar: {s}", .{archive.errorString()}); + return .zero; + }, + else => {}, + } + switch (archive.readSupportFormatGnutar()) { + .failed, .fatal, .warn => { + global.throw("failed to support gnutar: {s}", .{archive.errorString()}); + return .zero; + }, + else => {}, + } + switch (archive.readSupportFilterGzip()) { + .failed, .fatal, .warn => { + global.throw("failed to support gzip compression: {s}", .{archive.errorString()}); + return .zero; + }, + else => {}, + } + + switch (archive.readSetOptions("read_concatenated_archives")) { + .failed, .fatal, .warn => { + global.throw("failed to set read_concatenated_archives option: {s}", .{archive.errorString()}); + return .zero; + }, + else => {}, + } + + switch (archive.readOpenMemory(tarball)) { + .failed, .fatal, .warn => { + global.throw("failed to open archive in memory: {s}", .{archive.errorString()}); + return .zero; + }, + else => {}, + } + + var archive_entry: *Archive.Entry = undefined; + var header_status = archive.readNextHeader(&archive_entry); + + var read_buf = std.ArrayList(u8).init(bun.default_allocator); + defer read_buf.deinit(); + + while (header_status != .eof) : (header_status = archive.readNextHeader(&archive_entry)) { + switch (header_status) { + .eof => unreachable, + .retry => continue, + .failed, .fatal => { + global.throw("failed to read archive header: {s}", .{Archive.errorString(@ptrCast(archive))}); + return .zero; + }, + else => { + const pathname = archive_entry.pathname(); + const kind = bun.C.kindFromMode(archive_entry.filetype()); + const perm = archive_entry.perm(); + + var entry_info: EntryInfo = .{ + .pathname = String.createUTF8(pathname), + .kind = String.static(@tagName(kind)), + .perm = perm, + }; + + if (kind == .file) { + const size: usize = @intCast(archive_entry.size()); + read_buf.resize(size) catch bun.outOfMemory(); + defer read_buf.clearRetainingCapacity(); + + const read = archive.readData(read_buf.items); + if (read < 0) { + global.throw("failed to read archive entry \"{}\": {s}", .{ + 
bun.fmt.fmtPath(u8, pathname, .{}), + Archive.errorString(@ptrCast(archive)), + }); + return .zero; + } + read_buf.items.len = @intCast(read); + entry_info.contents = String.createUTF8(read_buf.items); + } + + entries_info.append(entry_info) catch bun.outOfMemory(); + }, + } + } + + switch (archive.readClose()) { + .failed, .fatal, .warn => { + global.throw("failed to close read archive: {s}", .{archive.errorString()}); + return .zero; + }, + else => {}, + } + switch (archive.readFree()) { + .failed, .fatal, .warn => { + global.throw("failed to close read archive: {s}", .{archive.errorString()}); + return .zero; + }, + else => {}, + } + + const entries = JSArray.createEmpty(global, entries_info.items.len); + + for (entries_info.items, 0..) |entry, i| { + const obj = JSValue.createEmptyObject(global, 4); + obj.put(global, "pathname", entry.pathname.toJS(global)); + obj.put(global, "kind", entry.kind.toJS(global)); + obj.put(global, "perm", JSValue.jsNumber(entry.perm)); + if (entry.contents) |contents| { + obj.put(global, "contents", contents.toJS(global)); + } + entries.putIndex(global, @intCast(i), obj); + } + + const result = JSValue.createEmptyObject(global, 2); + result.put(global, "entries", entries); + result.put(global, "size", JSValue.jsNumber(tarball.len)); + result.put(global, "shasum", shasum_str.toJS(global)); + result.put(global, "integrity", integrity_str.toJS(global)); + + return result; + } +}; diff --git a/src/cli/package_manager_command.zig b/src/cli/package_manager_command.zig index 226d59d1c2..801f492936 100644 --- a/src/cli/package_manager_command.zig +++ b/src/cli/package_manager_command.zig @@ -24,6 +24,8 @@ const UntrustedCommand = @import("./pm_trusted_command.zig").UntrustedCommand; const TrustCommand = @import("./pm_trusted_command.zig").TrustCommand; const DefaultTrustedCommand = @import("./pm_trusted_command.zig").DefaultTrustedCommand; const Environment = bun.Environment; +pub const PackCommand = @import("./pack_command.zig").PackCommand; +const Npm = Install.Npm; const ByName = struct { dependencies: []const Dependency, @@ -62,7 +64,8 @@ pub const PackageManagerCommand = struct { lockfile_buffer[lockfile_.len] = 0; const lockfile = lockfile_buffer[0..lockfile_.len :0]; const cli = try PackageManager.CommandLineArguments.parse(ctx.allocator, .pm); - var pm = try PackageManager.init(ctx, cli, PackageManager.Subcommand.pm); + var pm, const cwd = try PackageManager.init(ctx, cli, PackageManager.Subcommand.pm); + defer ctx.allocator.free(cwd); const load_lockfile = pm.lockfile.loadFromDisk(pm, ctx.allocator, ctx.log, lockfile, true); handleLoadLockfileErrors(load_lockfile, pm); @@ -98,10 +101,16 @@ pub const PackageManagerCommand = struct { Output.prettyln( \\bun pm: Package manager utilities \\ + \\ bun pm pack create a tarball of the current workspace + \\ --dry-run do everything except for writing the tarball to disk + \\ --destination the directory the tarball will be saved in + \\ --ignore-scripts don't run pre/postpack and prepare scripts + \\ --gzip-level specify a custom compression level for gzip (0-9, default is 9) \\ bun pm bin print the path to bin folder \\ -g print the global path to bin folder \\ bun pm ls list the dependency tree according to the current lockfile \\ --all list the entire dependency tree according to the current lockfile + \\ bun pm whoami print the current npm username \\ bun pm hash generate & print the hash of the current lockfile \\ bun pm hash-string print the string used to hash the lockfile \\ bun pm hash-print print the hash 
stored in the current lockfile @@ -122,7 +131,7 @@ pub const PackageManagerCommand = struct { var args = try std.process.argsAlloc(ctx.allocator); args = args[1..]; const cli = try PackageManager.CommandLineArguments.parse(ctx.allocator, .pm); - var pm = PackageManager.init(ctx, cli, PackageManager.Subcommand.pm) catch |err| { + var pm, const cwd = PackageManager.init(ctx, cli, PackageManager.Subcommand.pm) catch |err| { if (err == error.MissingPackageJSON) { var cwd_buf: bun.PathBuffer = undefined; if (bun.getcwd(&cwd_buf)) |cwd| { @@ -135,13 +144,34 @@ pub const PackageManagerCommand = struct { } return err; }; + defer ctx.allocator.free(cwd); const subcommand = getSubcommand(&pm.options.positionals); if (pm.options.global) { try pm.setupGlobalDir(ctx); } - if (strings.eqlComptime(subcommand, "bin")) { + if (strings.eqlComptime(subcommand, "pack")) { + try PackCommand.execWithManager(ctx, pm); + Global.exit(0); + } else if (strings.eqlComptime(subcommand, "whoami")) { + const username = Npm.whoami(ctx.allocator, pm) catch |err| { + switch (err) { + error.OutOfMemory => bun.outOfMemory(), + error.NeedAuth => { + Output.errGeneric("missing authentication (run `bunx npm login`)", .{}); + }, + error.ProbablyInvalidAuth => { + Output.errGeneric("failed to authenticate with registry '{}'", .{ + bun.fmt.redactedNpmUrl(pm.options.scope.url.href), + }); + }, + } + Global.crash(); + }; + Output.println("{s}", .{username}); + Global.exit(0); + } else if (strings.eqlComptime(subcommand, "bin")) { const output_path = Path.joinAbs(Fs.FileSystem.instance.top_level_dir, .auto, bun.asByteSlice(pm.options.bin_path)); Output.prettyln("{s}", .{output_path}); if (Output.stdout_descriptor_type == .terminal) { diff --git a/src/cli/pm_trusted_command.zig b/src/cli/pm_trusted_command.zig index 159aad49b8..b4a56684c9 100644 --- a/src/cli/pm_trusted_command.zig +++ b/src/cli/pm_trusted_command.zig @@ -344,8 +344,15 @@ pub const TrustCommand = struct { } const output_in_foreground = false; + const optional = false; switch (pm.options.log_level) { - inline else => |log_level| try pm.spawnPackageLifecycleScripts(ctx, info.scripts_list, log_level, output_in_foreground), + inline else => |log_level| try pm.spawnPackageLifecycleScripts( + ctx, + info.scripts_list, + optional, + log_level, + output_in_foreground, + ), } if (pm.options.log_level.showProgress()) { @@ -370,10 +377,8 @@ pub const TrustCommand = struct { const package_json_source = logger.Source.initPathString(PackageManager.package_json_cwd, package_json_contents); - var package_json = bun.JSON.ParseJSONUTF8(&package_json_source, ctx.log, ctx.allocator) catch |err| { - switch (Output.enable_ansi_colors) { - inline else => |enable_ansi_colors| ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), enable_ansi_colors) catch {}, - } + var package_json = bun.JSON.parseUTF8(&package_json_source, ctx.log, ctx.allocator) catch |err| { + ctx.log.print(Output.errorWriter()) catch {}; Output.errGeneric("failed to parse package.json: {s}", .{@errorName(err)}); Global.crash(); diff --git a/src/cli/publish_command.zig b/src/cli/publish_command.zig new file mode 100644 index 0000000000..69c3e6d12f --- /dev/null +++ b/src/cli/publish_command.zig @@ -0,0 +1,1382 @@ +const std = @import("std"); +const bun = @import("root").bun; +const Command = bun.CLI.Command; +const Output = bun.Output; +const Global = bun.Global; +const http = bun.http; +const OOM = bun.OOM; +const Headers = http.Headers; +const HeaderBuilder = http.HeaderBuilder; +const MutableString = 
bun.MutableString; +const URL = bun.URL; +const install = bun.install; +const PackageManager = install.PackageManager; +const strings = bun.strings; +const string = bun.string; +const stringZ = bun.stringZ; +const File = bun.sys.File; +const JSON = bun.JSON; +const sha = bun.sha; +const path = bun.path; +const FileSystem = bun.fs.FileSystem; +const Environment = bun.Environment; +const Archive = bun.libarchive.lib.Archive; +const logger = bun.logger; +const Dependency = install.Dependency; +const Pack = bun.CLI.PackCommand; +const Lockfile = install.Lockfile; +const MimeType = http.MimeType; +const Expr = bun.js_parser.Expr; +const prompt = bun.CLI.InitCommand.prompt; +const Npm = install.Npm; +const Run = bun.CLI.RunCommand; +const DotEnv = bun.DotEnv; +const Open = @import("../open.zig"); +const E = bun.JSAst.E; +const G = bun.JSAst.G; +const BabyList = bun.BabyList; + +pub const PublishCommand = struct { + pub fn Context(comptime directory_publish: bool) type { + return struct { + manager: *PackageManager, + allocator: std.mem.Allocator, + command_ctx: Command.Context, + + package_name: string, + package_version: string, + abs_tarball_path: stringZ, + tarball_bytes: string, + shasum: sha.SHA1.Digest, + integrity: sha.SHA512.Digest, + uses_workspaces: bool, + + normalized_pkg_info: string, + + publish_script: if (directory_publish) ?[]const u8 else void = if (directory_publish) null else {}, + postpublish_script: if (directory_publish) ?[]const u8 else void = if (directory_publish) null else {}, + script_env: if (directory_publish) *DotEnv.Loader else void, + + const FromTarballError = OOM || error{ + MissingPackageJSON, + InvalidPackageJSON, + MissingPackageName, + MissingPackageVersion, + InvalidPackageName, + InvalidPackageVersion, + PrivatePackage, + RestrictedUnscopedPackage, + }; + + /// Retrieve information for publishing from a tarball path, `bun publish path/to/tarball.tgz` + pub fn fromTarballPath( + ctx: Command.Context, + manager: *PackageManager, + tarball_path: string, + ) FromTarballError!Context(directory_publish) { + var abs_buf: bun.PathBuffer = undefined; + const abs_tarball_path = path.joinAbsStringBufZ( + FileSystem.instance.top_level_dir, + &abs_buf, + &[_]string{tarball_path}, + .auto, + ); + + const tarball_bytes = File.readFrom(bun.invalid_fd, abs_tarball_path, ctx.allocator).unwrap() catch |err| { + Output.err(err, "failed to read tarball: '{s}'", .{tarball_path}); + Global.crash(); + }; + + var maybe_package_json_contents: ?[]const u8 = null; + + var iter = switch (Archive.Iterator.init(tarball_bytes)) { + .err => |err| { + Output.errGeneric("{s}: {s}", .{ + err.message, + err.archive.errorString(), + }); + + Global.crash(); + }, + .result => |res| res, + }; + + var unpacked_size: usize = 0; + var total_files: usize = 0; + + Output.print("\n", .{}); + + while (switch (iter.next()) { + .err => |err| { + Output.errGeneric("{s}: {s}", .{ err.message, err.archive.errorString() }); + Global.crash(); + }, + .result => |res| res, + }) |next| { + const pathname = if (comptime Environment.isWindows) + next.entry.pathnameW() + else + next.entry.pathname(); + + const size = next.entry.size(); + + unpacked_size += @intCast(@max(0, size)); + total_files += @intFromBool(next.kind == .file); + + // this is option `strip: 1` (npm expects a `package/` prefix for all paths) + if (strings.indexOfAnyT(bun.OSPathChar, pathname, "/\\")) |slash| { + const stripped = pathname[slash + 1 ..]; + if (stripped.len == 0) continue; + + Output.pretty("packed {} {}\n", .{ + bun.fmt.size(size, 
.{ .space_between_number_and_unit = false }), + bun.fmt.fmtOSPath(stripped, .{}), + }); + + if (next.kind != .file) continue; + + if (strings.indexOfAnyT(bun.OSPathChar, stripped, "/\\") == null) { + + // check for package.json, readme.md, ... + const filename = pathname[slash + 1 ..]; + + if (maybe_package_json_contents == null and strings.eqlCaseInsensitiveT(bun.OSPathChar, filename, "package.json")) { + maybe_package_json_contents = switch (try next.readEntryData(ctx.allocator, iter.archive)) { + .err => |err| { + Output.errGeneric("{s}: {s}", .{ err.message, err.archive.errorString() }); + Global.crash(); + }, + .result => |bytes| bytes, + }; + } + } + } else { + Output.pretty("packed {} {}\n", .{ + bun.fmt.size(size, .{ .space_between_number_and_unit = false }), + bun.fmt.fmtOSPath(pathname, .{}), + }); + } + } + + switch (iter.deinit()) { + .err => |err| { + Output.errGeneric("{s}: {s}", .{ err.message, err.archive.errorString() }); + Global.crash(); + }, + .result => {}, + } + + const package_json_contents = maybe_package_json_contents orelse return error.MissingPackageJSON; + + const package_name, const package_version, var json, const json_source = package_info: { + const source = logger.Source.initPathString("package.json", package_json_contents); + const json = JSON.parsePackageJSONUTF8(&source, manager.log, ctx.allocator) catch |err| { + return switch (err) { + error.OutOfMemory => |oom| return oom, + else => error.InvalidPackageJSON, + }; + }; + + if (json.get("private")) |private| { + if (private.asBool()) |is_private| { + if (is_private) { + return error.PrivatePackage; + } + } + } + + if (json.get("publishConfig")) |config| { + if (manager.options.publish_config.tag.len == 0) { + if (try config.getStringCloned(ctx.allocator, "tag")) |tag| { + manager.options.publish_config.tag = tag; + } + } + + if (manager.options.publish_config.access == null) { + if (try config.getString(ctx.allocator, "access")) |access| { + manager.options.publish_config.access = PackageManager.Options.Access.fromStr(access[0]) orelse { + Output.errGeneric("invalid `access` value: '{s}'", .{access[0]}); + Global.crash(); + }; + } + } + + // maybe otp + } + + const name = try json.getStringCloned(ctx.allocator, "name") orelse return error.MissingPackageName; + const is_scoped = try Dependency.isScopedPackageName(name); + + if (manager.options.publish_config.access) |access| { + if (access == .restricted and !is_scoped) { + return error.RestrictedUnscopedPackage; + } + } + + const version = try json.getStringCloned(ctx.allocator, "version") orelse return error.MissingPackageVersion; + if (version.len == 0) return error.InvalidPackageVersion; + + break :package_info .{ name, version, json, source }; + }; + + var shasum: sha.SHA1.Digest = undefined; + var sha1 = sha.SHA1.init(); + defer sha1.deinit(); + + sha1.update(tarball_bytes); + sha1.final(&shasum); + + var integrity: sha.SHA512.Digest = undefined; + var sha512 = sha.SHA512.init(); + defer sha512.deinit(); + + sha512.update(tarball_bytes); + sha512.final(&integrity); + + const normalized_pkg_info = try normalizedPackage( + ctx.allocator, + manager, + package_name, + package_version, + &json, + json_source, + shasum, + integrity, + abs_tarball_path, + ); + + Pack.Context.printSummary( + .{ + .total_files = total_files, + .unpacked_size = unpacked_size, + .packed_size = tarball_bytes.len, + }, + shasum, + integrity, + manager.options.log_level, + ); + + return .{ + .manager = manager, + .allocator = ctx.allocator, + .package_name = package_name, + 
.package_version = package_version, + .abs_tarball_path = try ctx.allocator.dupeZ(u8, abs_tarball_path), + .tarball_bytes = tarball_bytes, + .shasum = shasum, + .integrity = integrity, + .uses_workspaces = false, + .command_ctx = ctx, + .script_env = {}, + .normalized_pkg_info = normalized_pkg_info, + }; + } + + const FromWorkspaceError = Pack.PackError(true); + + /// `bun publish` without a tarball path. Automatically pack the current workspace and get + /// information required for publishing + pub fn fromWorkspace( + ctx: Command.Context, + manager: *PackageManager, + ) FromWorkspaceError!Context(directory_publish) { + var lockfile: Lockfile = undefined; + const load_from_disk_result = lockfile.loadFromDisk( + manager, + manager.allocator, + manager.log, + manager.options.lockfile_path, + false, + ); + + var pack_ctx: Pack.Context = .{ + .allocator = ctx.allocator, + .manager = manager, + .command_ctx = ctx, + .lockfile = switch (load_from_disk_result) { + .ok => |ok| ok.lockfile, + .not_found => null, + .err => |cause| err: { + switch (cause.step) { + .open_file => { + if (cause.value == error.ENOENT) break :err null; + Output.errGeneric("failed to open lockfile: {s}", .{@errorName(cause.value)}); + }, + .parse_file => { + Output.errGeneric("failed to parse lockfile: {s}", .{@errorName(cause.value)}); + }, + .read_file => { + Output.errGeneric("failed to read lockfile: {s}", .{@errorName(cause.value)}); + }, + .migrating => { + Output.errGeneric("failed to migrate lockfile: {s}", .{@errorName(cause.value)}); + }, + } + + if (manager.log.hasErrors()) { + manager.log.print(Output.errorWriter()) catch {}; + } + + Global.crash(); + }, + }, + }; + + return switch (manager.options.log_level) { + inline else => |log_level| Pack.pack(&pack_ctx, manager.original_package_json_path, log_level, true), + }; + } + }; + } + + pub fn exec(ctx: Command.Context) !void { + Output.prettyln("bun publish v" ++ Global.package_json_version_with_sha ++ "", .{}); + Output.flush(); + + const cli = try PackageManager.CommandLineArguments.parse(ctx.allocator, .publish); + + const manager, const original_cwd = PackageManager.init(ctx, cli, .publish) catch |err| { + if (!cli.silent) { + if (err == error.MissingPackageJSON) { + Output.errGeneric("missing package.json, nothing to publish", .{}); + } + Output.errGeneric("failed to initialize bun install: {s}", .{@errorName(err)}); + } + Global.crash(); + }; + defer ctx.allocator.free(original_cwd); + + if (cli.positionals.len > 1) { + const context = Context(false).fromTarballPath(ctx, manager, cli.positionals[1]) catch |err| { + switch (err) { + error.OutOfMemory => bun.outOfMemory(), + error.MissingPackageName => { + Output.errGeneric("missing `name` string in package.json", .{}); + }, + error.MissingPackageVersion => { + Output.errGeneric("missing `version` string in package.json", .{}); + }, + error.InvalidPackageName, error.InvalidPackageVersion => { + Output.errGeneric("package.json `name` and `version` fields must be non-empty strings", .{}); + }, + error.MissingPackageJSON => { + Output.errGeneric("failed to find package.json in tarball '{s}'", .{cli.positionals[1]}); + }, + error.InvalidPackageJSON => { + manager.log.print(Output.errorWriter()) catch {}; + Output.errGeneric("failed to parse tarball package.json", .{}); + }, + error.PrivatePackage => { + Output.errGeneric("attempted to publish a private package", .{}); + }, + error.RestrictedUnscopedPackage => { + Output.errGeneric("unable to restrict access to unscoped package", .{}); + }, + } + 
Global.crash(); + }; + + publish(false, &context) catch |err| { + switch (err) { + error.OutOfMemory => bun.outOfMemory(), + error.NeedAuth => { + Output.errGeneric("missing authentication (run `bunx npm login`)", .{}); + Global.crash(); + }, + } + }; + + Output.prettyln("\n + {s}@{s}{s}", .{ + context.package_name, + Dependency.withoutBuildTag(context.package_version), + if (manager.options.dry_run) " (dry-run)" else "", + }); + + return; + } + + const context = Context(true).fromWorkspace(ctx, manager) catch |err| { + switch (err) { + error.OutOfMemory => bun.outOfMemory(), + error.MissingPackageName => { + Output.errGeneric("missing `name` string in package.json", .{}); + }, + error.MissingPackageVersion => { + Output.errGeneric("missing `version` string in package.json", .{}); + }, + error.InvalidPackageName, error.InvalidPackageVersion => { + Output.errGeneric("package.json `name` and `version` fields must be non-empty strings", .{}); + }, + error.MissingPackageJSON => { + Output.errGeneric("failed to find package.json from: '{s}'", .{FileSystem.instance.top_level_dir}); + }, + error.RestrictedUnscopedPackage => { + Output.errGeneric("unable to restrict access to unscoped package", .{}); + }, + error.PrivatePackage => { + Output.errGeneric("attempted to publish a private package", .{}); + }, + } + Global.crash(); + }; + + // TODO: read this into memory + _ = bun.sys.unlink(context.abs_tarball_path); + + publish(true, &context) catch |err| { + switch (err) { + error.OutOfMemory => bun.outOfMemory(), + error.NeedAuth => { + Output.errGeneric("missing authentication (run `bunx npm login`)", .{}); + Global.crash(); + }, + } + }; + + Output.prettyln("\n + {s}@{s}{s}", .{ + context.package_name, + Dependency.withoutBuildTag(context.package_version), + if (manager.options.dry_run) " (dry-run)" else "", + }); + + if (manager.options.do.run_scripts) { + const abs_workspace_path: string = strings.withoutTrailingSlash(strings.withoutSuffixComptime(manager.original_package_json_path, "package.json")); + if (context.publish_script) |publish_script| { + _ = Run.runPackageScriptForeground( + context.command_ctx, + context.allocator, + publish_script, + "publish", + abs_workspace_path, + context.script_env, + &.{}, + context.manager.options.log_level == .silent, + context.command_ctx.debug.use_system_shell, + ) catch |err| { + switch (err) { + error.MissingShell => { + Output.errGeneric("failed to find shell executable to run publish script", .{}); + Global.crash(); + }, + error.OutOfMemory => |oom| return oom, + } + }; + } + + if (context.postpublish_script) |postpublish_script| { + _ = Run.runPackageScriptForeground( + context.command_ctx, + context.allocator, + postpublish_script, + "postpublish", + abs_workspace_path, + context.script_env, + &.{}, + context.manager.options.log_level == .silent, + context.command_ctx.debug.use_system_shell, + ) catch |err| { + switch (err) { + error.MissingShell => { + Output.errGeneric("failed to find shell executable to run postpublish script", .{}); + Global.crash(); + }, + error.OutOfMemory => |oom| return oom, + } + }; + } + } + } + + const PublishError = OOM || error{ + NeedAuth, + }; + + pub fn publish( + comptime directory_publish: bool, + ctx: *const Context(directory_publish), + ) PublishError!void { + const registry = ctx.manager.scopeForPackageName(ctx.package_name); + + if (registry.token.len == 0 and (registry.url.password.len == 0 or registry.url.username.len == 0)) { + return error.NeedAuth; + } + + // continues from `printSummary` + Output.pretty( 
+ \\Tag: {s} + \\Access: {s} + \\Registry: {s} + \\ + , .{ + if (ctx.manager.options.publish_config.tag.len > 0) ctx.manager.options.publish_config.tag else "latest", + if (ctx.manager.options.publish_config.access) |access| @tagName(access) else "default", + registry.url.href, + }); + + // dry-run stops here + if (ctx.manager.options.dry_run) return; + + const publish_req_body = try constructPublishRequestBody(directory_publish, ctx); + + var print_buf: std.ArrayListUnmanaged(u8) = .{}; + defer print_buf.deinit(ctx.allocator); + var print_writer = print_buf.writer(ctx.allocator); + + const publish_headers = try constructPublishHeaders( + ctx.allocator, + &print_buf, + registry, + publish_req_body.len, + if (ctx.manager.options.publish_config.otp.len > 0) ctx.manager.options.publish_config.otp else null, + ctx.uses_workspaces, + ctx.manager.options.publish_config.auth_type, + ); + + var response_buf = try MutableString.init(ctx.allocator, 1024); + + try print_writer.print("{s}/{s}", .{ + strings.withoutTrailingSlash(registry.url.href), + bun.fmt.dependencyUrl(ctx.package_name), + }); + const publish_url = URL.parse(try ctx.allocator.dupe(u8, print_buf.items)); + print_buf.clearRetainingCapacity(); + + var req = http.AsyncHTTP.initSync( + ctx.allocator, + .PUT, + publish_url, + publish_headers.entries, + publish_headers.content.ptr.?[0..publish_headers.content.len], + &response_buf, + publish_req_body, + null, + null, + .follow, + ); + + const res = req.sendSync() catch |err| { + switch (err) { + error.OutOfMemory => |oom| return oom, + else => { + Output.err(err, "failed to publish package", .{}); + Global.crash(); + }, + } + }; + + switch (res.status_code) { + 400...std.math.maxInt(@TypeOf(res.status_code)) => { + const prompt_for_otp = prompt_for_otp: { + if (res.status_code != 401) break :prompt_for_otp false; + + if (res.headers.get("www-authenticate")) |@"www-authenticate"| { + var iter = strings.split(@"www-authenticate", ","); + while (iter.next()) |part| { + const trimmed = strings.trim(part, &strings.whitespace_chars); + if (strings.eqlCaseInsensitiveASCII(trimmed, "ipaddress", true)) { + Output.errGeneric("login is not allowed from your IP address", .{}); + Global.crash(); + } else if (strings.eqlCaseInsensitiveASCII(trimmed, "otp", true)) { + break :prompt_for_otp true; + } + } + + Output.errGeneric("unable to authenticate, need: {s}", .{@"www-authenticate"}); + Global.crash(); + } else if (strings.containsComptime(response_buf.list.items, "one-time pass")) { + // missing www-authenicate header but one-time pass is still included + break :prompt_for_otp true; + } + + break :prompt_for_otp false; + }; + + if (!prompt_for_otp) { + // general error + const otp_response = false; + try Npm.responseError( + ctx.allocator, + &req, + &res, + .{ ctx.package_name, ctx.package_version }, + &response_buf, + otp_response, + ); + } + + // https://github.com/npm/cli/blob/534ad7789e5c61f579f44d782bdd18ea3ff1ee20/node_modules/npm-registry-fetch/lib/check-response.js#L14 + // ignore if x-local-cache exists + if (res.headers.getIfOtherIsAbsent("npm-notice", "x-local-cache")) |notice| { + Output.printError("\n", .{}); + Output.note("{s}", .{notice}); + Output.flush(); + } + + const otp = try getOTP(directory_publish, ctx, registry, &response_buf, &print_buf); + + const otp_headers = try constructPublishHeaders( + ctx.allocator, + &print_buf, + registry, + publish_req_body.len, + otp, + ctx.uses_workspaces, + ctx.manager.options.publish_config.auth_type, + ); + + response_buf.reset(); + + var 
otp_req = http.AsyncHTTP.initSync( + ctx.allocator, + .PUT, + publish_url, + otp_headers.entries, + otp_headers.content.ptr.?[0..otp_headers.content.len], + &response_buf, + publish_req_body, + null, + null, + .follow, + ); + + const otp_res = otp_req.sendSync() catch |err| { + switch (err) { + error.OutOfMemory => |oom| return oom, + else => { + Output.err(err, "failed to publish package", .{}); + Global.crash(); + }, + } + }; + + switch (otp_res.status_code) { + 400...std.math.maxInt(@TypeOf(otp_res.status_code)) => { + const otp_response = true; + try Npm.responseError( + ctx.allocator, + &otp_req, + &otp_res, + .{ ctx.package_name, ctx.package_version }, + &response_buf, + otp_response, + ); + }, + else => { + // https://github.com/npm/cli/blob/534ad7789e5c61f579f44d782bdd18ea3ff1ee20/node_modules/npm-registry-fetch/lib/check-response.js#L14 + // ignore if x-local-cache exists + if (otp_res.headers.getIfOtherIsAbsent("npm-notice", "x-local-cache")) |notice| { + Output.printError("\n", .{}); + Output.note("{s}", .{notice}); + Output.flush(); + } + }, + } + }, + else => {}, + } + } + + const GetOTPError = OOM || error{}; + + fn pressEnterToOpenInBrowser(auth_url: stringZ) void { + // unset `ENABLE_VIRTUAL_TERMINAL_INPUT` on windows. This prevents backspace from + // deleting the entire line + const original_mode: if (Environment.isWindows) ?bun.windows.DWORD else void = if (comptime Environment.isWindows) + bun.win32.unsetStdioModeFlags(0, bun.windows.ENABLE_VIRTUAL_TERMINAL_INPUT) catch null + else {}; + + defer if (comptime Environment.isWindows) { + if (original_mode) |mode| { + _ = bun.windows.SetConsoleMode(bun.win32.STDIN_FD.cast(), mode); + } + }; + + while ('\n' != Output.buffered_stdin.reader().readByte() catch return) {} + + var child = std.process.Child.init(&.{ Open.opener, auth_url }, bun.default_allocator); + _ = child.spawnAndWait() catch return; + } + + fn getOTP( + comptime directory_publish: bool, + ctx: *const Context(directory_publish), + registry: *const Npm.Registry.Scope, + response_buf: *MutableString, + print_buf: *std.ArrayListUnmanaged(u8), + ) GetOTPError![]const u8 { + const res_source = logger.Source.initPathString("???", response_buf.list.items); + + if (JSON.parseUTF8(&res_source, ctx.manager.log, ctx.allocator) catch |err| res_json: { + switch (err) { + error.OutOfMemory => |oom| return oom, + + // https://github.com/npm/cli/blob/63d6a732c3c0e9c19fd4d147eaa5cc27c29b168d/node_modules/npm-registry-fetch/lib/check-response.js#L65 + // invalid json is ignored + else => break :res_json null, + } + }) |json| try_web: { + const auth_url_str = try json.getStringClonedZ(ctx.allocator, "authUrl") orelse break :try_web; + + // important to clone because it belongs to `response_buf`, and `response_buf` will be + // reused with the following requests + const done_url_str = try json.getStringCloned(ctx.allocator, "doneUrl") orelse break :try_web; + const done_url = URL.parse(done_url_str); + + Output.prettyln("\nAuthenticate your account at (press ENTER to open in browser):\n", .{}); + + const offset = 0; + const padding = 1; + + const horizontal = if (Output.enable_ansi_colors) "─" else "-"; + const vertical = if (Output.enable_ansi_colors) "│" else "|"; + const top_left = if (Output.enable_ansi_colors) "┌" else "|"; + const top_right = if (Output.enable_ansi_colors) "┐" else "|"; + const bottom_left = if (Output.enable_ansi_colors) "└" else "|"; + const bottom_right = if (Output.enable_ansi_colors) "┘" else "|"; + + const width = (padding * 2) + auth_url_str.len; + + 
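+            // draw a simple box around the auth URL so it stands out in the terminal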
for (0..offset) |_| Output.print(" ", .{}); + Output.print("{s}", .{top_left}); + for (0..width) |_| Output.print("{s}", .{horizontal}); + Output.println("{s}", .{top_right}); + + for (0..offset) |_| Output.print(" ", .{}); + Output.print("{s}", .{vertical}); + for (0..padding) |_| Output.print(" ", .{}); + Output.pretty("{s}", .{auth_url_str}); + for (0..padding) |_| Output.print(" ", .{}); + Output.println("{s}", .{vertical}); + + for (0..offset) |_| Output.print(" ", .{}); + Output.print("{s}", .{bottom_left}); + for (0..width) |_| Output.print("{s}", .{horizontal}); + Output.println("{s}", .{bottom_right}); + Output.flush(); + + // on another thread because pressing enter is not required + (std.Thread.spawn(.{}, pressEnterToOpenInBrowser, .{auth_url_str}) catch |err| { + Output.err(err, "failed to spawn thread for opening auth url", .{}); + Global.crash(); + }).detach(); + + var auth_headers = try constructPublishHeaders( + ctx.allocator, + print_buf, + registry, + null, + null, + ctx.uses_workspaces, + ctx.manager.options.publish_config.auth_type, + ); + + while (true) { + response_buf.reset(); + + var req = http.AsyncHTTP.initSync( + ctx.allocator, + .GET, + done_url, + auth_headers.entries, + auth_headers.content.ptr.?[0..auth_headers.content.len], + response_buf, + "", + null, + null, + .follow, + ); + + const res = req.sendSync() catch |err| { + switch (err) { + error.OutOfMemory => |oom| return oom, + else => { + Output.err(err, "failed to send OTP request", .{}); + Global.crash(); + }, + } + }; + + switch (res.status_code) { + 202 => { + // retry + const nanoseconds = nanoseconds: { + if (res.headers.get("retry-after")) |retry| default: { + const trimmed = strings.trim(retry, &strings.whitespace_chars); + const seconds = bun.fmt.parseInt(u32, trimmed, 10) catch break :default; + break :nanoseconds seconds * std.time.ns_per_s; + } + + break :nanoseconds 500 * std.time.ns_per_ms; + }; + + std.time.sleep(nanoseconds); + continue; + }, + 200 => { + // login successful + const otp_done_source = logger.Source.initPathString("???", response_buf.list.items); + const otp_done_json = JSON.parseUTF8(&otp_done_source, ctx.manager.log, ctx.allocator) catch |err| { + switch (err) { + error.OutOfMemory => |oom| return oom, + else => { + Output.err("WebLogin", "failed to parse response json", .{}); + Global.crash(); + }, + } + }; + + const token = try otp_done_json.getStringCloned(ctx.allocator, "token") orelse { + Output.err("WebLogin", "missing `token` field in reponse json", .{}); + Global.crash(); + }; + + // https://github.com/npm/cli/blob/534ad7789e5c61f579f44d782bdd18ea3ff1ee20/node_modules/npm-registry-fetch/lib/check-response.js#L14 + // ignore if x-local-cache exists + if (res.headers.getIfOtherIsAbsent("npm-notice", "x-local-cache")) |notice| { + Output.printError("\n", .{}); + Output.note("{s}", .{notice}); + Output.flush(); + } + + return token; + }, + else => { + const otp_response = false; + try Npm.responseError( + ctx.allocator, + &req, + &res, + .{ ctx.package_name, ctx.package_version }, + response_buf, + otp_response, + ); + }, + } + } + } + + // classic + return prompt(ctx.allocator, "\nThis operation requires a one-time password.\nEnter OTP: ", "") catch |err| { + switch (err) { + error.OutOfMemory => |oom| return oom, + else => { + Output.err(err, "failed to read OTP input", .{}); + Global.crash(); + }, + } + }; + } + + pub fn normalizedPackage( + allocator: std.mem.Allocator, + manager: *PackageManager, + package_name: string, + package_version: string, + json: *Expr, + 
json_source: logger.Source, + shasum: sha.SHA1.Digest, + integrity: sha.SHA512.Digest, + abs_tarball_path: stringZ, + ) OOM!string { + bun.assertWithLocation(json.isObject(), @src()); + + const registry = manager.scopeForPackageName(package_name); + + const version_without_build_tag = Dependency.withoutBuildTag(package_version); + + const integrity_fmt = try std.fmt.allocPrint(allocator, "{}", .{bun.fmt.integrity(integrity, .full)}); + + try json.setString(allocator, "_id", try std.fmt.allocPrint(allocator, "{s}@{s}", .{ package_name, version_without_build_tag })); + try json.setString(allocator, "_integrity", integrity_fmt); + try json.setString(allocator, "_nodeVersion", Environment.reported_nodejs_version); + // TODO: npm version + try json.setString(allocator, "_npmVersion", "10.8.3"); + try json.setString(allocator, "integrity", integrity_fmt); + try json.setString(allocator, "shasum", try std.fmt.allocPrint(allocator, "{s}", .{bun.fmt.bytesToHex(shasum, .lower)})); + + var dist_props = try allocator.alloc(G.Property, 3); + dist_props[0] = .{ + .key = Expr.init( + E.String, + .{ .data = "integrity" }, + logger.Loc.Empty, + ), + .value = Expr.init( + E.String, + .{ .data = try std.fmt.allocPrint(allocator, "{}", .{bun.fmt.integrity(integrity, .full)}) }, + logger.Loc.Empty, + ), + }; + dist_props[1] = .{ + .key = Expr.init( + E.String, + .{ .data = "shasum" }, + logger.Loc.Empty, + ), + .value = Expr.init( + E.String, + .{ .data = try std.fmt.allocPrint(allocator, "{s}", .{bun.fmt.bytesToHex(shasum, .lower)}) }, + logger.Loc.Empty, + ), + }; + dist_props[2] = .{ + .key = Expr.init( + E.String, + .{ .data = "tarball" }, + logger.Loc.Empty, + ), + .value = Expr.init( + E.String, + .{ + .data = try bun.fmt.allocPrint(allocator, "http://{s}/{s}/-/{s}", .{ + strings.withoutTrailingSlash(registry.url.href), + package_name, + std.fs.path.basename(abs_tarball_path), + }), + }, + logger.Loc.Empty, + ), + }; + + try json.set(allocator, "dist", Expr.init( + E.Object, + .{ .properties = G.Property.List.init(dist_props) }, + logger.Loc.Empty, + )); + + { + const workspace_root = bun.sys.openA( + strings.withoutSuffixComptime(manager.original_package_json_path, "package.json"), + bun.O.DIRECTORY, + 0, + ).unwrap() catch |err| { + Output.err(err, "failed to open workspace directory", .{}); + Global.crash(); + }; + defer _ = bun.sys.close(workspace_root); + + try normalizeBin( + allocator, + json, + package_name, + workspace_root, + ); + } + + const buffer_writer = try bun.js_printer.BufferWriter.init(allocator); + var writer = bun.js_printer.BufferPrinter.init(buffer_writer); + + const written = bun.js_printer.printJSON( + @TypeOf(&writer), + &writer, + json.*, + &json_source, + .{ + .minify_whitespace = true, + }, + ) catch |err| { + switch (err) { + error.OutOfMemory => |oom| return oom, + else => { + Output.errGeneric("failed to print normalized package.json: {s}", .{@errorName(err)}); + Global.crash(); + }, + } + }; + _ = written; + + return writer.ctx.writtenWithoutTrailingZero(); + } + + fn normalizeBin( + allocator: std.mem.Allocator, + json: *Expr, + package_name: string, + workspace_root: bun.FileDescriptor, + ) OOM!void { + var path_buf: bun.PathBuffer = undefined; + if (json.asProperty("bin")) |bin_query| { + switch (bin_query.expr.data) { + .e_string => |bin_str| { + var bin_props = std.ArrayList(G.Property).init(allocator); + const normalized = strings.withoutPrefixComptimeZ( + path.normalizeBufZ( + try bin_str.string(allocator), + &path_buf, + .posix, + ), + "./", + ); + if 
(!bun.sys.existsAt(workspace_root, normalized)) { + Output.warn("bin '{s}' does not exist", .{normalized}); + } + + try bin_props.append(.{ + .key = Expr.init( + E.String, + .{ .data = package_name }, + logger.Loc.Empty, + ), + .value = Expr.init( + E.String, + .{ .data = try allocator.dupe(u8, normalized) }, + logger.Loc.Empty, + ), + }); + + json.data.e_object.properties.ptr[bin_query.i].value = Expr.init( + E.Object, + .{ + .properties = G.Property.List.fromList(bin_props), + }, + logger.Loc.Empty, + ); + }, + .e_object => |bin_obj| { + var bin_props = std.ArrayList(G.Property).init(allocator); + for (bin_obj.properties.slice()) |bin_prop| { + const key = key: { + if (bin_prop.key) |key| { + if (key.isString() and key.data.e_string.len() != 0) { + break :key try allocator.dupeZ( + u8, + strings.withoutPrefixComptime( + path.normalizeBuf( + try key.data.e_string.string(allocator), + &path_buf, + .posix, + ), + "./", + ), + ); + } + } + + continue; + }; + + if (key.len == 0) { + continue; + } + + const value = value: { + if (bin_prop.value) |value| { + if (value.isString() and value.data.e_string.len() != 0) { + break :value try allocator.dupeZ( + u8, + strings.withoutPrefixComptimeZ( + // replace separators + path.normalizeBufZ( + try value.data.e_string.string(allocator), + &path_buf, + .posix, + ), + "./", + ), + ); + } + } + + continue; + }; + if (value.len == 0) { + continue; + } + + if (!bun.sys.existsAt(workspace_root, value)) { + Output.warn("bin '{s}' does not exist", .{value}); + } + + try bin_props.append(.{ + .key = Expr.init( + E.String, + .{ .data = key }, + logger.Loc.Empty, + ), + .value = Expr.init( + E.String, + .{ .data = value }, + logger.Loc.Empty, + ), + }); + } + + json.data.e_object.properties.ptr[bin_query.i].value = Expr.init( + E.Object, + .{ .properties = G.Property.List.fromList(bin_props) }, + logger.Loc.Empty, + ); + }, + else => {}, + } + } else if (json.asProperty("directories")) |directories_query| { + if (directories_query.expr.asProperty("bin")) |bin_query| { + const bin_dir_str = bin_query.expr.asString(allocator) orelse { + return; + }; + var bin_props = std.ArrayList(G.Property).init(allocator); + const normalized_bin_dir = try allocator.dupeZ( + u8, + strings.withoutTrailingSlash( + strings.withoutPrefixComptime( + path.normalizeBuf( + bin_dir_str, + &path_buf, + .posix, + ), + "./", + ), + ), + ); + + if (normalized_bin_dir.len == 0) { + return; + } + + const bin_dir = bun.sys.openat(workspace_root, normalized_bin_dir, bun.O.DIRECTORY, 0).unwrap() catch |err| { + if (err == error.ENOENT) { + Output.warn("bin directory '{s}' does not exist", .{normalized_bin_dir}); + return; + } else { + Output.err(err, "failed to open bin directory: '{s}'", .{normalized_bin_dir}); + Global.crash(); + } + }; + + var dirs: std.ArrayListUnmanaged(struct { std.fs.Dir, string, bool }) = .{}; + defer dirs.deinit(allocator); + + try dirs.append(allocator, .{ bin_dir.asDir(), normalized_bin_dir, false }); + + while (dirs.popOrNull()) |dir_info| { + var dir, const dir_subpath, const close_dir = dir_info; + defer if (close_dir) dir.close(); + + var iter = bun.DirIterator.iterate(dir, .u8); + while (iter.next().unwrap() catch null) |entry| { + const name, const subpath = name_and_subpath: { + const name = entry.name.slice(); + const join = try bun.fmt.allocPrintZ(allocator, "{s}{s}{s}", .{ + dir_subpath, + // only using posix separators + if (dir_subpath.len == 0) "" else std.fs.path.sep_str_posix, + strings.withoutTrailingSlash(name), + }); + + break :name_and_subpath .{ 
join[join.len - name.len ..][0..name.len :0], join }; + }; + + if (name.len == 0 or (name.len == 1 and name[0] == '.') or (name.len == 2 and name[0] == '.' and name[1] == '.')) { + continue; + } + + try bin_props.append(.{ + .key = Expr.init( + E.String, + .{ .data = std.fs.path.basenamePosix(subpath) }, + logger.Loc.Empty, + ), + .value = Expr.init( + E.String, + .{ .data = subpath }, + logger.Loc.Empty, + ), + }); + + if (entry.kind == .directory) { + const subdir = dir.openDirZ(name, .{ .iterate = true }) catch { + continue; + }; + try dirs.append(allocator, .{ subdir, subpath, true }); + } + } + } + + try json.set(allocator, "bin", Expr.init(E.Object, .{ .properties = G.Property.List.fromList(bin_props) }, logger.Loc.Empty)); + } + } + + // no bins + } + + fn constructPublishHeaders( + allocator: std.mem.Allocator, + print_buf: *std.ArrayListUnmanaged(u8), + registry: *const Npm.Registry.Scope, + maybe_json_len: ?usize, + maybe_otp: ?[]const u8, + uses_workspaces: bool, + auth_type: ?PackageManager.Options.AuthType, + ) OOM!http.HeaderBuilder { + var print_writer = print_buf.writer(allocator); + var headers: http.HeaderBuilder = .{}; + const npm_auth_type = if (maybe_otp == null) + if (auth_type) |auth| @tagName(auth) else "web" + else + "legacy"; + const ci_name = bun.detectCI(); + + { + headers.count("accept", "*/*"); + headers.count("accept-encoding", "gzip,deflate"); + + if (registry.token.len > 0) { + try print_writer.print("Bearer {s}", .{registry.token}); + headers.count("authorization", print_buf.items); + print_buf.clearRetainingCapacity(); + } else if (registry.auth.len > 0) { + try print_writer.print("Basic {s}", .{registry.auth}); + headers.count("authorization", print_buf.items); + print_buf.clearRetainingCapacity(); + } + + if (maybe_json_len != null) { + // not using `MimeType.json.value`, verdaccio will fail if it's anything other than `application/json` + headers.count("content-type", "application/json"); + } + + headers.count("npm-auth-type", npm_auth_type); + if (maybe_otp) |otp| { + headers.count("npm-otp", otp); + } + headers.count("npm-command", "publish"); + + try print_writer.print("{s} {s} {s} workspaces/{}{s}{s}", .{ + Global.user_agent, + Global.os_name, + Global.arch_name, + uses_workspaces, + if (ci_name != null) " ci/" else "", + ci_name orelse "", + }); + // headers.count("user-agent", "npm/10.8.3 node/v22.6.0 darwin arm64 workspaces/false"); + headers.count("user-agent", print_buf.items); + print_buf.clearRetainingCapacity(); + + headers.count("Connection", "keep-alive"); + headers.count("Host", registry.url.host); + + if (maybe_json_len) |json_len| { + try print_writer.print("{d}", .{json_len}); + headers.count("Content-Length", print_buf.items); + print_buf.clearRetainingCapacity(); + } + } + + try headers.allocate(allocator); + + { + headers.append("accept", "*/*"); + headers.append("accept-encoding", "gzip,deflate"); + + if (registry.token.len > 0) { + try print_writer.print("Bearer {s}", .{registry.token}); + headers.append("authorization", print_buf.items); + print_buf.clearRetainingCapacity(); + } else if (registry.auth.len > 0) { + try print_writer.print("Basic {s}", .{registry.auth}); + headers.append("authorization", print_buf.items); + print_buf.clearRetainingCapacity(); + } + + if (maybe_json_len != null) { + // not using `MimeType.json.value`, verdaccio will fail if it's anything other than `application/json` + headers.append("content-type", "application/json"); + } + + headers.append("npm-auth-type", npm_auth_type); + if (maybe_otp) |otp| { 
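+            // include the one-time password header when an OTP was provided or prompted for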
+ headers.append("npm-otp", otp); + } + headers.append("npm-command", "publish"); + + try print_writer.print("{s} {s} {s} workspaces/{}{s}{s}", .{ + Global.user_agent, + Global.os_name, + Global.arch_name, + uses_workspaces, + if (ci_name != null) " ci/" else "", + ci_name orelse "", + }); + // headers.append("user-agent", "npm/10.8.3 node/v22.6.0 darwin arm64 workspaces/false"); + headers.append("user-agent", print_buf.items); + print_buf.clearRetainingCapacity(); + + headers.append("Connection", "keep-alive"); + headers.append("Host", registry.url.host); + + if (maybe_json_len) |json_len| { + try print_writer.print("{d}", .{json_len}); + headers.append("Content-Length", print_buf.items); + print_buf.clearRetainingCapacity(); + } + } + + return headers; + } + + fn constructPublishRequestBody( + comptime directory_publish: bool, + ctx: *const Context(directory_publish), + ) OOM![]const u8 { + const tag = if (ctx.manager.options.publish_config.tag.len > 0) + ctx.manager.options.publish_config.tag + else + "latest"; + + const encoded_tarball_len = std.base64.standard.Encoder.calcSize(ctx.tarball_bytes.len); + const version_without_build_tag = Dependency.withoutBuildTag(ctx.package_version); + + var buf = try std.ArrayListUnmanaged(u8).initCapacity( + ctx.allocator, + ctx.package_name.len * 5 + + version_without_build_tag.len * 4 + + ctx.abs_tarball_path.len + + encoded_tarball_len, + ); + var writer = buf.writer(ctx.allocator); + + try writer.print("{{\"_id\":\"{s}\",\"name\":\"{s}\"", .{ + ctx.package_name, + ctx.package_name, + }); + + try writer.print(",\"dist-tags\":{{\"{s}\":\"{s}\"}}", .{ + tag, + version_without_build_tag, + }); + + // "versions" + { + try writer.print(",\"versions\":{{\"{s}\":{s}}}", .{ + version_without_build_tag, + ctx.normalized_pkg_info, + }); + } + + if (ctx.manager.options.publish_config.access) |access| { + try writer.print(",\"access\":\"{s}\"", .{@tagName(access)}); + } else { + try writer.writeAll(",\"access\":null"); + } + + // "_attachments" + { + try writer.print(",\"_attachments\":{{\"{s}\":{{\"content_type\":\"{s}\",\"data\":\"", .{ + std.fs.path.basename(ctx.abs_tarball_path), + "application/octet-stream", + }); + + try buf.ensureUnusedCapacity(ctx.allocator, encoded_tarball_len); + buf.items.len += encoded_tarball_len; + const count = bun.simdutf.base64.encode(ctx.tarball_bytes, buf.items[buf.items.len - encoded_tarball_len ..], false); + bun.assertWithLocation(count == encoded_tarball_len, @src()); + + try writer.print("\",\"length\":{d}}}}}}}", .{ + ctx.tarball_bytes.len, + }); + } + + return buf.items; + } +}; diff --git a/src/cli/run_command.zig b/src/cli/run_command.zig index 5e8abbe276..3267b38de8 100644 --- a/src/cli/run_command.zig +++ b/src/cli/run_command.zig @@ -13,6 +13,7 @@ const std = @import("std"); const uws = bun.uws; const JSC = bun.JSC; const WaiterThread = JSC.Subprocess.WaiterThread; +const OOM = bun.OOM; const lex = bun.js_lexer; const logger = bun.logger; @@ -139,7 +140,7 @@ pub const RunCommand = struct { pub inline fn replacePackageManagerRun( copy_script: *std.ArrayList(u8), script: string, - ) !void { + ) OOM!void { var entry_i: usize = 0; var delimiter: u8 = ' '; @@ -193,7 +194,6 @@ pub const RunCommand = struct { delimiter = 0; }, - // do we need to escape? 
' ' => { delimiter = ' '; }, @@ -235,24 +235,6 @@ pub const RunCommand = struct { delimiter = 0; }, - // TODO: handle escape sequences properly - // https://github.com/oven-sh/bun/issues/53 - '\\' => { - delimiter = 0; - - if (entry_i + 1 < script.len) { - switch (script[entry_i + 1]) { - '"', '\'' => { - entry_i += 1; - continue; - }, - '\\' => { - entry_i += 1; - }, - else => {}, - } - } - }, else => { delimiter = 0; }, @@ -265,7 +247,7 @@ pub const RunCommand = struct { const log = Output.scoped(.RUN, false); - fn runPackageScriptForeground( + pub fn runPackageScriptForeground( ctx: Command.Context, allocator: std.mem.Allocator, original_script: string, @@ -275,46 +257,37 @@ pub const RunCommand = struct { passthrough: []const string, silent: bool, use_system_shell: bool, - ) !bool { + ) !void { const shell_bin = findShell(env.get("PATH") orelse "", cwd) orelse return error.MissingShell; - const script = original_script; - var copy_script = try std.ArrayList(u8).initCapacity(allocator, script.len); + var copy_script_capacity: usize = original_script.len; + for (passthrough) |part| copy_script_capacity += 1 + part.len; + var copy_script = try std.ArrayList(u8).initCapacity(allocator, copy_script_capacity); // We're going to do this slowly. // Find exact matches of yarn, pnpm, npm - try replacePackageManagerRun(©_script, script); + try replacePackageManagerRun(©_script, original_script); - var combined_script: []u8 = copy_script.items; - - log("Script: \"{s}\"", .{combined_script}); - - if (passthrough.len > 0) { - var combined_script_len = script.len; - for (passthrough) |p| { - combined_script_len += p.len + 1; + for (passthrough) |part| { + try copy_script.append(' '); + if (bun.shell.needsEscapeUtf8AsciiLatin1(part)) { + try bun.shell.escape8Bit(part, ©_script, true); + } else { + try copy_script.appendSlice(part); } - var combined_script_buf = try allocator.alloc(u8, combined_script_len); - bun.copy(u8, combined_script_buf, script); - var remaining_script_buf = combined_script_buf[script.len..]; - for (passthrough) |part| { - const p = part; - remaining_script_buf[0] = ' '; - bun.copy(u8, remaining_script_buf[1..], p); - remaining_script_buf = remaining_script_buf[p.len + 1 ..]; - } - combined_script = combined_script_buf; + } + + log("Script: \"{s}\"", .{copy_script.items}); + + if (!silent) { + Output.prettyErrorln("$ {s}", .{copy_script.items}); + Output.flush(); } if (!use_system_shell) { - if (!silent) { - Output.prettyErrorln("$ {s}", .{combined_script}); - Output.flush(); - } - const mini = bun.JSC.MiniEventLoop.initGlobal(env); - const code = bun.shell.Interpreter.initAndRunFromSource(ctx, mini, name, combined_script) catch |err| { + const code = bun.shell.Interpreter.initAndRunFromSource(ctx, mini, name, copy_script.items) catch |err| { if (!silent) { Output.prettyErrorln("error: Failed to run script {s} due to error {s}", .{ name, @errorName(err) }); } @@ -331,19 +304,20 @@ pub const RunCommand = struct { Global.exit(code); } - return true; + return; } const argv = [_]string{ shell_bin, if (Environment.isWindows) "/c" else "-c", - combined_script, + copy_script.items, }; - if (!silent) { - Output.prettyErrorln("$ {s}", .{combined_script}); - Output.flush(); - } + const ipc_fd = if (!Environment.isWindows) blk: { + const node_ipc_fd = bun.getenvZ("NODE_CHANNEL_FD") orelse break :blk null; + const fd = std.fmt.parseInt(u32, node_ipc_fd, 10) catch break :blk null; + break :blk bun.toFD(@as(i32, @intCast(fd))); + } else null; // TODO: implement on Windows const spawn_result = 
switch ((bun.spawnSync(&.{ .argv = &argv, @@ -357,6 +331,7 @@ pub const RunCommand = struct { .stderr = .inherit, .stdout = .inherit, .stdin = .inherit, + .ipc = ipc_fd, .windows = if (Environment.isWindows) .{ .loop = JSC.EventLoopHandle.init(JSC.MiniEventLoop.initGlobal(env)), @@ -367,7 +342,7 @@ pub const RunCommand = struct { } Output.flush(); - return true; + return; })) { .err => |err| { if (!silent) { @@ -375,7 +350,7 @@ pub const RunCommand = struct { } Output.flush(); - return true; + return; }, .result => |result| result, }; @@ -414,13 +389,13 @@ pub const RunCommand = struct { } Output.flush(); - return true; + return; }, else => {}, } - return true; + return; } /// When printing error messages from 'bun run', attribute bun overridden node.js to bun @@ -838,20 +813,12 @@ pub const RunCommand = struct { const root_dir_info = this_bundler.resolver.readDirInfo(this_bundler.fs.top_level_dir) catch |err| { if (!log_errors) return error.CouldntReadCurrentDirectory; - if (Output.enable_ansi_colors) { - ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), true) catch {}; - } else { - ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), false) catch {}; - } + ctx.log.print(Output.errorWriter()) catch {}; Output.prettyErrorln("error: {s} loading directory {}", .{ @errorName(err), bun.fmt.QuotedFormatter{ .text = this_bundler.fs.top_level_dir } }); Output.flush(); return err; } orelse { - if (Output.enable_ansi_colors) { - ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), true) catch {}; - } else { - ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), false) catch {}; - } + ctx.log.print(Output.errorWriter()) catch {}; Output.prettyErrorln("error loading current directory", .{}); Output.flush(); return error.CouldntReadCurrentDirectory; @@ -1302,7 +1269,7 @@ pub const RunCommand = struct { Run.boot(ctx, ".") catch |err| { bun.handleErrorReturnTrace(err, @errorReturnTrace()); - ctx.log.printForLogLevel(Output.errorWriter()) catch {}; + ctx.log.print(Output.errorWriter()) catch {}; Output.prettyErrorln("error: Failed to run {s} due to error {s}", .{ script_name_to_search, @@ -1397,7 +1364,7 @@ pub const RunCommand = struct { Run.boot(ctx, out_path) catch |err| { bun.handleErrorReturnTrace(err, @errorReturnTrace()); - ctx.log.printForLogLevel(Output.errorWriter()) catch {}; + ctx.log.print(Output.errorWriter()) catch {}; Output.prettyErrorln("error: Failed to run {s} due to error {s}", .{ std.fs.path.basename(file_path), @@ -1441,7 +1408,7 @@ pub const RunCommand = struct { defer ctx.allocator.free(temp_script_buffer); if (scripts.get(temp_script_buffer[1..])) |prescript| { - if (!try runPackageScriptForeground( + try runPackageScriptForeground( ctx, ctx.allocator, prescript, @@ -1451,12 +1418,10 @@ pub const RunCommand = struct { &.{}, ctx.debug.silent, ctx.debug.use_system_shell, - )) { - return false; - } + ); } - if (!try runPackageScriptForeground( + try runPackageScriptForeground( ctx, ctx.allocator, script_content, @@ -1466,12 +1431,12 @@ pub const RunCommand = struct { passthrough, ctx.debug.silent, ctx.debug.use_system_shell, - )) return false; + ); temp_script_buffer[0.."post".len].* = "post".*; if (scripts.get(temp_script_buffer)) |postscript| { - if (!try runPackageScriptForeground( + try runPackageScriptForeground( ctx, ctx.allocator, postscript, @@ -1481,9 +1446,7 @@ pub const RunCommand = struct { &.{}, ctx.debug.silent, ctx.debug.use_system_shell, - )) { - return false; - } + ); } return true; @@ -1498,7 +1461,7 @@ 
pub const RunCommand = struct { Run.boot(ctx, ctx.allocator.dupe(u8, script_name_to_search) catch unreachable) catch |err| { bun.handleErrorReturnTrace(err, @errorReturnTrace()); - ctx.log.printForLogLevel(Output.errorWriter()) catch {}; + ctx.log.print(Output.errorWriter()) catch {}; Output.prettyErrorln("error: Failed to run {s} due to error {s}", .{ std.fs.path.basename(script_name_to_search), @@ -1524,7 +1487,7 @@ pub const RunCommand = struct { const entry_path = entry_point_buf[0 .. cwd.len + trigger.len]; Run.boot(ctx, ctx.allocator.dupe(u8, entry_path) catch return false) catch |err| { - ctx.log.printForLogLevel(Output.errorWriter()) catch {}; + ctx.log.print(Output.errorWriter()) catch {}; Output.prettyErrorln("error: Failed to run {s} due to error {s}", .{ std.fs.path.basename(script_name_to_search), @@ -1641,7 +1604,7 @@ pub const RunCommand = struct { }; Run.boot(ctx, normalized_filename) catch |err| { - ctx.log.printForLogLevel(Output.errorWriter()) catch {}; + ctx.log.print(Output.errorWriter()) catch {}; Output.err(err, "Failed to run script \"{s}\"", .{std.fs.path.basename(normalized_filename)}); Global.exit(1); @@ -1714,7 +1677,7 @@ pub const BunXFastPath = struct { wpath, ) catch return; Run.boot(ctx, utf8) catch |err| { - ctx.log.printForLogLevel(Output.errorWriter()) catch {}; + ctx.log.print(Output.errorWriter()) catch {}; Output.err(err, "Failed to run bin \"{s}\"", .{std.fs.path.basename(utf8)}); Global.exit(1); }; diff --git a/src/cli/test_command.zig b/src/cli/test_command.zig index e0acfbd658..451f1e2a52 100644 --- a/src/cli/test_command.zig +++ b/src/cli/test_command.zig @@ -729,7 +729,7 @@ pub const TestCommand = struct { Output.is_github_action = Output.isGithubAction(); // print the version so you know its doing stuff if it takes a sec - Output.prettyErrorln("bun test v" ++ Global.package_json_version_with_sha ++ "", .{}); + Output.prettyln("bun test v" ++ Global.package_json_version_with_sha ++ "", .{}); Output.flush(); var env_loader = brk: { @@ -741,7 +741,7 @@ pub const TestCommand = struct { break :brk loader; }; bun.JSC.initialize(false); - HTTPThread.init(); + HTTPThread.init(&.{}); var snapshot_file_buf = std.ArrayList(u8).init(ctx.allocator); var snapshot_values = Snapshots.ValuesHashMap.init(ctx.allocator); @@ -1161,11 +1161,7 @@ pub const TestCommand = struct { js_ast.Stmt.Data.Store.reset(); if (vm.log.errors > 0) { - if (Output.enable_ansi_colors) { - vm.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), true) catch {}; - } else { - vm.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), false) catch {}; - } + vm.log.print(Output.errorWriter()) catch {}; vm.log.msgs.clearRetainingCapacity(); vm.log.errors = 0; } @@ -1173,6 +1169,10 @@ pub const TestCommand = struct { Output.flush(); } + // Restore test.only state after each module. 
+ const prev_only = reporter.jest.only; + defer reporter.jest.only = prev_only; + const file_start = reporter.jest.files.len; const resolution = try vm.bundler.resolveEntryPoint(file_name); vm.clearEntryPoint(); @@ -1202,13 +1202,14 @@ pub const TestCommand = struct { reporter.summary.files += 1; switch (promise.status(vm.global.vm())) { - .Rejected => { + .rejected => { _ = vm.unhandledRejection(vm.global, promise.result(vm.global.vm()), promise.asValue()); reporter.summary.fail += 1; if (reporter.jest.bail == reporter.summary.fail) { reporter.printSummary(); Output.prettyError("\nBailed out after {d} failure{s}\n", .{ reporter.jest.bail, if (reporter.jest.bail == 1) "" else "s" }); + Global.exit(1); } @@ -1250,7 +1251,7 @@ pub const TestCommand = struct { if (!jest.Jest.runner.?.has_pending_tests) break; vm.eventLoop().tick(); } else { - vm.eventLoop().tickImmediateTasks(); + vm.eventLoop().tickImmediateTasks(vm); } while (prev_unhandled_count < vm.unhandled_error_counter) { @@ -1283,6 +1284,10 @@ pub const TestCommand = struct { Output.prettyErrorln("\n::endgroup::\n", .{}); Output.flush(); } + + // Ensure these never linger across files. + vm.auto_killer.clear(); + vm.auto_killer.disable(); } if (is_last) { diff --git a/src/cli/upgrade_command.zig b/src/cli/upgrade_command.zig index c5fca9ef54..f399e12607 100644 --- a/src/cli/upgrade_command.zig +++ b/src/cli/upgrade_command.zig @@ -30,7 +30,7 @@ const bundler = bun.bundler; const fs = @import("../fs.zig"); const URL = @import("../url.zig").URL; const HTTP = bun.http; -const ParseJSON = @import("../json_parser.zig").ParseJSONUTF8; +const JSON = bun.JSON; const Archive = @import("../libarchive/libarchive.zig").Archive; const Zlib = @import("../zlib.zig"); const JSPrinter = bun.js_printer; @@ -133,7 +133,7 @@ pub const UpgradeCheckerThread = struct { std.time.sleep(std.time.ns_per_ms * delay); Output.Source.configureThread(); - HTTP.HTTPThread.init(); + HTTP.HTTPThread.init(&.{}); defer { js_ast.Expr.Data.Store.deinit(); @@ -251,7 +251,7 @@ pub const UpgradeCommand = struct { async_http.client.flags.reject_unauthorized = env_loader.getTLSRejectUnauthorized(); if (!silent) async_http.client.progress_node = progress.?; - const response = try async_http.sendSync(true); + const response = try async_http.sendSync(); switch (response.status_code) { 404 => return error.HTTP404, @@ -266,17 +266,13 @@ pub const UpgradeCommand = struct { defer if (comptime silent) log.deinit(); var source = logger.Source.initPathString("releases.json", metadata_body.list.items); initializeStore(); - var expr = ParseJSON(&source, &log, allocator) catch |err| { + var expr = JSON.parseUTF8(&source, &log, allocator) catch |err| { if (!silent) { progress.?.end(); refresher.?.refresh(); if (log.errors > 0) { - if (Output.enable_ansi_colors) { - try log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), true); - } else { - try log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), false); - } + try log.print(Output.errorWriter()); Global.exit(1); } else { @@ -293,11 +289,7 @@ pub const UpgradeCommand = struct { progress.?.end(); refresher.?.refresh(); - if (Output.enable_ansi_colors) { - try log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), true); - } else { - try log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), false); - } + try log.print(Output.errorWriter()); Global.exit(1); } @@ -440,7 +432,7 @@ pub const UpgradeCommand = struct { } fn _exec(ctx: Command.Context) !void { - HTTP.HTTPThread.init(); + 
HTTP.HTTPThread.init(&.{}); var filesystem = try fs.FileSystem.init(null); var env_loader: DotEnv.Loader = brk: { @@ -533,7 +525,7 @@ pub const UpgradeCommand = struct { async_http.client.progress_node = progress; async_http.client.flags.reject_unauthorized = env_loader.getTLSRejectUnauthorized(); - const response = try async_http.sendSync(true); + const response = try async_http.sendSync(); switch (response.status_code) { 404 => { @@ -559,7 +551,7 @@ pub const UpgradeCommand = struct { else => return error.HTTPError, } - const bytes = zip_file_buffer.toOwnedSliceLeaky(); + const bytes = zip_file_buffer.slice(); progress.end(); refresher.refresh(); @@ -996,7 +988,7 @@ pub const upgrade_js_bindings = struct { /// For testing upgrades when the temp directory has an open handle without FILE_SHARE_DELETE. /// Windows only - pub fn jsOpenTempDirWithoutSharingDelete(_: *JSC.JSGlobalObject, _: *JSC.CallFrame) bun.JSC.JSValue { + pub fn jsOpenTempDirWithoutSharingDelete(_: *JSC.JSGlobalObject, _: *JSC.CallFrame) bun.JSError!bun.JSC.JSValue { if (comptime !Environment.isWindows) return .undefined; const w = std.os.windows; @@ -1050,7 +1042,7 @@ pub const upgrade_js_bindings = struct { return .undefined; } - pub fn jsCloseTempDirHandle(_: *JSC.JSGlobalObject, _: *JSC.CallFrame) JSValue { + pub fn jsCloseTempDirHandle(_: *JSC.JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSValue { if (comptime !Environment.isWindows) return .undefined; if (tempdir_fd) |fd| { diff --git a/src/codegen/bake-codegen.ts b/src/codegen/bake-codegen.ts new file mode 100644 index 0000000000..bd8a6b8edd --- /dev/null +++ b/src/codegen/bake-codegen.ts @@ -0,0 +1,203 @@ +import assert from "node:assert"; +import { existsSync, writeFileSync, rmSync, readFileSync } from "node:fs"; +import { watch } from "node:fs/promises"; +import { basename, join } from "node:path"; + +// arg parsing +const options = {}; +for (const arg of process.argv.slice(2)) { + if (!arg.startsWith("--")) { + console.error("Unknown argument " + arg); + process.exit(1); + } + const split = arg.split("="); + const value = split[1] || "true"; + options[split[0].slice(2)] = value; +} + +let { codegen_root, debug, live } = options as any; +if (!codegen_root) { + console.error("Missing --codegen_root=..."); + process.exit(1); +} +if (debug === "false" || debug === "0" || debug == "OFF") debug = false; + +const base_dir = join(import.meta.dirname, "../bake"); +process.chdir(base_dir); // to make bun build predictable in development + +function convertZigEnum(zig: string) { + const startTrigger = "\npub const MessageId = enum(u8) {"; + const start = zig.indexOf(startTrigger) + startTrigger.length; + const endTrigger = /\n pub (inline )?fn |\n};/g; + const end = zig.slice(start).search(endTrigger) + start; + const enumText = zig.slice(start, end); + const values = enumText.replaceAll("\n ", "\n ").replace(/\n\s*(\w+)\s*=\s*'(.+?)',/g, (_, name, value) => { + return `\n ${name} = ${value.charCodeAt(0)},`; + }); + return `/** Generated from DevServer.zig */\nexport const enum MessageId {${values}}`; +} + +async function run() { + const devServerZig = readFileSync(join(base_dir, "DevServer.zig"), "utf-8"); + writeFileSync(join(base_dir, "generated.ts"), convertZigEnum(devServerZig)); + + const results = await Promise.allSettled( + ["client", "server", "error"].map(async file => { + const side = file === "error" ? 
"client" : file; + let result = await Bun.build({ + entrypoints: [join(base_dir, `hmr-runtime-${file}.ts`)], + define: { + side: JSON.stringify(side), + IS_BUN_DEVELOPMENT: String(!!debug), + }, + minify: { + syntax: true, + }, + target: side === 'server' ? 'bun' : 'browser', + }); + if (!result.success) throw new AggregateError(result.logs); + assert(result.outputs.length === 1, "must bundle to a single file"); + // @ts-ignore + let code = await result.outputs[0].text(); + + // A second pass is used to convert global variables into parameters, while + // allowing for renaming to properly function when minification is enabled. + const in_names = [ + file !== "error" && "input_graph", + file !== "error" && "config", + file === "server" && "server_exports", + file === "server" && "$separateSSRGraph", + file === "server" && "$importMeta", + ].filter(Boolean); + const combined_source = + file === "error" + ? code + : ` + __marker__; + ${in_names.length > 0 ? "let" : ""} ${in_names.join(",")}; + __marker__(${in_names.join(",")}); + ${code}; + `; + const generated_entrypoint = join(base_dir, `.runtime-${file}.generated.ts`); + + writeFileSync(generated_entrypoint, combined_source); + + result = await Bun.build({ + entrypoints: [generated_entrypoint], + minify: { + syntax: true, + whitespace: !debug, + identifiers: !debug, + }, + }); + if (!result.success) throw new AggregateError(result.logs); + assert(result.outputs.length === 1, "must bundle to a single file"); + code = (await result.outputs[0].text()).replace(`// ${basename(generated_entrypoint)}`, "").trim(); + + rmSync(generated_entrypoint); + + if (code.includes('export default ')) { + throw new AggregateError([new Error('export default is not allowed in bake codegen. this became a commonjs module!')]); + } + + if (file !== "error") { + let names: string = ""; + code = code + .replace(/(\n?)\s*__marker__.*__marker__\((.+?)\);\s*/s, (_, n, captured) => { + names = captured; + return n; + }) + .trim(); + assert(names, "missing name"); + const split_names = names.split(",").map(x => x.trim()); + const out_names = Object.fromEntries(in_names.map((x, i) => [x, split_names[i]])); + function outName(name) { + if (!out_names[name]) throw new Error(`missing out name for ${name}`); + return out_names[name]; + } + + if (debug) { + code = "\n " + code.replace(/\n/g, "\n ") + "\n"; + } + + if (code[code.length - 1] === ";") code = code.slice(0, -1); + + if (side === "server") { + code = debug + ? `${code} return ${outName('server_exports')};\n` + : `${code};return ${outName('server_exports')};`; + + const params = `${outName('$separateSSRGraph')},${outName('$importMeta')}`; + code = code.replaceAll('import.meta', outName('$importMeta')); + code = `let ${outName('input_graph')}={},${outName('config')}={separateSSRGraph:${outName('$separateSSRGraph')}},${outName('server_exports')};${code}`; + + code = debug ? `((${params}) => {${code}})\n` : `((${params})=>{${code}})\n`; + } else { + code = debug ? `((${names}) => {${code}})({\n` : `((${names})=>{${code}})({`; + } + } + + writeFileSync(join(codegen_root, `bake.${file}.js`), code); + }), + ); + + // print failures in a de-duplicated fashion. 
+ interface Err { + kind: ("client" | "server" | "error")[]; + err: any; + } + const failed = [ + { kind: ["client"], result: results[0] }, + { kind: ["server"], result: results[1] }, + { kind: ["error"], result: results[2] }, + ] + .filter(x => x.result.status === "rejected") + .map(x => ({ kind: x.kind, err: x.result.reason })) as Err[]; + if (failed.length > 0) { + const flattened_errors: Err[] = []; + for (const { kind, err } of failed) { + if (err instanceof AggregateError) { + flattened_errors.push(...err.errors.map(err => ({ kind, err }))); + } + flattened_errors.push({ kind, err }); + } + for (let i = 0; i < flattened_errors.length; i++) { + const x = flattened_errors[i]; + if (!x.err?.message) continue; + for (const other of flattened_errors.slice(0, i)) { + if (other.err?.message === x.err.message || other.err.stack === x.err.stack) { + other.kind = [...x.kind, ...other.kind]; + flattened_errors.splice(i, 1); + i -= 1; + continue; + } + } + } + for (const { kind, err } of flattened_errors) { + const map = { error: "error runtime", client: "client runtime", server: "server runtime" }; + console.error(`Errors while bundling Bake ${kind.map(x => map[x]).join(" and ")}:`); + console.error(err); + } + if (!live) process.exit(1); + } else { + console.log("-> bake.client.js, bake.server.js, bake.error.js"); + + const empty_file = join(codegen_root, "bake_empty_file"); + if (!existsSync(empty_file)) writeFileSync(empty_file, "this is used to fulfill a cmake dependency"); + } +} + +await run(); + +if (live) { + const watcher = watch(base_dir, { recursive: true }) as any; + for await (const event of watcher) { + if (event.filename.endsWith(".zig")) continue; + if (event.filename.startsWith(".")) continue; + try { + await run(); + } catch (e) { + console.log(e); + } + } +} diff --git a/src/codegen/buildTypeFlag.ts b/src/codegen/buildTypeFlag.ts new file mode 100644 index 0000000000..78800cbe3d --- /dev/null +++ b/src/codegen/buildTypeFlag.ts @@ -0,0 +1,18 @@ +const buildTypeFlag = process.argv.find(argv => { + if (argv.startsWith("--build-type=")) { + return argv; + } +}); + +enum BuildType { + debug, + release, +} + +if (buildTypeFlag) { + process.argv.splice(process.argv.indexOf(buildTypeFlag), 1); +} + +let buildType = buildTypeFlag ? 
BuildType[buildTypeFlag.split("=")[1].toLowerCase()] : BuildType.release; + +export { BuildType, buildType }; diff --git a/src/codegen/bundle-functions.ts b/src/codegen/bundle-functions.ts index 461adda7be..7756e44afd 100644 --- a/src/codegen/bundle-functions.ts +++ b/src/codegen/bundle-functions.ts @@ -44,6 +44,7 @@ interface ParsedBuiltin { directives: Record; source: string; async: boolean; + enums: string[]; } interface BundledBuiltin { @@ -74,13 +75,15 @@ async function processFileSplit(filename: string): Promise<{ functions: BundledB // and then compile those separately const consumeWhitespace = /^\s*/; - const consumeTopLevelContent = /^(\/\*|\/\/|type|import|interface|\$|export (?:async )?function|(?:async )?function)/; - const consumeEndOfType = /;|.(?=export|type|interface|\$|\/\/|\/\*|function)/; + const consumeTopLevelContent = + /^(\/\*|\/\/|type|import|interface|\$|const enum|export (?:async )?function|(?:async )?function)/; + const consumeEndOfType = /;|.(?=export|type|interface|\$|\/\/|\/\*|function|const enum)/; const functions: ParsedBuiltin[] = []; let directives: Record = {}; const bundledFunctions: BundledBuiltin[] = []; let internal = false; + const topLevelEnums: { name: string; code: string }[] = []; while (contents.length) { contents = contents.replace(consumeWhitespace, ""); @@ -107,6 +110,16 @@ async function processFileSplit(filename: string): Promise<{ functions: BundledB contents = contents.slice(i + 1); } else if (match[1] === "interface") { contents = sliceSourceCode(contents, false).rest; + } else if (match[1] === "const enum") { + const { result, rest } = sliceSourceCode(contents, false); + const i = result.indexOf("{\n"); + // Support const enums in module scope. + topLevelEnums.push({ + name: result.slice("const enum ".length, i).trim(), + code: "\n" + result, + }); + + contents = rest; } else if (match[1] === "$") { const directive = contents.match(/^\$([a-zA-Z0-9]+)(?:\s*=\s*([^\r\n]+?))?\s*;?\r?\n/); if (!directive) { @@ -148,12 +161,27 @@ async function processFileSplit(filename: string): Promise<{ functions: BundledB globalThis.requireTransformer(x, SRC_DIR + "/" + basename), ); + const source = result.trim().slice(2, -1); + const constEnumsUsedInFunction: string[] = []; + if (topLevelEnums.length) { + // If the function references a top-level const enum let's add the code + // to the top-level scope of the function so that the transpiler will + // inline all the values and strip out the enum object. + for (const { name, code } of topLevelEnums) { + // Only include const enums which are referenced in the function source. + if (source.includes(name)) { + constEnumsUsedInFunction.push(code); + } + } + } + functions.push({ name, params, directives, - source: result.trim().slice(2, -1), + source, async, + enums: constEnumsUsedInFunction, }); contents = rest; directives = {}; @@ -178,7 +206,7 @@ async function processFileSplit(filename: string): Promise<{ functions: BundledB `// @ts-nocheck // GENERATED TEMP FILE - DO NOT EDIT // Sourced from ${path.relative(TMP_DIR, filename)} - +${fn.enums.join("\n")} // do not allow the bundler to rename a symbol to $ ($); @@ -193,6 +221,7 @@ $$capture_start$$(${fn.async ? "async " : ""}${ const build = await Bun.build({ entrypoints: [tmpFile], define, + target: "bun", minify: { syntax: true, whitespace: false }, }); if (!build.success) { @@ -201,7 +230,7 @@ $$capture_start$$(${fn.async ? 
"async " : ""}${ if (build.outputs.length !== 1) { throw new Error("expected one output"); } - const output = await build.outputs[0].text(); + let output = (await build.outputs[0].text()).replaceAll("// @bun\n", ""); let usesDebug = output.includes("$debug_log"); let usesAssert = output.includes("$assert"); const captured = output.match(/\$\$capture_start\$\$([\s\S]+)\.\$\$capture_end\$\$/)![1]; diff --git a/src/codegen/bundle-modules.ts b/src/codegen/bundle-modules.ts index 59b72320bf..9a1d91d25a 100644 --- a/src/codegen/bundle-modules.ts +++ b/src/codegen/bundle-modules.ts @@ -9,15 +9,16 @@ // // For explanation on this, please nag @paperdave to write documentation on how everything works. import fs from "fs"; -import { writeFile, mkdir } from "fs/promises"; -import path from "path"; -import { sliceSourceCode } from "./builtin-parser"; -import { cap, declareASCIILiteral, writeIfNotChanged } from "./helpers"; -import { createAssertClientJS, createLogClientJS } from "./client-js"; +import { mkdir, writeFile } from "fs/promises"; import { builtinModules } from "node:module"; -import { define } from "./replacements"; -import { createInternalModuleRegistry } from "./internal-module-registry-scanner"; +import path from "path"; +import ErrorCode from "../bun.js/bindings/ErrorCode"; +import { sliceSourceCode } from "./builtin-parser"; +import { createAssertClientJS, createLogClientJS } from "./client-js"; import { getJS2NativeCPP, getJS2NativeZig } from "./generate-js2native"; +import { cap, declareASCIILiteral, writeIfNotChanged } from "./helpers"; +import { createInternalModuleRegistry } from "./internal-module-registry-scanner"; +import { define } from "./replacements"; const BASE = path.join(import.meta.dir, "../js"); const debug = process.argv[2] === "--debug=ON"; @@ -116,7 +117,7 @@ for (let i = 0; i < moduleList.length; i++) { ${importStatements.join("\n")} ${processed.result.slice(1).trim()} -$$EXPORT$$(__intrinsic__exports).$$EXPORT_END$$; +;$$EXPORT$$(__intrinsic__exports).$$EXPORT_END$$; `; // Attempt to optimize "$exports = ..." to a variableless return @@ -432,6 +433,31 @@ writeIfNotChanged(path.join(CODEGEN_DIR, "GeneratedJS2Native.h"), getJS2NativeCP const js2nativeZigPath = path.join(import.meta.dir, "../bun.js/bindings/GeneratedJS2Native.zig"); writeIfNotChanged(js2nativeZigPath, getJS2NativeZig(js2nativeZigPath)); +const generatedDTSPath = path.join(CODEGEN_DIR, "generated.d.ts"); +writeIfNotChanged( + generatedDTSPath, + (() => { + let dts = ` +// GENERATED TEMP FILE - DO NOT EDIT +`; + + for (let i = 0; i < ErrorCode.length; i++) { + const [code, _, name] = ErrorCode[i]; + dts += ` +/** + * Generate a ${name} error with the \`code\` property set to ${code}. 
+ * + * @param msg The error message + * @param args Additional arguments + */ +declare function $${code}(msg: string, ...args: any[]): ${name}; +`; + } + + return dts; + })(), +); + mark("Generate Code"); if (!silent) { diff --git a/src/codegen/class-definitions.ts b/src/codegen/class-definitions.ts index 594325b1d0..fe5944fce4 100644 --- a/src/codegen/class-definitions.ts +++ b/src/codegen/class-definitions.ts @@ -29,6 +29,7 @@ export type Field = | ({ fn: string; length?: number; + passThis?: boolean; DOMJIT?: { returns: string; args?: [string, string] | [string, string, string] | [string] | []; @@ -57,6 +58,7 @@ export interface ClassDefinition { values?: string[]; JSType?: string; noConstructor?: boolean; + wantsThis?: boolean; estimatedSize?: boolean; hasPendingActivity?: boolean; isEventEmitter?: boolean; @@ -101,7 +103,21 @@ export function define( estimatedSize, structuredClone, values, - klass: Object.fromEntries(Object.entries(klass).sort(([a], [b]) => a.localeCompare(b))), - proto: Object.fromEntries(Object.entries(proto).sort(([a], [b]) => a.localeCompare(b))), + klass: Object.fromEntries( + Object.entries(klass) + .sort(([a], [b]) => a.localeCompare(b)) + .map(([k, v]) => { + v.DOMJIT = undefined; + return [k, v]; + }), + ), + proto: Object.fromEntries( + Object.entries(proto) + .sort(([a], [b]) => a.localeCompare(b)) + .map(([k, v]) => { + v.DOMJIT = undefined; + return [k, v]; + }), + ), }; } diff --git a/src/codegen/generate-classes.ts b/src/codegen/generate-classes.ts index 61161c3f93..50281b1440 100644 --- a/src/codegen/generate-classes.ts +++ b/src/codegen/generate-classes.ts @@ -1,7 +1,7 @@ // @ts-nocheck import path from "path"; -import type { Field, ClassDefinition } from "./class-definitions"; -import { writeIfNotChanged, camelCase, pascalCase } from "./helpers"; +import type { ClassDefinition, Field } from "./class-definitions"; +import { camelCase, pascalCase, writeIfNotChanged } from "./helpers"; if (process.env.BUN_SILENT === "1") { console.log = () => {}; @@ -132,7 +132,7 @@ static const JSC::DOMJIT::Signature DOMJITSignatureFor${fnName}(${DOMJITName(fnN ); } -function DOMJITFunctionDefinition(jsClassName, fnName, symName, { args }) { +function DOMJITFunctionDefinition(jsClassName, fnName, symName, { args }, fn) { const argNames = args.map((arg, i) => `${argTypeName(arg)} arg${i}`); const formattedArgs = argNames.length > 0 ? `, ${argNames.join(", ")}` : ""; const retArgs = argNames.length > 0 ? `, ${args.map((b, i) => "arg" + i).join(", ")}` : ""; @@ -147,6 +147,24 @@ JSC_DEFINE_JIT_OPERATION(${DOMJITName( CallFrame* callFrame = DECLARE_CALL_FRAME(vm); IGNORE_WARNINGS_END JSC::JITOperationPrologueCallFrameTracer tracer(vm, callFrame); +#if BUN_DEBUG + ${jsClassName}* wrapper = reinterpret_cast<${jsClassName}*>(thisValue); + JSC::EncodedJSValue result = ${DOMJITName(symName)}(wrapper->wrapped(), lexicalGlobalObject${retArgs}); + JSValue decoded = JSValue::decode(result); + if (wrapper->m_${fn}_expectedResultType) { + if (decoded.isCell() && !decoded.isEmpty()) { + ASSERT_WITH_MESSAGE(wrapper->m_${fn}_expectedResultType.value().has_value(), "DOMJIT function return type changed!"); + ASSERT_WITH_MESSAGE(wrapper->m_${fn}_expectedResultType.value().value() == decoded.asCell()->type(), "DOMJIT function return type changed!"); + } else { + ASSERT_WITH_MESSAGE(!wrapper->m_${fn}_expectedResultType.value().has_value(), "DOMJIT function return type changed!"); + } + } else if (!decoded.isEmpty()) { + wrapper->m_${fn}_expectedResultType = decoded.isCell() + ? 
std::optional(decoded.asCell()->type()) + : std::optional(std::nullopt); + } + return { result }; +#endif return {${DOMJITName(symName)}(reinterpret_cast<${jsClassName}*>(thisValue)->wrapped(), lexicalGlobalObject${retArgs})}; } `.trim(); @@ -276,7 +294,7 @@ function propRow( return `{ "${name}"_s, static_cast(JSC::PropertyAttribute::ReadOnly | JSC::PropertyAttribute::CustomAccessor | JSC::PropertyAttribute::DOMAttribute${extraPropertyAttributes}), NoIntrinsic, { HashTableValue::GetterSetterType, ${getter}, 0 } } `.trim(); } else if (getter && !supportsObjectCreate && writable) { - return `{ "${name}"_s, static_cast(JSC::PropertyAttribute::CustomAccessor | JSC::PropertyAttribute::DOMAttribute${extraPropertyAttributes}), NoIntrinsic, { HashTableValue::GetterSetterType, ${getter}, ${setter} } } + return `{ "${name}"_s, static_cast(JSC::PropertyAttribute::CustomAccessor | JSC::PropertyAttribute::DOMAttribute${extraPropertyAttributes}), NoIntrinsic, { HashTableValue::GetterSetterType, ${getter}, ${setter} } } `.trim(); } else if (getter && supportsObjectCreate) { setter = getter.replace("Get", "Set"); @@ -338,11 +356,17 @@ function generatePrototype(typeName, obj) { if (obj.construct) { externs += ` -extern JSC_CALLCONV void* JSC_HOST_CALL_ATTRIBUTES ${classSymbolName(typeName, "construct")}(JSC::JSGlobalObject*, JSC::CallFrame*); +extern JSC_CALLCONV void* JSC_HOST_CALL_ATTRIBUTES ${classSymbolName(typeName, "construct")}(JSC::JSGlobalObject*, JSC::CallFrame*); JSC_DECLARE_CUSTOM_GETTER(js${typeName}Constructor); `; } + if (obj.wantsThis) { + externs += ` +extern JSC_CALLCONV void* JSC_HOST_CALL_ATTRIBUTES ${classSymbolName(typeName, "_setThis")}(JSC::JSGlobalObject*, void*, JSC::EncodedJSValue); +`; + } + if (obj.structuredClone) { externs += `extern JSC_CALLCONV void JSC_HOST_CALL_ATTRIBUTES ${symbolName( @@ -600,7 +624,7 @@ JSC::EncodedJSValue JSC_HOST_CALL_ATTRIBUTES ${name}::construct(JSC::JSGlobalObj auto* constructor = globalObject->${className(typeName)}Constructor(); Structure* structure = globalObject->${className(typeName)}Structure(); if (UNLIKELY(constructor != newTarget)) { - auto* functionGlobalObject = reinterpret_cast( + auto* functionGlobalObject = defaultGlobalObject( // ShadowRealm functions belong to a different global object. getFunctionRealm(globalObject, newTarget) ); @@ -627,7 +651,15 @@ JSC::EncodedJSValue JSC_HOST_CALL_ATTRIBUTES ${name}::construct(JSC::JSGlobalObj : "" } - RELEASE_AND_RETURN(scope, JSValue::encode(instance)); + auto value = JSValue::encode(instance); +${ + obj.wantsThis + ? ` + ${classSymbolName(typeName, "_setThis")}(globalObject, ptr, value); +` + : "" +} + RELEASE_AND_RETURN(scope, value); } void ${name}::initializeProperties(VM& vm, JSC::JSGlobalObject* globalObject, ${prototypeName(typeName)}* prototype) @@ -777,11 +809,11 @@ function renderCallbacksZig(typeName, callbacks: Record) { out += "\n};\n"; out += ` - + pub fn callbacks(_: *const ${typeName}, instance: JSC.JSValue) Callbacks { return .{.instance = instance }; } - + `; return "\n" + out; @@ -831,7 +863,8 @@ function renderDecls(symbolName, typeName, proto, supportsObjectCreate = false) `extern JSC_CALLCONV JSC::EncodedJSValue JSC_HOST_CALL_ATTRIBUTES ${symbolName( typeName, proto[name].fn, - )}(void* ptr, JSC::JSGlobalObject* lexicalGlobalObject, JSC::CallFrame* callFrame);` + "\n"; + )}(void* ptr, JSC::JSGlobalObject* lexicalGlobalObject, JSC::CallFrame* callFrame${proto[name].passThis ? 
", JSC::EncodedJSValue thisValue" : ""});` + + "\n"; rows.push( ` JSC_DECLARE_HOST_FUNCTION(${symbolName(typeName, name)}Callback); @@ -852,6 +885,7 @@ function renderDecls(symbolName, typeName, proto, supportsObjectCreate = false) symbolName(typeName, name), symbolName(typeName, proto[name].fn), proto[name].DOMJIT, + proto[name].fn, ), ); } @@ -993,10 +1027,10 @@ JSC_DEFINE_CUSTOM_GETTER(${symbolName(typeName, name)}GetterWrap, (JSGlobalObjec } JSC::EnsureStillAliveScope thisArg = JSC::EnsureStillAliveScope(thisObject); - + if (JSValue cachedValue = thisObject->${cacheName}.get()) return JSValue::encode(cachedValue); - + JSC::JSValue result = JSC::JSValue::decode( ${symbolName(typeName, proto[name].getter)}(thisObject->wrapped(),${ proto[name].this!! ? " thisValue, " : "" @@ -1088,22 +1122,23 @@ JSC_DEFINE_CUSTOM_SETTER(${symbolName(typeName, name)}SetterWrap, (JSGlobalObjec } if ("fn" in proto[name]) { + const fn = proto[name].fn; rows.push(` JSC_DEFINE_HOST_FUNCTION(${symbolName(typeName, name)}Callback, (JSGlobalObject * lexicalGlobalObject, CallFrame* callFrame)) { auto& vm = lexicalGlobalObject->vm(); + auto scope = DECLARE_THROW_SCOPE(vm); ${className(typeName)}* thisObject = jsDynamicCast<${className(typeName)}*>(callFrame->thisValue()); if (UNLIKELY(!thisObject)) { - auto throwScope = DECLARE_THROW_SCOPE(vm); - throwVMTypeError(lexicalGlobalObject, throwScope, "Expected 'this' to be instanceof ${typeName}"_s); - return JSValue::encode({}); + scope.throwException(lexicalGlobalObject, Bun::createInvalidThisError(lexicalGlobalObject, callFrame->thisValue(), "${typeName}"_s)); + return {}; } JSC::EnsureStillAliveScope thisArg = JSC::EnsureStillAliveScope(thisObject); -#ifdef BUN_DEBUG +#if BUN_DEBUG /** View the file name of the JS file that called this function * from a debugger */ SourceOrigin sourceOrigin = callFrame->callerSourceOrigin(vm); @@ -1113,14 +1148,33 @@ JSC_DEFINE_HOST_FUNCTION(${symbolName(typeName, name)}Callback, (JSGlobalObject lastFileName = fileName; } - JSC::EncodedJSValue result = ${symbolName(typeName, proto[name].fn)}(thisObject->wrapped(), lexicalGlobalObject, callFrame); + JSC::EncodedJSValue result = ${symbolName(typeName, fn)}(thisObject->wrapped(), lexicalGlobalObject, callFrame${proto[name].passThis ? ", JSValue::encode(thisObject)" : ""}); ASSERT_WITH_MESSAGE(!JSValue::decode(result).isEmpty() or DECLARE_CATCH_SCOPE(vm).exception() != 0, \"${typeName}.${proto[name].fn} returned an empty value without an exception\"); + ${ + !proto[name].DOMJIT + ? "" + : ` + JSValue decoded = JSValue::decode(result); + if (thisObject->m_${fn}_expectedResultType) { + if (decoded.isCell() && !decoded.isEmpty()) { + ASSERT_WITH_MESSAGE(thisObject->m_${fn}_expectedResultType.value().has_value(), "DOMJIT function return type changed!"); + ASSERT_WITH_MESSAGE(thisObject->m_${fn}_expectedResultType.value().value() == decoded.asCell()->type(), "DOMJIT function return type changed!"); + } else { + ASSERT_WITH_MESSAGE(!thisObject->m_${fn}_expectedResultType.value().has_value(), "DOMJIT function return type changed!"); + } + } else if (!decoded.isEmpty()) { + thisObject->m_${fn}_expectedResultType = decoded.isCell() + ? std::optional(decoded.asCell()->type()) + : std::optional(std::nullopt); + }` + } + return result; #endif - return ${symbolName(typeName, proto[name].fn)}(thisObject->wrapped(), lexicalGlobalObject, callFrame); + return ${symbolName(typeName, proto[name].fn)}(thisObject->wrapped(), lexicalGlobalObject, callFrame${proto[name].passThis ? 
", JSValue::encode(thisObject)" : ""}); } `); @@ -1264,6 +1318,8 @@ function generateClassHeader(typeName, obj: ClassDefinition) { }) .join("\n")} + ${domJITTypeCheckFields(proto, klass)} + ${weakOwner} ${DECLARE_VISIT_CHILDREN} @@ -1275,6 +1331,23 @@ function generateClassHeader(typeName, obj: ClassDefinition) { `.trim(); } +function domJITTypeCheckFields(proto, klass) { + var output = "#if BUN_DEBUG\n"; + for (const name in proto) { + const { DOMJIT, fn } = proto[name]; + if (!DOMJIT) continue; + output += `std::optional> m_${fn}_expectedResultType = std::nullopt;\n`; + } + + for (const name in klass) { + const { DOMJIT, fn } = klass[name]; + if (!DOMJIT) continue; + output += `std::optional> m_${fn}_expectedResultType = std::nullopt;\n`; + } + output += "#endif\n"; + return output; +} + function generateClassImpl(typeName, obj: ClassDefinition) { const { klass: fields, @@ -1450,7 +1523,7 @@ extern JSC_CALLCONV void* JSC_HOST_CALL_ATTRIBUTES ${typeName}__fromJSDirect(JSC Zig::GlobalObject* globalObject = jsDynamicCast(object->globalObject()); - if (UNLIKELY(globalObject == nullptr || cell->structureID() != globalObject->${className(typeName)}Structure()->id())) { + if (UNLIKELY(globalObject == nullptr || cell->structureID() != globalObject->${className(typeName)}Structure()->id())) { return nullptr; } @@ -1547,6 +1620,7 @@ function generateZig( construct, finalize, noConstructor = false, + wantsThis = false, overridesToJS = false, estimatedSize, call = false, @@ -1617,8 +1691,8 @@ function generateZig( function renderMethods() { const exports = new Map(); var output = ` -const JavaScriptCoreBindings = struct { - +const JavaScriptCoreBindings = struct { + `; if (estimatedSize) { @@ -1652,9 +1726,19 @@ const JavaScriptCoreBindings = struct { if (construct && !noConstructor) { exports.set("construct", classSymbolName(typeName, "construct")); output += ` - pub fn ${classSymbolName(typeName, "construct")}(globalObject: *JSC.JSGlobalObject, callFrame: *JSC.CallFrame) callconv(JSC.conv) ?*${typeName} { + pub fn ${classSymbolName(typeName, "construct")}(globalObject: *JSC.JSGlobalObject, callFrame: *JSC.CallFrame) callconv(JSC.conv) ?*anyopaque { if (comptime Environment.enable_logs) zig("new ${typeName}({})", .{callFrame}); - return @call(.always_inline, ${typeName}.constructor, .{globalObject, callFrame}); + return @call(.always_inline, wrapConstructor(${typeName}, ${typeName}.constructor), .{globalObject, callFrame}); + } + `; + } + + if (construct && !noConstructor && wantsThis) { + exports.set("_setThis", classSymbolName(typeName, "_setThis")); + output += ` + pub fn ${classSymbolName(typeName, "_setThis")}(globalObject: *JSC.JSGlobalObject, ptr: *anyopaque, this: JSC.JSValue) callconv(JSC.conv) void { + const real: *${typeName} = @ptrCast(@alignCast(ptr)); + real.this_value.set(globalObject, this); } `; } @@ -1664,7 +1748,10 @@ const JavaScriptCoreBindings = struct { output += ` pub fn ${classSymbolName(typeName, "call")}(globalObject: *JSC.JSGlobalObject, callFrame: *JSC.CallFrame) callconv(JSC.conv) JSC.JSValue { if (comptime Environment.enable_logs) zig("${typeName}({})", .{callFrame}); - return @call(.always_inline, ${typeName}.call, .{globalObject, callFrame}); + return @call(.always_inline, ${typeName}.call, .{globalObject, callFrame}) catch |err| switch (err) { + error.JSError => .zero, + error.OutOfMemory => globalObject.throwOutOfMemoryValue(), + }; } `; } @@ -1715,9 +1802,12 @@ const JavaScriptCoreBindings = struct { } output += ` - pub fn ${names.fn}(thisValue: *${typeName}, 
globalObject: *JSC.JSGlobalObject, callFrame: *JSC.CallFrame) callconv(JSC.conv) JSC.JSValue { + pub fn ${names.fn}(thisValue: *${typeName}, globalObject: *JSC.JSGlobalObject, callFrame: *JSC.CallFrame${proto[name].passThis ? ", js_this_value: JSC.JSValue" : ""}) callconv(JSC.conv) JSC.JSValue { if (comptime Environment.enable_logs) zig("${typeName}.${name}({})", .{callFrame}); - return @call(.always_inline, ${typeName}.${fn}, .{thisValue, globalObject, callFrame}); + return @call(.always_inline, ${typeName}.${fn}, .{thisValue, globalObject, callFrame${proto[name].passThis ? ", js_this_value" : ""}}) catch |err| switch (err) { + error.JSError => .zero, + error.OutOfMemory => globalObject.throwOutOfMemoryValue(), + }; } `; } @@ -1764,7 +1854,10 @@ const JavaScriptCoreBindings = struct { output += ` pub fn ${names.fn}(globalObject: *JSC.JSGlobalObject, callFrame: *JSC.CallFrame) callconv(JSC.conv) JSC.JSValue { if (comptime Environment.enable_logs) JSC.markBinding(@src()); - return @call(.always_inline, ${typeName}.${fn}, .{globalObject, callFrame}); + return @call(.always_inline, ${typeName}.${fn}, .{globalObject, callFrame}) catch |err| switch (err) { + error.JSError => .zero, + error.OutOfMemory => globalObject.throwOutOfMemoryValue(), + }; } `; } @@ -1983,6 +2076,8 @@ const GENERATED_CLASSES_IMPL_HEADER_PRE = ` #include "JSDOMConvertBufferSource.h" #include "ZigGeneratedClasses.h" +#include "ErrorCode+List.h" +#include "ErrorCode.h" #if !OS(WINDOWS) #define JSC_CALLCONV "C" @@ -1998,6 +2093,7 @@ namespace WebCore { using namespace JSC; using namespace Zig; + `; const GENERATED_CLASSES_IMPL_FOOTER = ` @@ -2051,6 +2147,13 @@ const Environment = bun.Environment; const std = @import("std"); const zig = bun.Output.scoped(.zig, true); +const wrapHostFunction = bun.gen_classes_lib.wrapHostFunction; +const wrapMethod = bun.gen_classes_lib.wrapMethod; +const wrapMethodWithThis = bun.gen_classes_lib.wrapMethodWithThis; +const wrapConstructor = bun.gen_classes_lib.wrapConstructor; +const wrapGetterCallback = bun.gen_classes_lib.wrapGetterCallback; +const wrapGetterWithValueCallback = bun.gen_classes_lib.wrapGetterWithValueCallback; + pub const StaticGetterType = fn(*JSC.JSGlobalObject, JSC.JSValue, JSC.JSValue) callconv(JSC.conv) JSC.JSValue; pub const StaticSetterType = fn(*JSC.JSGlobalObject, JSC.JSValue, JSC.JSValue, JSC.JSValue) callconv(JSC.conv) bool; pub const StaticCallbackType = JSC.JSHostFunctionType; diff --git a/src/codegen/generate-js2native.ts b/src/codegen/generate-js2native.ts index 1d9ffee55c..e3b3c06c33 100644 --- a/src/codegen/generate-js2native.ts +++ b/src/codegen/generate-js2native.ts @@ -18,7 +18,7 @@ interface NativeCall { interface WrapperCall { type: NativeCallType; wrap_kind: "new-function"; - symbol_taget: string; + symbol_target: string; symbol_generated: string; display_name: string; call_length: number; @@ -91,7 +91,7 @@ export function registerNativeCall( wrapperCalls.push({ type: call_type, wrap_kind: "new-function", - symbol_taget: symbol, + symbol_target: symbol, symbol_generated: "js2native_wrap_" + symbol.replace(/[^A-Za-z]/g, "_"), display_name: callBaseName(symbol), call_length: create_fn_len, @@ -135,7 +135,7 @@ export function getJS2NativeCPP() { call => ( externs.push(`extern "C" SYSV_ABI JSC::EncodedJSValue ${symbol(call)}_workaround(Zig::GlobalObject*);` + "\n"), [ - `JSC::JSValue ${symbol(call)}(Zig::GlobalObject* global) {`, + `static ALWAYS_INLINE JSC::JSValue ${symbol(call)}(Zig::GlobalObject* global) {`, ` return 
JSValue::decode(${symbol(call)}_workaround(global));`, `}` + "\n\n", ] @@ -149,14 +149,20 @@ export function getJS2NativeCPP() { externs.push( `BUN_DECLARE_HOST_FUNCTION(${symbol({ type: "zig", - symbol: x.symbol_taget, + symbol: x.symbol_target, + filename: x.filename, })});`, ), "") || "", - `JSC::JSValue ${x.symbol_generated}(Zig::GlobalObject* globalObject) {`, + `static ALWAYS_INLINE JSC::JSValue ${x.symbol_generated}(Zig::GlobalObject* globalObject) {`, ` return JSC::JSFunction::create(globalObject->vm(), globalObject, ${x.call_length}, ${JSON.stringify( x.display_name, - )}_s, ${symbol({ type: x.type, symbol: x.symbol_taget })}, JSC::ImplementationVisibility::Public);`, + )}_s, ${symbol({ + type: x.type, + symbol: x.symbol_target, + + filename: x.filename, + })}, JSC::ImplementationVisibility::Public);`, `}`, ].join("\n"); } @@ -175,11 +181,15 @@ export function getJS2NativeCPP() { ...nativeCallStrings, ...wrapperCallStrings, `typedef JSC::JSValue (*JS2NativeFunction)(Zig::GlobalObject*);`, - `static JS2NativeFunction js2nativePointers[] = {`, - ...nativeCalls.map(x => ` ${cppPointer(x)},`), - `};`, - `};`, + `static ALWAYS_INLINE JSC::JSValue callJS2Native(int32_t index, Zig::GlobalObject* global) {`, + ` switch(index) {`, + ...nativeCalls.map(x => ` case ${x.id}: return ${symbol(x)}(global);`), + ` default:`, + ` __builtin_unreachable();`, + ` }`, + `}`, `#define JS2NATIVE_COUNT ${nativeCalls.length}`, + "}", ].join("\n"); } @@ -191,9 +201,9 @@ export function getJS2NativeZig(gs2NativeZigPath: string) { .filter(x => x.type === "zig") .flatMap(call => [ `export fn ${symbol(call)}_workaround(global: *JSC.JSGlobalObject) callconv(JSC.conv) JSC.JSValue {`, - ` return @import(${JSON.stringify(path.relative(path.dirname(gs2NativeZigPath), call.filename))}).${ + ` return global.errorUnionToCPP(@import(${JSON.stringify(path.relative(path.dirname(gs2NativeZigPath), call.filename))}).${ call.symbol - }(global);`, + }(global));`, "}", ]), ...wrapperCalls @@ -201,19 +211,17 @@ export function getJS2NativeZig(gs2NativeZigPath: string) { .flatMap(x => [ `export fn ${symbol({ type: "zig", - symbol: x.symbol_taget, + symbol: x.symbol_target, + filename: x.filename, })}(global: *JSC.JSGlobalObject, call_frame: *JSC.CallFrame) callconv(JSC.conv) JSC.JSValue {`, ` const function = @import(${JSON.stringify(path.relative(path.dirname(gs2NativeZigPath), x.filename))}); - return @call(.always_inline, function.${x.symbol_taget}, .{global, call_frame});`, + return @call(.always_inline, function.${x.symbol_target}, .{global, call_frame}) catch |err| switch (err) { + error.JSError => .zero, + error.OutOfMemory => global.throwOutOfMemoryValue(), + };`, "}", ]), - "comptime {", - ...nativeCalls.filter(x => x.type === "zig").flatMap(call => ` _ = &${symbol(call)}_workaround;`), - ...wrapperCalls - .filter(x => x.type === "zig") - .flatMap(x => ` _ = &${symbol({ type: "zig", symbol: x.symbol_taget })};`), - "}", ].join("\n"); } diff --git a/src/codegen/generate-jssink.ts b/src/codegen/generate-jssink.ts index 69e0699164..b8e760e3fb 100644 --- a/src/codegen/generate-jssink.ts +++ b/src/codegen/generate-jssink.ts @@ -1,4 +1,4 @@ -import { resolve, join } from "path"; +import { join, resolve } from "path"; const classes = ["ArrayBufferSink", "FileSink", "HTTPResponseSink", "HTTPSResponseSink"]; @@ -896,13 +896,14 @@ extern "C" void ${name}__onReady(JSC__JSValue controllerValue, JSC__JSValue amt, if (!function) return; JSC::JSGlobalObject *globalObject = controller->globalObject(); - + auto scope = 
DECLARE_THROW_SCOPE(globalObject->vm()); JSC::MarkedArgumentBuffer arguments; arguments.append(controller); arguments.append(JSC::JSValue::decode(amt)); arguments.append(JSC::JSValue::decode(offset)); AsyncContextFrame::call(globalObject, function, JSC::jsUndefined(), arguments); + RELEASE_AND_RETURN(scope, void()); } extern "C" void ${name}__onStart(JSC__JSValue controllerValue) @@ -920,13 +921,14 @@ extern "C" void ${name}__onClose(JSC__JSValue controllerValue, JSC__JSValue reas // only call close once controller->m_onClose.clear(); JSC::JSGlobalObject* globalObject = controller->globalObject(); + auto scope = DECLARE_THROW_SCOPE(globalObject->vm()); JSC::MarkedArgumentBuffer arguments; auto readableStream = controller->m_weakReadableStream.get(); arguments.append(readableStream ? readableStream : JSC::jsUndefined()); - arguments.append(JSC::JSValue::decode(reason)); AsyncContextFrame::call(globalObject, function, JSC::jsUndefined(), arguments); + RELEASE_AND_RETURN(scope, void()); } `; diff --git a/src/codegen/generate-node-errors.ts b/src/codegen/generate-node-errors.ts index 838fc32f90..41b5de47c7 100644 --- a/src/codegen/generate-node-errors.ts +++ b/src/codegen/generate-node-errors.ts @@ -1,6 +1,6 @@ +import path from "node:path"; import NodeErrors from "../bun.js/bindings/ErrorCode.ts"; const outputDir = process.argv[2]; -import path from "node:path"; if (!outputDir) { throw new Error("Missing output directory"); @@ -39,10 +39,8 @@ const std = @import("std"); const bun = @import("root").bun; const JSC = bun.JSC; -fn ErrorBuilder(comptime code_: Error, comptime fmt_: [:0]const u8, Args: type) type { +fn ErrorBuilder(comptime code: Error, comptime fmt: [:0]const u8, Args: type) type { return struct { - const code = code_; - const fmt = fmt_; globalThis: *JSC.JSGlobalObject, args: Args, @@ -60,7 +58,6 @@ fn ErrorBuilder(comptime code_: Error, comptime fmt_: [:0]const u8, Args: type) pub inline fn reject(this: @This()) JSC.JSValue { return JSC.JSPromise.rejectedPromiseValue(this.globalThis, code.fmt(this.globalThis, fmt, this.args)); } - }; } diff --git a/src/codegen/helpers.ts b/src/codegen/helpers.ts index a97007b767..bfd6cdafcc 100644 --- a/src/codegen/helpers.ts +++ b/src/codegen/helpers.ts @@ -1,6 +1,6 @@ +import { isAscii } from "buffer"; import fs from "fs"; import path from "path"; -import { isAscii } from "buffer"; // MSVC has a max of 16k characters per string literal // Combining string literals didn't support constexpr apparently @@ -75,13 +75,14 @@ export function checkAscii(str: string) { export function writeIfNotChanged(file: string, contents: string) { if (Array.isArray(contents)) contents = contents.join(""); + contents = contents.replaceAll("\r\n", "\n").trim() + "\n"; - if (fs.existsSync(file)) { + try { const oldContents = fs.readFileSync(file, "utf8"); if (oldContents === contents) { return; } - } + } catch (e) {} try { fs.writeFileSync(file, contents); diff --git a/src/codegen/internal-module-registry-scanner.ts b/src/codegen/internal-module-registry-scanner.ts index 5f1dee4621..71458fd2e2 100644 --- a/src/codegen/internal-module-registry-scanner.ts +++ b/src/codegen/internal-module-registry-scanner.ts @@ -29,19 +29,14 @@ export function createInternalModuleRegistry(basedir: string) { moduleList.push("internal-for-testing.ts"); internalRegistry.set("bun:internal-for-testing", moduleList.length - 1); - // Native Module registry - const nativeModuleH = fs.readFileSync(path.join(basedir, "../bun.js/modules/_NativeModule.h"), "utf8"); - const nativeModuleDefine = 
nativeModuleH.match(/BUN_FOREACH_NATIVE_MODULE\(macro\)\s*\\\n((.*\\\n)*\n)/); - if (!nativeModuleDefine) { - throw new Error( - "Could not find BUN_FOREACH_NATIVE_MODULE in _NativeModule.h. Knowing native module IDs is a part of the codegen process.", - ); - } let nextNativeModuleId = 0; const nativeModuleIds: Record = {}; const nativeModuleEnums: Record = {}; const nativeModuleEnumToId: Record = {}; - for (const [_, idString, enumValue] of nativeModuleDefine[0].matchAll(/macro\((.*?),(.*?)\)/g)) { + + // Native Module registry + const nativeModuleH = fs.readFileSync(path.join(basedir, "../bun.js/modules/_NativeModule.h"), "utf8"); + for (const [_, idString, enumValue] of nativeModuleH.matchAll(/macro\((.*?),(.*?)\)/g)) { const processedIdString = JSON.parse(idString.trim().replace(/_s$/, "")); const processedEnumValue = enumValue.trim(); const processedNumericId = nextNativeModuleId++; @@ -50,6 +45,12 @@ export function createInternalModuleRegistry(basedir: string) { nativeModuleEnumToId[processedEnumValue] = processedNumericId; } + if (nextNativeModuleId === 0) { + throw new Error( + "Could not find BUN_FOREACH_ESM_AND_CJS_NATIVE_MODULE in _NativeModule.h. Knowing native module IDs is a part of the codegen process.", + ); + } + function codegenRequireId(id: string) { return `(__intrinsic__getInternalField(__intrinsic__internalModuleRegistry, ${id}) || __intrinsic__createInternalModuleById(${id}))`; } diff --git a/src/codegen/replacements.ts b/src/codegen/replacements.ts index 35b005aea9..025f0f854d 100644 --- a/src/codegen/replacements.ts +++ b/src/codegen/replacements.ts @@ -1,4 +1,5 @@ import { LoaderKeys } from "../api/schema"; +import NodeErrors from "../bun.js/bindings/ErrorCode.ts"; import { sliceSourceCode } from "./builtin-parser"; import { registerNativeCall } from "./generate-js2native"; @@ -12,6 +13,14 @@ export const replacements: ReplacementRule[] = [ { from: /\bexport\s*default/g, to: "$exports =" }, ]; +for (let i = 0; i < NodeErrors.length; i++) { + const [code] = NodeErrors[i]; + replacements.push({ + from: new RegExp(`\\b\\__intrinsic__${code}\\(`, "g"), + to: `$makeErrorWithCode(${i}, `, + }); +} + // These rules are run on the entire file, including within strings. export const globalReplacements: ReplacementRule[] = [ { @@ -111,7 +120,7 @@ for (const name in enums) { if (typeof value === null) throw new Error("Invalid enum object " + name + " defined in " + import.meta.file); const keys = Array.isArray(value) ? 
value : Object.keys(value).filter(k => !k.match(/^[0-9]+$/)); define[`$${name}IdToLabel`] = "[" + keys.map(k => `"${k}"`).join(", ") + "]"; - define[`$${name}LabelToId`] = "{" + keys.map(k => `"${k}": ${keys.indexOf(k)}`).join(", ") + "}"; + define[`$${name}LabelToId`] = "{" + keys.map(k => `"${k}": ${keys.indexOf(k) + 1}`).join(", ") + "}"; } for (const name of globalsToPrefix) { @@ -131,7 +140,11 @@ export interface ReplacementRule { global?: boolean; } -export const function_replacements = ["$debug", "$assert", "$zig", "$newZigFunction", "$cpp", "$newCppFunction"]; +export const function_replacements = [ + "$debug", "$assert", "$zig", "$newZigFunction", "$cpp", "$newCppFunction", + "$isPromiseResolved", +]; +const function_regexp = new RegExp(`__intrinsic__(${function_replacements.join("|").replaceAll('$', '')})`); /** Applies source code replacements as defined in `replacements` */ export function applyReplacements(src: string, length: number) { @@ -143,7 +156,7 @@ export function applyReplacements(src: string, length: number) { } let match; if ( - (match = slice.match(/__intrinsic__(debug|assert|zig|cpp|newZigFunction|newCppFunction)$/)) && + (match = slice.match(function_regexp)) && rest.startsWith("(") ) { const name = match[1]; @@ -213,6 +226,18 @@ export function applyReplacements(src: string, length: number) { const id = registerNativeCall(kind, args[0], args[1], is_create_fn ? args[2] : undefined); return [slice.slice(0, match.index) + "__intrinsic__lazy(" + id + ")", inner.rest, true]; + } else if (name === "isPromiseResolved") { + const inner = sliceSourceCode(rest, true); + let args; + if (debug) { + // use a property on @lazy as a temporary holder for the expression. only in debug! + args = `($assert(__intrinsic__isPromise(__intrinsic__lazy.temp=${inner.result.slice(0, -1)}))),(__intrinsic__getPromiseInternalField(__intrinsic__lazy.temp, __intrinsic__promiseFieldFlags) & __intrinsic__promiseStateMask) === (__intrinsic__lazy.temp = undefined, __intrinsic__promiseStateFulfilled))`; + } else { + args = `((__intrinsic__getPromiseInternalField(${inner.result.slice(0,-1)}), __intrinsic__promiseFieldFlags) & __intrinsic__promiseStateMask) === __intrinsic__promiseStateFulfilled)`; + } + return [slice.slice(0, match.index) + args, inner.rest, true]; + } else { + throw new Error("Unknown preprocessor macro " + name); } } return [slice, rest, false]; diff --git a/src/compile_target.zig b/src/compile_target.zig index 67d0c0aab1..cf0f0acc20 100644 --- a/src/compile_target.zig +++ b/src/compile_target.zig @@ -19,7 +19,7 @@ version: bun.Semver.Version = .{ .minor = @truncate(Environment.version.minor), .patch = @truncate(Environment.version.patch), }, -libc: Libc = .default, +libc: Libc = if (!Environment.isMusl) .default else .musl, const Libc = enum { /// The default libc for the target @@ -137,7 +137,7 @@ const HTTP = bun.http; const MutableString = bun.MutableString; const Global = bun.Global; pub fn downloadToPath(this: *const CompileTarget, env: *bun.DotEnv.Loader, allocator: std.mem.Allocator, dest_z: [:0]const u8) !void { - HTTP.HTTPThread.init(); + HTTP.HTTPThread.init(&.{}); var refresher = bun.Progress{}; { @@ -170,7 +170,7 @@ pub fn downloadToPath(this: *const CompileTarget, env: *bun.DotEnv.Loader, alloc async_http.client.progress_node = progress; async_http.client.flags.reject_unauthorized = env.getTLSRejectUnauthorized(); - const response = try async_http.sendSync(true); + const response = try async_http.sendSync(); switch (response.status_code) { 404 => { @@ -254,13 +254,13 
@@ pub fn downloadToPath(this: *const CompileTarget, env: *bun.DotEnv.Loader, alloc var node = refresher.start("Extracting", 0); defer node.end(); - const libarchive = @import("./libarchive//libarchive.zig"); + const libarchive = bun.libarchive; var tmpname_buf: [1024]u8 = undefined; const tempdir_name = bun.span(try bun.fs.FileSystem.instance.tmpname("tmp", &tmpname_buf, bun.fastRandom())); var tmpdir = try std.fs.cwd().makeOpenPath(tempdir_name, .{}); defer tmpdir.close(); defer std.fs.cwd().deleteTree(tempdir_name) catch {}; - _ = libarchive.Archive.extractToDir( + _ = libarchive.Archiver.extractToDir( tarball_bytes.items, tmpdir, null, @@ -429,6 +429,8 @@ pub fn defineValues(this: *const CompileTarget) []const []const u8 { .arm64 => "\"arm64\"", else => @compileError("TODO"), }, + + "\"" ++ Global.package_json_version ++ "\"", }; }.values, else => @panic("TODO"), diff --git a/src/comptime_string_map.zig b/src/comptime_string_map.zig index 0dad921d9f..f5cecafc37 100644 --- a/src/comptime_string_map.zig +++ b/src/comptime_string_map.zig @@ -173,7 +173,8 @@ pub fn ComptimeStringMapWithKeyType(comptime KeyType: type, comptime V: type, co } } - const str = bun.String.tryFromJS(input, globalThis) orelse return null; + const str = bun.String.fromJS(input, globalThis); + bun.assert(str.tag != .Dead); defer str.deref(); return getWithEql(str, bun.String.eqlComptime); } @@ -186,11 +187,40 @@ pub fn ComptimeStringMapWithKeyType(comptime KeyType: type, comptime V: type, co } } - const str = bun.String.tryFromJS(input, globalThis) orelse return null; + const str = bun.String.fromJS(input, globalThis); + bun.assert(str.tag != .Dead); defer str.deref(); return str.inMapCaseInsensitive(@This()); } + pub fn getASCIIICaseInsensitive(input: anytype) ?V { + return getWithEqlLowercase(input, bun.strings.eqlComptimeIgnoreLen); + } + + pub fn getWithEqlLowercase(input: anytype, comptime eql: anytype) ?V { + const Input = @TypeOf(input); + const length = if (@hasField(Input, "len")) input.len else input.length(); + if (length < precomputed.min_len or length > precomputed.max_len) + return null; + + comptime var i: usize = precomputed.min_len; + inline while (i <= precomputed.max_len) : (i += 1) { + if (length == i) { + const lowerbuf: [i]u8 = brk: { + var buf: [i]u8 = undefined; + for (input, &buf) |c, *j| { + j.* = std.ascii.toLower(c); + } + break :brk buf; + }; + + return getWithLengthAndEql(&lowerbuf, i, eql); + } + } + + return null; + } + pub fn getWithEql(input: anytype, comptime eql: anytype) ?V { const Input = @TypeOf(input); const length = if (@hasField(Input, "len")) input.len else input.length(); @@ -207,6 +237,36 @@ pub fn ComptimeStringMapWithKeyType(comptime KeyType: type, comptime V: type, co return null; } + pub fn getAnyCase(input: anytype) ?V { + return getCaseInsensitiveWithEql(input, bun.strings.eqlComptimeIgnoreLen); + } + + pub fn getCaseInsensitiveWithEql(input: anytype, comptime eql: anytype) ?V { + const Input = @TypeOf(input); + const length = if (@hasField(Input, "len")) input.len else input.length(); + if (length < precomputed.min_len or length > precomputed.max_len) + return null; + + comptime var i: usize = precomputed.min_len; + inline while (i <= precomputed.max_len) : (i += 1) { + if (length == i) { + const lowercased: [i]u8 = brk: { + var buf: [i]u8 = undefined; + for (input[0..i], &buf) |c, *b| { + b.* = switch (c) { + 'A'...'Z' => c + 32, + else => c, + }; + } + break :brk buf; + }; + return getWithLengthAndEql(&lowercased, i, eql); + } + } + + return null; + } + pub fn 
getWithEqlList(input: anytype, comptime eql: anytype) ?V { const Input = @TypeOf(input); const length = if (@hasField(Input, "len")) input.len else input.length(); diff --git a/src/crash_handler.zig b/src/crash_handler.zig index 39a2d5ebf5..d9cb08f989 100644 --- a/src/crash_handler.zig +++ b/src/crash_handler.zig @@ -59,6 +59,9 @@ threadlocal var panic_stage: usize = 0; /// rate or only crash due to assertion failures, are debug-only. See `Action`. pub threadlocal var current_action: ?Action = null; +var before_crash_handlers: std.ArrayListUnmanaged(struct { *anyopaque, *const OnBeforeCrash }) = .{}; +var before_crash_handlers_mutex: std.Thread.Mutex = .{}; + const CPUFeatures = @import("./bun.js/bindings/CPUFeatures.zig").CPUFeatures; /// This structure and formatter must be kept in sync with `bun.report`'s decoder implementation. @@ -132,17 +135,17 @@ pub const Action = union(enum) { .bundle_generate_chunk => |data| if (bun.Environment.isDebug) { try writer.print( \\generating bundler chunk - \\ chunk entry point: {s} - \\ source: {s} + \\ chunk entry point: {?s} + \\ source: {?s} \\ part range: {d}..{d} , .{ - data.linkerContext().graph.bundler_graph.input_files + if (data.part_range.source_index.isValid()) data.linkerContext().parse_graph.input_files .items(.source)[data.chunk.entry_point.source_index] - .path.text, - data.linkerContext().graph.bundler_graph.input_files + .path.text else null, + if (data.part_range.source_index.isValid()) data.linkerContext().parse_graph.input_files .items(.source)[data.part_range.source_index.get()] - .path.text, + .path.text else null, data.part_range.part_index_begin, data.part_range.part_index_end, }, @@ -180,6 +183,13 @@ pub fn crashHandler( panic_stage = 1; _ = panicking.fetchAdd(1, .seq_cst); + if (before_crash_handlers_mutex.tryLock()) { + for (before_crash_handlers.items) |item| { + const ptr, const cb = item; + cb(ptr); + } + } + { panic_mutex.lock(); defer panic_mutex.unlock(); @@ -763,6 +773,15 @@ pub fn updatePosixSegfaultHandler(act: ?*std.posix.Sigaction) !void { var windows_segfault_handle: ?windows.HANDLE = null; +pub fn resetOnPosix() void { + var act = std.posix.Sigaction{ + .handler = .{ .sigaction = handleSegfaultPosix }, + .mask = std.posix.empty_sigset, + .flags = (std.posix.SA.SIGINFO | std.posix.SA.RESTART | std.posix.SA.RESETHAND), + }; + updatePosixSegfaultHandler(&act) catch {}; +} + pub fn init() void { if (!enable) return; switch (bun.Environment.os) { @@ -770,12 +789,7 @@ pub fn init() void { windows_segfault_handle = windows.kernel32.AddVectoredExceptionHandler(0, handleSegfaultWindows); }, .mac, .linux => { - var act = std.posix.Sigaction{ - .handler = .{ .sigaction = handleSegfaultPosix }, - .mask = std.posix.empty_sigset, - .flags = (std.posix.SA.SIGINFO | std.posix.SA.RESTART | std.posix.SA.RESETHAND), - }; - updatePosixSegfaultHandler(&act) catch {}; + resetOnPosix(); }, else => @compileError("TODO"), } @@ -830,12 +844,13 @@ pub fn printMetadata(writer: anytype) !void { try writer.writeAll(Output.prettyFmt("", true)); } + var is_ancient_cpu = false; + try writer.writeAll(metadata_version_line); { const platform = bun.Analytics.GenerateHeader.GeneratePlatform.forOS(); const cpu_features = CPUFeatures.get(); - if (bun.Environment.isLinux) { - // TODO: musl + if (bun.Environment.isLinux and !bun.Environment.isMusl) { const version = gnu_get_libc_version() orelse ""; const kernel_version = bun.Analytics.GenerateHeader.GeneratePlatform.kernelVersion(); if (platform.os == .wsl) { @@ -843,8 +858,19 @@ pub fn 
printMetadata(writer: anytype) !void { } else { try writer.print("Linux Kernel v{d}.{d}.{d} | glibc v{s}\n", .{ kernel_version.major, kernel_version.minor, kernel_version.patch, bun.sliceTo(version, 0) }); } + } else if (bun.Environment.isLinux and bun.Environment.isMusl) { + const kernel_version = bun.Analytics.GenerateHeader.GeneratePlatform.kernelVersion(); + try writer.print("Linux Kernel v{d}.{d}.{d} | musl\n", .{ kernel_version.major, kernel_version.minor, kernel_version.patch }); } else if (bun.Environment.isMac) { try writer.print("macOS v{s}\n", .{platform.version}); + } else if (bun.Environment.isWindows) { + try writer.print("Windows v{s}\n", .{std.zig.system.windows.detectRuntimeVersion()}); + } + + if (comptime bun.Environment.isX64) { + if (!cpu_features.avx and !cpu_features.avx2 and !cpu_features.avx512) { + is_ancient_cpu = true; + } } if (!cpu_features.isEmpty()) { @@ -884,10 +910,12 @@ pub fn printMetadata(writer: anytype) !void { &peak_commit, &page_faults, ); - try writer.print("Elapsed: {d}ms | User: {d}ms | Sys: {d}ms\nRSS: {:<3.2} | Peak: {:<3.2} | Commit: {:<3.2} | Faults: {d}\n", .{ + try writer.print("Elapsed: {d}ms | User: {d}ms | Sys: {d}ms\n", .{ elapsed_msecs, user_msecs, system_msecs, + }); + try writer.print("RSS: {:<3.2} | Peak: {:<3.2} | Commit: {:<3.2} | Faults: {d}\n", .{ std.fmt.fmtIntSizeDec(current_rss), std.fmt.fmtIntSizeDec(peak_rss), std.fmt.fmtIntSizeDec(current_commit), @@ -899,6 +927,12 @@ pub fn printMetadata(writer: anytype) !void { try writer.writeAll(Output.prettyFmt("", true)); } try writer.writeAll("\n"); + + if (comptime bun.Environment.isX64) { + if (is_ancient_cpu) { + try writer.writeAll("CPU lacks AVX support. Please consider upgrading to a newer CPU.\n"); + } + } } fn waitForOtherThreadToFinishPanicking() void { @@ -1498,7 +1532,7 @@ pub fn dumpStackTrace(trace: std.builtin.StackTrace) void { .action = .view_trace, .reason = .{ .zig_error = error.DumpStackTrace }, .trace = &trace, - }}); + }}) catch {}; return; } @@ -1579,6 +1613,49 @@ pub fn dumpStackTrace(trace: std.builtin.StackTrace) void { stderr.writeAll(proc.stderr) catch return; } +/// A variant of `std.builtin.StackTrace` that stores its data within itself +/// instead of being a pointer. This allows storing captured stack traces +/// for later printing. 
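+///
+/// Example usage (illustrative sketch; the API is defined just below):
+///
+///     var stored = StoredTrace.capture(null);
+///     // ... later, when the trace is needed:
+///     dumpStackTrace(stored.trace());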
+pub const StoredTrace = struct { + data: [31]usize, + index: usize, + + pub const empty: StoredTrace = .{ + .data = .{0} ** 31, + .index = 0, + }; + + pub fn trace(stored: *StoredTrace) std.builtin.StackTrace { + return .{ + .index = stored.index, + .instruction_addresses = &stored.data, + }; + } + + pub fn capture(begin: ?usize) StoredTrace { + var stored: StoredTrace = StoredTrace.empty; + var frame = stored.trace(); + std.debug.captureStackTrace(begin orelse @returnAddress(), &frame); + stored.index = frame.index; + return stored; + } + + pub fn from(stack_trace: ?*std.builtin.StackTrace) StoredTrace { + if (stack_trace) |stack| { + var data: [31]usize = undefined; + @memset(&data, 0); + const items = @min(stack.instruction_addresses.len, 31); + @memcpy(data[0..items], stack.instruction_addresses[0..items]); + return .{ + .data = data, + .index = @min(items, stack.index), + }; + } else { + return empty; + } + } +}; + pub const js_bindings = struct { const JSC = bun.JSC; const JSValue = JSC.JSValue; @@ -1602,7 +1679,7 @@ pub const js_bindings = struct { return obj; } - pub fn jsGetMachOImageZeroOffset(_: *bun.JSC.JSGlobalObject, _: *bun.JSC.CallFrame) JSValue { + pub fn jsGetMachOImageZeroOffset(_: *bun.JSC.JSGlobalObject, _: *bun.JSC.CallFrame) bun.JSError!JSValue { if (!bun.Environment.isMac) return .undefined; const header = std.c._dyld_get_image_header(0) orelse return .undefined; @@ -1612,7 +1689,7 @@ pub const js_bindings = struct { return JSValue.jsNumber(base_address - vmaddr_slide); } - pub fn jsSegfault(_: *JSC.JSGlobalObject, _: *JSC.CallFrame) JSC.JSValue { + pub fn jsSegfault(_: *JSC.JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSC.JSValue { @setRuntimeSafety(false); const ptr: [*]align(1) u64 = @ptrFromInt(0xDEADBEEF); ptr[0] = 0xDEADBEEF; @@ -1620,33 +1697,34 @@ pub const js_bindings = struct { return .undefined; } - pub fn jsPanic(_: *JSC.JSGlobalObject, _: *JSC.CallFrame) JSC.JSValue { + pub fn jsPanic(_: *JSC.JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSC.JSValue { bun.crash_handler.panicImpl("invoked crashByPanic() handler", null, null); } - pub fn jsRootError(_: *JSC.JSGlobalObject, _: *JSC.CallFrame) JSC.JSValue { + pub fn jsRootError(_: *JSC.JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSC.JSValue { bun.crash_handler.handleRootError(error.Test, null); } - pub fn jsOutOfMemory(_: *JSC.JSGlobalObject, _: *JSC.CallFrame) JSC.JSValue { + pub fn jsOutOfMemory(_: *JSC.JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSC.JSValue { bun.outOfMemory(); } - pub fn jsRaiseIgnoringPanicHandler(_: *JSC.JSGlobalObject, _: *JSC.CallFrame) JSC.JSValue { + pub fn jsRaiseIgnoringPanicHandler(_: *JSC.JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSC.JSValue { bun.Global.raiseIgnoringPanicHandler(.SIGSEGV); } - pub fn jsGetFeaturesAsVLQ(global: *JSC.JSGlobalObject, _: *JSC.CallFrame) JSC.JSValue { + pub fn jsGetFeaturesAsVLQ(global: *JSC.JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSC.JSValue { const bits = bun.Analytics.packedFeatures(); var buf = std.BoundedArray(u8, 16){}; writeU64AsTwoVLQs(buf.writer(), @bitCast(bits)) catch { // there is definitely enough space in the bounded array unreachable; }; - return bun.String.createLatin1(buf.slice()).toJS(global); + var str = bun.String.createLatin1(buf.slice()); + return str.transferToJS(global); } - pub fn jsGetFeatureData(global: *JSC.JSGlobalObject, _: *JSC.CallFrame) JSC.JSValue { + pub fn jsGetFeatureData(global: *JSC.JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSC.JSValue { const obj = JSValue.createEmptyObject(global, 
5); const list = bun.Analytics.packed_features_list; const array = JSValue.createEmptyArray(global, list.len); @@ -1656,8 +1734,41 @@ pub const js_bindings = struct { obj.put(global, JSC.ZigString.static("features"), array); obj.put(global, JSC.ZigString.static("version"), bun.String.init(Global.package_json_version).toJS(global)); obj.put(global, JSC.ZigString.static("is_canary"), JSC.JSValue.jsBoolean(bun.Environment.is_canary)); + + // This is the source of truth for the git sha. + // Not the github ref or the git tag. obj.put(global, JSC.ZigString.static("revision"), bun.String.init(bun.Environment.git_sha).toJS(global)); + obj.put(global, JSC.ZigString.static("generated_at"), JSValue.jsNumberFromInt64(@max(std.time.milliTimestamp(), 0))); return obj; } }; + +const OnBeforeCrash = fn (opaque_ptr: *anyopaque) void; + +/// For large codebases such as bun.bake.DevServer, it may be helpful +/// to dump a large amount of state to a file to aid debugging a crash. +/// +/// Pre-crash handlers are likely, but not guaranteed to call. Errors are ignored. +pub fn appendPreCrashHandler(comptime T: type, ptr: *T, comptime handler: fn (*T) anyerror!void) !void { + const wrap = struct { + fn onCrash(opaque_ptr: *anyopaque) void { + handler(@ptrCast(@alignCast(opaque_ptr))) catch |err| { + bun.handleErrorReturnTrace(err, @errorReturnTrace()); + }; + } + }; + + before_crash_handlers_mutex.lock(); + defer before_crash_handlers_mutex.unlock(); + try before_crash_handlers.append(bun.default_allocator, .{ ptr, wrap.onCrash }); +} + +pub fn removePreCrashHandler(ptr: *anyopaque) void { + before_crash_handlers_mutex.lock(); + defer before_crash_handlers_mutex.unlock(); + const index = for (before_crash_handlers.items, 0..) |item, i| { + if (item.@"0" == ptr) break i; + } else return; + _ = before_crash_handlers.orderedRemove(index); +} diff --git a/src/css/README.md b/src/css/README.md new file mode 100644 index 0000000000..75b7dead60 --- /dev/null +++ b/src/css/README.md @@ -0,0 +1,3 @@ +# CSS + +This is the code for Bun's experimental CSS parser. This code is derived from the [Lightning CSS](https://github.com/parcel-bundler/lightningcss) (huge, huge thanks to Devon Govett and contributors) project and the [Servo](https://github.com/servo/servo) project. diff --git a/src/css/build-prefixes.js b/src/css/build-prefixes.js new file mode 100644 index 0000000000..bb36e78669 --- /dev/null +++ b/src/css/build-prefixes.js @@ -0,0 +1,745 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +// const { execSync } = require("child_process"); +const prefixes = require("autoprefixer/data/prefixes"); +const browsers = require("caniuse-lite").agents; +const unpack = require("caniuse-lite").feature; +const features = require("caniuse-lite").features; +const mdn = require("@mdn/browser-compat-data"); +const fs = require("fs"); + +const BROWSER_MAPPING = { + and_chr: "chrome", + and_ff: "firefox", + ie_mob: "ie", + op_mob: "opera", + and_qq: null, + and_uc: null, + baidu: null, + bb: null, + kaios: null, + op_mini: null, + oculus: null, +}; + +const MDN_BROWSER_MAPPING = { + chrome_android: "chrome", + firefox_android: "firefox", + opera_android: "opera", + safari_ios: "ios_saf", + samsunginternet_android: "samsung", + webview_android: "android", + oculus: null, +}; + +const latestBrowserVersions = {}; +for (let b in browsers) { + let versions = browsers[b].versions.slice(-10); + for (let i = versions.length - 1; i >= 0; i--) { + if (versions[i] != null && versions[i] != "all" && versions[i] != "TP") { + latestBrowserVersions[b] = versions[i]; + break; + } + } +} + +// Caniuse data for clip-path is incorrect. +// https://github.com/Fyrd/caniuse/issues/6209 +prefixes["clip-path"].browsers = prefixes["clip-path"].browsers.filter(b => { + let [name, version] = b.split(" "); + return !( + (name === "safari" && parseVersion(version) >= ((9 << 16) | (1 << 8))) || + (name === "ios_saf" && parseVersion(version) >= ((9 << 16) | (3 << 8))) + ); +}); + +prefixes["any-pseudo"] = { + browsers: Object.entries(mdn.css.selectors.is.__compat.support).flatMap(([key, value]) => { + if (Array.isArray(value)) { + key = MDN_BROWSER_MAPPING[key] || key; + let any = value.find(v => v.alternative_name?.includes("-any"))?.version_added; + let supported = value.find(x => x.version_added && !x.alternative_name)?.version_added; + if (any && supported) { + let parts = supported.split("."); + parts[0]--; + supported = parts.join("."); + return [`${key} ${any}}`, `${key} ${supported}`]; + } + } + + return []; + }), +}; + +let flexSpec = {}; +let oldGradient = {}; +let p = new Map(); +for (let prop in prefixes) { + let browserMap = {}; + for (let b of prefixes[prop].browsers) { + let [name, version, variant] = b.split(" "); + if (BROWSER_MAPPING[name] === null) { + continue; + } + let prefix = browsers[name].prefix_exceptions?.[version] || browsers[name].prefix; + + // https://github.com/postcss/autoprefixer/blob/main/lib/hacks/backdrop-filter.js#L11 + if (prefix === "ms" && prop === "backdrop-filter") { + prefix = "webkit"; + } + + let origName = name; + let isCurrentVersion = version === latestBrowserVersions[name]; + name = BROWSER_MAPPING[name] || name; + let v = parseVersion(version); + if (v == null) { + console.log("BAD VERSION", prop, name, version); + continue; + } + if (browserMap[name]?.[prefix] == null) { + browserMap[name] = browserMap[name] || {}; + browserMap[name][prefix] = + prefixes[prop].browsers.filter(b => b.startsWith(origName) || b.startsWith(name)).length === 1 + ? isCurrentVersion + ? [null, null] + : [null, v] + : isCurrentVersion + ? 
[v, null] + : [v, v]; + } else { + if (v < browserMap[name][prefix][0]) { + browserMap[name][prefix][0] = v; + } + + if (isCurrentVersion && browserMap[name][prefix][0] != null) { + browserMap[name][prefix][1] = null; + } else if (v > browserMap[name][prefix][1] && browserMap[name][prefix][1] != null) { + browserMap[name][prefix][1] = v; + } + } + + if (variant === "2009") { + if (flexSpec[name] == null) { + flexSpec[name] = [v, v]; + } else { + if (v < flexSpec[name][0]) { + flexSpec[name][0] = v; + } + + if (v > flexSpec[name][1]) { + flexSpec[name][1] = v; + } + } + } else if (variant === "old" && prop.includes("gradient")) { + if (oldGradient[name] == null) { + oldGradient[name] = [v, v]; + } else { + if (v < oldGradient[name][0]) { + oldGradient[name][0] = v; + } + + if (v > oldGradient[name][1]) { + oldGradient[name][1] = v; + } + } + } + } + addValue(p, browserMap, prop); +} + +function addValue(map, value, prop) { + let s = JSON.stringify(value); + let found = false; + for (let [key, val] of map) { + if (JSON.stringify(val) === s) { + key.push(prop); + found = true; + break; + } + } + if (!found) { + map.set([prop], value); + } +} + +let cssFeatures = [ + "css-sel2", + "css-sel3", + "css-gencontent", + "css-first-letter", + "css-first-line", + "css-in-out-of-range", + "form-validation", + "css-any-link", + "css-default-pseudo", + "css-dir-pseudo", + "css-focus-within", + "css-focus-visible", + "css-indeterminate-pseudo", + "css-matches-pseudo", + "css-optional-pseudo", + "css-placeholder-shown", + "dialog", + "fullscreen", + "css-marker-pseudo", + "css-placeholder", + "css-selection", + "css-case-insensitive", + "css-read-only-write", + "css-autofill", + "css-namespaces", + "shadowdomv1", + "css-rrggbbaa", + "css-nesting", + "css-not-sel-list", + "css-has", + "font-family-system-ui", + "extended-system-fonts", + "calc", +]; + +let cssFeatureMappings = { + "css-dir-pseudo": "DirSelector", + "css-rrggbbaa": "HexAlphaColors", + "css-not-sel-list": "NotSelectorList", + "css-has": "HasSelector", + "css-matches-pseudo": "IsSelector", + "css-sel2": "Selectors2", + "css-sel3": "Selectors3", + "calc": "CalcFunction", +}; + +let cssFeatureOverrides = { + // Safari supports the ::marker pseudo element, but only supports styling some properties. + // However this does not break using the selector itself, so ignore for our purposes. + // https://bugs.webkit.org/show_bug.cgi?id=204163 + // https://github.com/parcel-bundler/lightningcss/issues/508 + "css-marker-pseudo": { + safari: { + "y #1": "y", + }, + }, +}; + +let compat = new Map(); +for (let feature of cssFeatures) { + let data = unpack(features[feature]); + let overrides = cssFeatureOverrides[feature]; + let browserMap = {}; + for (let name in data.stats) { + if (BROWSER_MAPPING[name] === null) { + continue; + } + + name = BROWSER_MAPPING[name] || name; + let browserOverrides = overrides?.[name]; + for (let version in data.stats[name]) { + let value = data.stats[name][version]; + value = browserOverrides?.[value] || value; + if (value === "y") { + let v = parseVersion(version); + if (v == null) { + console.log("BAD VERSION", feature, name, version); + continue; + } + + if (browserMap[name] == null || v < browserMap[name]) { + browserMap[name] = v; + } + } + } + } + + let name = (cssFeatureMappings[feature] || feature).replace(/^css-/, ""); + addValue(compat, browserMap, name); +} + +// No browser supports custom media queries yet. 
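+// Note: an empty browser map here makes the generated switch arm in compat.zig
+// return false unconditionally, i.e. no target browser is considered compatible.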
+addValue(compat, {}, "custom-media-queries"); + +let mdnFeatures = { + doublePositionGradients: mdn.css.types.image.gradient["radial-gradient"].doubleposition.__compat.support, + clampFunction: mdn.css.types.clamp.__compat.support, + placeSelf: mdn.css.properties["place-self"].__compat.support, + placeContent: mdn.css.properties["place-content"].__compat.support, + placeItems: mdn.css.properties["place-items"].__compat.support, + overflowShorthand: mdn.css.properties["overflow"].multiple_keywords.__compat.support, + mediaRangeSyntax: mdn.css["at-rules"].media.range_syntax.__compat.support, + mediaIntervalSyntax: Object.fromEntries( + Object.entries(mdn.css["at-rules"].media.range_syntax.__compat.support).map(([browser, value]) => { + // Firefox supported only ranges and not intervals for a while. + if (Array.isArray(value)) { + value = value.filter(v => !v.partial_implementation); + } else if (value.partial_implementation) { + value = undefined; + } + + return [browser, value]; + }), + ), + logicalBorders: mdn.css.properties["border-inline-start"].__compat.support, + logicalBorderShorthand: mdn.css.properties["border-inline"].__compat.support, + logicalBorderRadius: mdn.css.properties["border-start-start-radius"].__compat.support, + logicalMargin: mdn.css.properties["margin-inline-start"].__compat.support, + logicalMarginShorthand: mdn.css.properties["margin-inline"].__compat.support, + logicalPadding: mdn.css.properties["padding-inline-start"].__compat.support, + logicalPaddingShorthand: mdn.css.properties["padding-inline"].__compat.support, + logicalInset: mdn.css.properties["inset-inline-start"].__compat.support, + logicalSize: mdn.css.properties["inline-size"].__compat.support, + logicalTextAlign: mdn.css.properties["text-align"].start.__compat.support, + labColors: mdn.css.types.color.lab.__compat.support, + oklabColors: mdn.css.types.color.oklab.__compat.support, + colorFunction: mdn.css.types.color.color.__compat.support, + spaceSeparatedColorNotation: mdn.css.types.color.rgb.space_separated_parameters.__compat.support, + textDecorationThicknessPercent: mdn.css.properties["text-decoration-thickness"].percentage.__compat.support, + textDecorationThicknessShorthand: mdn.css.properties["text-decoration"].includes_thickness.__compat.support, + cue: mdn.css.selectors.cue.__compat.support, + cueFunction: mdn.css.selectors.cue.selector_argument.__compat.support, + anyPseudo: Object.fromEntries( + Object.entries(mdn.css.selectors.is.__compat.support).map(([key, value]) => { + if (Array.isArray(value)) { + value = value.filter(v => v.alternative_name?.includes("-any")).map(({ alternative_name, ...other }) => other); + } + + if (value && value.length) { + return [key, value]; + } else { + return [key, { version_added: false }]; + } + }), + ), + partPseudo: mdn.css.selectors.part.__compat.support, + imageSet: mdn.css.types.image["image-set"].__compat.support, + xResolutionUnit: mdn.css.types.resolution.x.__compat.support, + nthChildOf: mdn.css.selectors["nth-child"].of_syntax.__compat.support, + minFunction: mdn.css.types.min.__compat.support, + maxFunction: mdn.css.types.max.__compat.support, + roundFunction: mdn.css.types.round.__compat.support, + remFunction: mdn.css.types.rem.__compat.support, + modFunction: mdn.css.types.mod.__compat.support, + absFunction: mdn.css.types.abs.__compat.support, + signFunction: mdn.css.types.sign.__compat.support, + hypotFunction: mdn.css.types.hypot.__compat.support, + gradientInterpolationHints: 
mdn.css.types.image.gradient["linear-gradient"].interpolation_hints.__compat.support, + borderImageRepeatRound: mdn.css.properties["border-image-repeat"].round.__compat.support, + borderImageRepeatSpace: mdn.css.properties["border-image-repeat"].space.__compat.support, + fontSizeRem: mdn.css.properties["font-size"].rem_values.__compat.support, + fontSizeXXXLarge: mdn.css.properties["font-size"]["xxx-large"].__compat.support, + fontStyleObliqueAngle: mdn.css.properties["font-style"]["oblique-angle"].__compat.support, + fontWeightNumber: mdn.css.properties["font-weight"].number.__compat.support, + fontStretchPercentage: mdn.css.properties["font-stretch"].percentage.__compat.support, + lightDark: mdn.css.types.color["light-dark"].__compat.support, + accentSystemColor: mdn.css.types.color["system-color"].accentcolor_accentcolortext.__compat.support, + animationTimelineShorthand: mdn.css.properties.animation["animation-timeline_included"].__compat.support, +}; + +for (let key in mdn.css.types.length) { + if (key === "__compat") { + continue; + } + + let feat = key.includes("_") ? key.replace(/_([a-z])/g, (_, l) => l.toUpperCase()) : key + "Unit"; + + mdnFeatures[feat] = mdn.css.types.length[key].__compat.support; +} + +for (let key in mdn.css.types.image.gradient) { + if (key === "__compat") { + continue; + } + + let feat = key.replace(/-([a-z])/g, (_, l) => l.toUpperCase()); + mdnFeatures[feat] = mdn.css.types.image.gradient[key].__compat.support; +} + +const nonStandardListStyleType = new Set([ + // https://developer.mozilla.org/en-US/docs/Web/CSS/list-style-type#non-standard_extensions + "ethiopic-halehame", + "ethiopic-halehame-am", + "ethiopic-halehame-ti-er", + "ethiopic-halehame-ti-et", + "hangul", + "hangul-consonant", + "urdu", + "cjk-ideographic", + // https://github.com/w3c/csswg-drafts/issues/135 + "upper-greek", +]); + +for (let key in mdn.css.properties["list-style-type"]) { + if ( + key === "__compat" || + nonStandardListStyleType.has(key) || + mdn.css.properties["list-style-type"][key].__compat.support.chrome.version_removed + ) { + continue; + } + + let feat = key[0].toUpperCase() + key.slice(1).replace(/-([a-z])/g, (_, l) => l.toUpperCase()) + "ListStyleType"; + mdnFeatures[feat] = mdn.css.properties["list-style-type"][key].__compat.support; +} + +for (let key in mdn.css.properties["width"]) { + if (key === "__compat" || key === "animatable") { + continue; + } + + let feat = key[0].toUpperCase() + key.slice(1).replace(/[-_]([a-z])/g, (_, l) => l.toUpperCase()) + "Size"; + mdnFeatures[feat] = mdn.css.properties["width"][key].__compat.support; +} + +Object.entries(mdn.css.properties.width.stretch.__compat.support) + .filter(([, v]) => v.alternative_name) + .forEach(([k, v]) => { + let name = v.alternative_name.slice(1).replace(/[-_]([a-z])/g, (_, l) => l.toUpperCase()) + "Size"; + mdnFeatures[name] ??= {}; + mdnFeatures[name][k] = { version_added: v.version_added }; + }); + +for (let feature in mdnFeatures) { + let browserMap = {}; + for (let name in mdnFeatures[feature]) { + if (MDN_BROWSER_MAPPING[name] === null) { + continue; + } + + let feat = mdnFeatures[feature][name]; + let version; + if (Array.isArray(feat)) { + version = feat + .filter(x => x.version_added && !x.alternative_name && !x.flags) + .sort((a, b) => (parseVersion(a.version_added) < parseVersion(b.version_added) ? 
-1 : 1))[0].version_added; + } else if (!feat.alternative_name && !feat.flags) { + version = feat.version_added; + } + + if (!version) { + continue; + } + + let v = parseVersion(version); + if (v == null) { + console.log("BAD VERSION", feature, name, version); + continue; + } + + name = MDN_BROWSER_MAPPING[name] || name; + browserMap[name] = v; + } + + addValue(compat, browserMap, feature); +} + +addValue( + compat, + { + safari: parseVersion("10.1"), + ios_saf: parseVersion("10.3"), + }, + "p3Colors", +); + +addValue( + compat, + { + // https://github.com/WebKit/WebKit/commit/baed0d8b0abf366e1d9a6105dc378c59a5f21575 + safari: parseVersion("10.1"), + ios_saf: parseVersion("10.3"), + }, + "LangSelectorList", +); + +let prefixMapping = { + webkit: "webkit", + moz: "moz", + ms: "ms", + o: "o", +}; + +let flags = [ + "nesting", + "not_selector_list", + "dir_selector", + "lang_selector_list", + "is_selector", + "text_decoration_thickness_percent", + "media_interval_syntax", + "media_range_syntax", + "custom_media_queries", + "clamp_function", + "color_function", + "oklab_colors", + "lab_colors", + "p3_colors", + "hex_alpha_colors", + "space_separated_color_notation", + "font_family_system_ui", + "double_position_gradients", + "vendor_prefixes", + "logical_properties", + ["selectors", ["nesting", "not_selector_list", "dir_selector", "lang_selector_list", "is_selector"]], + ["media_queries", ["media_interval_syntax", "media_range_syntax", "custom_media_queries"]], + [ + "colors", + ["color_function", "oklab_colors", "lab_colors", "p3_colors", "hex_alpha_colors", "space_separated_color_notation"], + ], +]; + +function snakecase(str) { + let s = ""; + for (let i = 0; i < str.length; i++) { + let c = str[i].charCodeAt(0); + if (c === "-") { + s += "_"; + } else { + if (i > 0 && c >= 65 && c <= 90) { + s += "_"; + } + s += str[i].toLowerCase(); + } + } + return s; +} + +let enumify = f => + snakecase( + f + .replace(/^@([a-z])/, (_, x) => "at_" + x) + .replace(/^::([a-z])/, (_, x) => "pseudo_element_" + x) + .replace(/^:([a-z])/, (_, x) => "pseudo_class_" + x) + // .replace(/(^|-)([a-z])/g, (_, a, x) => (a === "-" ? "_" + x : x)); + .replace(/(^|-)([a-z])/g, (_, a, x) => (a === "-" ? "_" + x : x)), + ); + +let allBrowsers = Object.keys(browsers) + .filter(b => !(b in BROWSER_MAPPING)) + .sort(); +let browsersZig = `pub const Browsers = struct { + ${allBrowsers.join(": ?u32 = null,\n")}: ?u32 = null, + pub usingnamespace BrowsersImpl(@This()); +}`; +let flagsZig = `pub const Features = packed struct(u32) { + ${flags + .map((flag, i) => { + if (Array.isArray(flag)) { + // return `const ${flag[0]} = ${flag[1].map(f => `Self::${f}.bits()`).join(" | ")};`; + return `const ${flag[0]} = Features.fromNames(${flag[1].map(f => `"${f}"`).join(", ")});`; + } else { + return `${flag}: bool = 1 << ${i},`; + } + }) + .join("\n ")} + + pub usingnamespace css.Bitflags(@This()); + pub usingnamespace FeaturesImpl(@This()); + }`; +let targets = fs + .readFileSync("src/css/targets.zig", "utf8") + .replace(/pub const Browsers = struct \{((?:.|\n)+?)\}/, browsersZig) + .replace(/pub const Features = packed struct\(u32\) \{((?:.|\n)+?)\}/, flagsZig); + +console.log("TARGETS", targets); +fs.writeFileSync("src/css/targets.zig", targets); +await Bun.$`zig fmt src/css/targets.zig`; + +let targets_dts = `// This file is autogenerated by build-prefixes.js. DO NOT EDIT! 
+ +export interface Targets { + ${allBrowsers.join("?: number,\n ")}?: number +} + +export const Features: { + ${flags + .map((flag, i) => { + if (Array.isArray(flag)) { + return `${flag[0]}: ${flag[1].reduce((p, f) => p | (1 << flags.indexOf(f)), 0)},`; + } else { + return `${flag}: ${1 << i},`; + } + }) + .join("\n ")} +}; +`; + +// fs.writeFileSync("node/targets.d.ts", targets_dts); + +let flagsJs = `// This file is autogenerated by build-prefixes.js. DO NOT EDIT! + +exports.Features = { + ${flags + .map((flag, i) => { + if (Array.isArray(flag)) { + return `${flag[0]}: ${flag[1].reduce((p, f) => p | (1 << flags.indexOf(f)), 0)},`; + } else { + return `${flag}: ${1 << i},`; + } + }) + .join("\n ")} +}; +`; + +// fs.writeFileSync("node/flags.js", flagsJs); + +let s = `// This file is autogenerated by build-prefixes.js. DO NOT EDIT! + +const css = @import("./css_parser.zig"); +const VendorPrefix = css.VendorPrefix; +const Browsers = css.targets.Browsers; + +pub const Feature = enum { + ${[...p.keys()].flat().map(enumify).sort().join(",\n ")}, + + pub fn prefixesFor(this: *const Feature, browsers: Browsers) VendorPrefix { + var prefixes = VendorPrefix{ .none = true }; + switch (this.*) { + ${[...p] + .map(([features, versions]) => { + return `${features.map(name => `.${enumify(name)}`).join(" ,\n ")} => { + ${Object.entries(versions) + .map(([name, prefixes]) => { + let needsVersion = !Object.values(prefixes).every(([min, max]) => min == null && max == null); + return `if ${needsVersion ? `(browsers.${name}) |version|` : `(browsers.${name} != null)`} { + ${Object.entries(prefixes) + .map(([prefix, [min, max]]) => { + if (!prefixMapping[prefix]) { + throw new Error("Missing prefix " + prefix); + } + let addPrefix = `prefixes = prefixes.bitwiseOr(VendorPrefix{.${prefixMapping[prefix]} = true });`; + let condition; + if (min == null && max == null) { + return addPrefix; + } else if (min == null) { + condition = `version <= ${max}`; + } else if (max == null) { + condition = `version >= ${min}`; + } else if (min == max) { + condition = `version == ${min}`; + } else { + condition = `version >= ${min} and version <= ${max}`; + } + + return `if (${condition}) { + ${addPrefix} + }`; + }) + .join("\n ")} + }`; + }) + .join("\n ")} + }`; + }) + .join(",\n ")} + } + return prefixes; + } + +pub fn isFlex2009(browsers: Browsers) bool { + ${Object.entries(flexSpec) + .map(([name, [min, max]]) => { + return `if (browsers.${name}) |version| { + if (version >= ${min} and version <= ${max}) { + return true; + } + }`; + }) + .join("\n ")} + return false; +} + +pub fn isWebkitGradient(browsers: Browsers) bool { + ${Object.entries(oldGradient) + .map(([name, [min, max]]) => { + return `if (browsers.${name}) |version| { + if (version >= ${min} and version <= ${max}) { + return true; + } + }`; + }) + .join("\n ")} + return false; +} +}; +`; + +fs.writeFileSync("src/css/prefixes.zig", s); +await Bun.$`zig fmt src/css/prefixes.zig`; + +let c = `// This file is autogenerated by build-prefixes.js. DO NOT EDIT! + +const Browsers = @import("./targets.zig").Browsers; + +pub const Feature = enum { + ${[...compat.keys()].flat().map(enumify).sort().join(",\n ")}, + + pub fn isCompatible(this: *const Feature, browsers: Browsers) bool { + switch (this.*) { + ${[...compat] + .map( + ([features, supportedBrowsers]) => + `${features.map(name => `.${enumify(name)}`).join(" ,\n ")} => {` + + (Object.entries(supportedBrowsers).length === 0 + ? 
"\n return false;\n }," + : ` + ${Object.entries(supportedBrowsers) + .map( + ([browser, min]) => + `if (browsers.${browser}) |version| { + if (version < ${min}) { + return false; + } + }`, + ) + .join("\n ")}${ + Object.keys(supportedBrowsers).length === allBrowsers.length + ? "" + : `\n if (${allBrowsers + .filter(b => !supportedBrowsers[b]) + .map(browser => `browsers.${browser} != null`) + .join(" or ")}) { + return false; + }` + } + },`), + ) + .join("\n ")} + } + return true; + } + + +pub fn isPartiallyCompatible(this: *const Feature, targets: Browsers) bool { + var browsers = Browsers{}; + ${allBrowsers + .map( + browser => `if (targets.${browser} != null) { + browsers.${browser} = targets.${browser}; + if (this.isCompatible(browsers)) { + return true; + } + browsers.${browser} = null; + }\n`, + ) + .join(" ")} + return false; +} +}; +`; + +fs.writeFileSync("src/css/compat.zig", c); +await Bun.$`zig fmt src/css/compat.zig`; + +function parseVersion(version) { + version = version.replace("≤", ""); + let [major, minor = "0", patch = "0"] = version + .split("-")[0] + .split(".") + .map(v => parseInt(v, 10)); + + if (isNaN(major) || isNaN(minor) || isNaN(patch)) { + return null; + } + + return (major << 16) | (minor << 8) | patch; +} diff --git a/src/css/compat.zig b/src/css/compat.zig new file mode 100644 index 0000000000..7d94f023d8 --- /dev/null +++ b/src/css/compat.zig @@ -0,0 +1,5407 @@ +// This file is autogenerated by build-prefixes.js. DO NOT EDIT! + +const Browsers = @import("./targets.zig").Browsers; + +pub const Feature = enum { + abs_function, + accent_system_color, + afar_list_style_type, + amharic_abegede_list_style_type, + amharic_list_style_type, + anchor_size_size, + animation_timeline_shorthand, + any_link, + any_pseudo, + arabic_indic_list_style_type, + armenian_list_style_type, + asterisks_list_style_type, + auto_size, + autofill, + bengali_list_style_type, + binary_list_style_type, + border_image_repeat_round, + border_image_repeat_space, + calc_function, + cambodian_list_style_type, + cap_unit, + case_insensitive, + ch_unit, + circle_list_style_type, + cjk_decimal_list_style_type, + cjk_earthly_branch_list_style_type, + cjk_heavenly_stem_list_style_type, + clamp_function, + color_function, + conic_gradient, + container_query_length_units, + cue, + cue_function, + custom_media_queries, + decimal_leading_zero_list_style_type, + decimal_list_style_type, + default_pseudo, + devanagari_list_style_type, + dialog, + dir_selector, + disc_list_style_type, + disclosure_closed_list_style_type, + disclosure_open_list_style_type, + double_position_gradients, + em_unit, + ethiopic_abegede_am_et_list_style_type, + ethiopic_abegede_gez_list_style_type, + ethiopic_abegede_list_style_type, + ethiopic_abegede_ti_er_list_style_type, + ethiopic_abegede_ti_et_list_style_type, + ethiopic_halehame_aa_er_list_style_type, + ethiopic_halehame_aa_et_list_style_type, + ethiopic_halehame_am_et_list_style_type, + ethiopic_halehame_gez_list_style_type, + ethiopic_halehame_om_et_list_style_type, + ethiopic_halehame_sid_et_list_style_type, + ethiopic_halehame_so_et_list_style_type, + ethiopic_halehame_tig_list_style_type, + ethiopic_list_style_type, + ethiopic_numeric_list_style_type, + ex_unit, + extended_system_fonts, + first_letter, + first_line, + fit_content_function_size, + fit_content_size, + focus_visible, + focus_within, + font_family_system_ui, + font_size_rem, + font_size_x_x_x_large, + font_stretch_percentage, + font_style_oblique_angle, + font_weight_number, + 
footnotes_list_style_type, + form_validation, + fullscreen, + gencontent, + georgian_list_style_type, + gradient_interpolation_hints, + gujarati_list_style_type, + gurmukhi_list_style_type, + has_selector, + hebrew_list_style_type, + hex_alpha_colors, + hiragana_iroha_list_style_type, + hiragana_list_style_type, + hypot_function, + ic_unit, + image_set, + in_out_of_range, + indeterminate_pseudo, + is_animatable_size, + is_selector, + japanese_formal_list_style_type, + japanese_informal_list_style_type, + kannada_list_style_type, + katakana_iroha_list_style_type, + katakana_list_style_type, + khmer_list_style_type, + korean_hangul_formal_list_style_type, + korean_hanja_formal_list_style_type, + korean_hanja_informal_list_style_type, + lab_colors, + lang_selector_list, + lao_list_style_type, + lh_unit, + light_dark, + linear_gradient, + logical_border_radius, + logical_border_shorthand, + logical_borders, + logical_inset, + logical_margin, + logical_margin_shorthand, + logical_padding, + logical_padding_shorthand, + logical_size, + logical_text_align, + lower_alpha_list_style_type, + lower_armenian_list_style_type, + lower_greek_list_style_type, + lower_hexadecimal_list_style_type, + lower_latin_list_style_type, + lower_norwegian_list_style_type, + lower_roman_list_style_type, + malayalam_list_style_type, + marker_pseudo, + max_content_size, + max_function, + media_interval_syntax, + media_range_syntax, + min_content_size, + min_function, + mod_function, + mongolian_list_style_type, + moz_available_size, + myanmar_list_style_type, + namespaces, + nesting, + none_list_style_type, + not_selector_list, + nth_child_of, + octal_list_style_type, + oklab_colors, + optional_pseudo, + oriya_list_style_type, + oromo_list_style_type, + overflow_shorthand, + p3_colors, + part_pseudo, + persian_list_style_type, + place_content, + place_items, + place_self, + placeholder, + placeholder_shown, + q_unit, + radial_gradient, + rcap_unit, + rch_unit, + read_only_write, + rem_function, + rem_unit, + repeating_conic_gradient, + repeating_linear_gradient, + repeating_radial_gradient, + rex_unit, + ric_unit, + rlh_unit, + round_function, + selection, + selectors2, + selectors3, + shadowdomv1, + sidama_list_style_type, + sign_function, + simp_chinese_formal_list_style_type, + simp_chinese_informal_list_style_type, + somali_list_style_type, + space_separated_color_notation, + square_list_style_type, + stretch_size, + string_list_style_type, + symbols_list_style_type, + tamil_list_style_type, + telugu_list_style_type, + text_decoration_thickness_percent, + text_decoration_thickness_shorthand, + thai_list_style_type, + tibetan_list_style_type, + tigre_list_style_type, + tigrinya_er_abegede_list_style_type, + tigrinya_er_list_style_type, + tigrinya_et_abegede_list_style_type, + tigrinya_et_list_style_type, + trad_chinese_formal_list_style_type, + trad_chinese_informal_list_style_type, + upper_alpha_list_style_type, + upper_armenian_list_style_type, + upper_hexadecimal_list_style_type, + upper_latin_list_style_type, + upper_norwegian_list_style_type, + upper_roman_list_style_type, + vb_unit, + vh_unit, + vi_unit, + viewport_percentage_units_dynamic, + viewport_percentage_units_large, + viewport_percentage_units_small, + vmax_unit, + vmin_unit, + vw_unit, + webkit_fill_available_size, + x_resolution_unit, + + pub fn isCompatible(this: Feature, browsers: Browsers) bool { + switch (this) { + .selectors2 => { + if (browsers.ie) |version| { + if (version < 458752) { + return false; + } + } + if (browsers.edge) |version| { + 
if (version < 786432) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 131072) { + return false; + } + } + if (browsers.chrome) |version| { + if (version < 262144) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 196864) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 589824) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 197120) { + return false; + } + } + if (browsers.android) |version| { + if (version < 131328) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 262144) { + return false; + } + } + }, + .selectors3 => { + if (browsers.ie) |version| { + if (version < 589824) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 786432) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 197888) { + return false; + } + } + if (browsers.chrome) |version| { + if (version < 262144) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 197120) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 591104) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 197120) { + return false; + } + } + if (browsers.android) |version| { + if (version < 131328) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 262144) { + return false; + } + } + }, + .gencontent, .first_line => { + if (browsers.ie) |version| { + if (version < 589824) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 786432) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 131072) { + return false; + } + } + if (browsers.chrome) |version| { + if (version < 262144) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 196864) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 589824) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 197120) { + return false; + } + } + if (browsers.android) |version| { + if (version < 131328) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 262144) { + return false; + } + } + }, + .first_letter => { + if (browsers.ie) |version| { + if (version < 589824) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 786432) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 197888) { + return false; + } + } + if (browsers.chrome) |version| { + if (version < 589824) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 327936) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 722432) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 327680) { + return false; + } + } + if (browsers.android) |version| { + if (version < 196608) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 262144) { + return false; + } + } + }, + .in_out_of_range => { + if (browsers.edge) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 3276800) { + return false; + } + } + if (browsers.chrome) |version| { + if (version < 3473408) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 655616) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 2621440) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 656128) { + return false; + 
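+                        // Version literals in this file are packed by build-prefixes.js' parseVersion()
+                        // as (major << 16) | (minor << 8) | patch, e.g. 656128 above == 0x0A0300 == 10.3.0.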
} + } + if (browsers.android) |version| { + if (version < 8323072) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 327680) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .form_validation => { + if (browsers.ie) |version| { + if (version < 655360) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 786432) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 262144) { + return false; + } + } + if (browsers.chrome) |version| { + if (version < 655360) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 655616) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 655360) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 656128) { + return false; + } + } + if (browsers.android) |version| { + if (version < 263171) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 262144) { + return false; + } + } + }, + .any_link => { + if (browsers.edge) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 3276800) { + return false; + } + } + if (browsers.chrome) |version| { + if (version < 4259840) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 589824) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 3407872) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 589824) { + return false; + } + } + if (browsers.android) |version| { + if (version < 8323072) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 590336) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .default_pseudo => { + if (browsers.edge) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 262144) { + return false; + } + } + if (browsers.chrome) |version| { + if (version < 3342336) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 655616) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 2490368) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 656128) { + return false; + } + } + if (browsers.android) |version| { + if (version < 8323072) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 327680) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .dir_selector => { + if (browsers.edge) |version| { + if (version < 7864320) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 3211264) { + return false; + } + } + if (browsers.chrome) |version| { + if (version < 7864320) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 1049600) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 6946816) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 1049600) { + return false; + } + } + if (browsers.android) |version| { + if (version < 8323072) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 1638400) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .focus_within => { + if (browsers.edge) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 3407872) { + return false; + } + } + if (browsers.chrome) |version| { + if (version < 
3932160) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 655616) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 3080192) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 656128) { + return false; + } + } + if (browsers.android) |version| { + if (version < 8323072) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 524800) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .focus_visible => { + if (browsers.edge) |version| { + if (version < 5636096) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 5570560) { + return false; + } + } + if (browsers.chrome) |version| { + if (version < 5636096) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 984064) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 4718592) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 984064) { + return false; + } + } + if (browsers.android) |version| { + if (version < 8323072) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 917504) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .indeterminate_pseudo => { + if (browsers.edge) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 3342336) { + return false; + } + } + if (browsers.chrome) |version| { + if (version < 2555904) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 655616) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 1703936) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 656128) { + return false; + } + } + if (browsers.android) |version| { + if (version < 8323072) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 262144) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .is_selector => { + if (browsers.edge) |version| { + if (version < 5767168) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 5111808) { + return false; + } + } + if (browsers.chrome) |version| { + if (version < 5767168) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 917504) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 4915200) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 917504) { + return false; + } + } + if (browsers.android) |version| { + if (version < 8323072) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 983040) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .optional_pseudo => { + if (browsers.ie) |version| { + if (version < 655360) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 786432) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 262144) { + return false; + } + } + if (browsers.chrome) |version| { + if (version < 983040) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 327680) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 983040) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 327680) { + return false; + } + } + if (browsers.android) |version| { + if (version < 131840) { + return false; + } + } + if (browsers.samsung) |version| { + if 
(version < 262144) { + return false; + } + } + }, + .placeholder_shown => { + if (browsers.edge) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 3342336) { + return false; + } + } + if (browsers.chrome) |version| { + if (version < 3080192) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 589824) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 2228224) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 589824) { + return false; + } + } + if (browsers.android) |version| { + if (version < 8323072) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 327680) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .dialog => { + if (browsers.edge) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 6422528) { + return false; + } + } + if (browsers.chrome) |version| { + if (version < 2424832) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 984064) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 1572864) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 984064) { + return false; + } + } + if (browsers.android) |version| { + if (version < 8323072) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 262144) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .fullscreen => { + if (browsers.edge) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 4194304) { + return false; + } + } + if (browsers.chrome) |version| { + if (version < 4653056) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 1049600) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 786688) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 655616) { + return false; + } + } + if (browsers.android != null or browsers.ie != null or browsers.ios_saf != null) { + return false; + } + }, + .marker_pseudo => { + if (browsers.edge) |version| { + if (version < 5636096) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 4456448) { + return false; + } + } + if (browsers.chrome) |version| { + if (version < 5636096) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 721152) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 4718592) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 721664) { + return false; + } + } + if (browsers.android) |version| { + if (version < 8323072) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 917504) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .placeholder => { + if (browsers.edge) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 3342336) { + return false; + } + } + if (browsers.chrome) |version| { + if (version < 3735552) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 655616) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 2883584) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 656128) { + return false; + } + } + if (browsers.android) |version| { + if 
(version < 8323072) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 459264) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .selection => { + if (browsers.ie) |version| { + if (version < 589824) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 786432) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 4063232) { + return false; + } + } + if (browsers.chrome) |version| { + if (version < 262144) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 196864) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 591104) { + return false; + } + } + if (browsers.android) |version| { + if (version < 263168) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 262144) { + return false; + } + } + if (browsers.ios_saf != null) { + return false; + } + }, + .case_insensitive => { + if (browsers.edge) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 3080192) { + return false; + } + } + if (browsers.chrome) |version| { + if (version < 3211264) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 589824) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 2359296) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 589824) { + return false; + } + } + if (browsers.android) |version| { + if (version < 8323072) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 327680) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .read_only_write => { + if (browsers.edge) |version| { + if (version < 851968) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 5111808) { + return false; + } + } + if (browsers.chrome) |version| { + if (version < 2359296) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 589824) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 1507328) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 589824) { + return false; + } + } + if (browsers.android) |version| { + if (version < 8323072) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 262144) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .autofill => { + if (browsers.chrome) |version| { + if (version < 7208960) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 7208960) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 5636096) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 6291456) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 983040) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 983040) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 1376256) { + return false; + } + } + if (browsers.android) |version| { + if (version < 8323072) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .namespaces => { + if (browsers.ie) |version| { + if (version < 589824) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 786432) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 131072) { + return false; + } + } + if (browsers.chrome) |version| { + if (version < 262144) 
{ + return false; + } + } + if (browsers.safari) |version| { + if (version < 262144) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 589824) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 262656) { + return false; + } + } + if (browsers.android) |version| { + if (version < 131328) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 262144) { + return false; + } + } + }, + .shadowdomv1 => { + if (browsers.edge) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 4128768) { + return false; + } + } + if (browsers.chrome) |version| { + if (version < 3473408) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 655360) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 2621440) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 720896) { + return false; + } + } + if (browsers.android) |version| { + if (version < 8323072) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 393728) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .hex_alpha_colors => { + if (browsers.edge) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 3211264) { + return false; + } + } + if (browsers.chrome) |version| { + if (version < 4063232) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 655360) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 3407872) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 655360) { + return false; + } + } + if (browsers.android) |version| { + if (version < 8323072) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 524800) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .nesting => { + if (browsers.edge) |version| { + if (version < 7864320) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 7667712) { + return false; + } + } + if (browsers.chrome) |version| { + if (version < 7864320) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 1114624) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 6946816) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 1114624) { + return false; + } + } + if (browsers.android) |version| { + if (version < 8323072) { + return false; + } + } + if (browsers.ie != null or browsers.samsung != null) { + return false; + } + }, + .not_selector_list => { + if (browsers.edge) |version| { + if (version < 5767168) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 5505024) { + return false; + } + } + if (browsers.chrome) |version| { + if (version < 5767168) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 589824) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 4915200) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 589824) { + return false; + } + } + if (browsers.android) |version| { + if (version < 8323072) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 983040) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .has_selector => { + if (browsers.edge) |version| { + if (version < 6881280) { + return false; + } + } 
+ if (browsers.firefox) |version| { + if (version < 7929856) { + return false; + } + } + if (browsers.chrome) |version| { + if (version < 6881280) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 984064) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 5963776) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 984064) { + return false; + } + } + if (browsers.android) |version| { + if (version < 8323072) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 1310720) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .font_family_system_ui => { + if (browsers.edge) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 6029312) { + return false; + } + } + if (browsers.chrome) |version| { + if (version < 3670016) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 720896) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 2818048) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 720896) { + return false; + } + } + if (browsers.android) |version| { + if (version < 8323072) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 393728) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .extended_system_fonts => { + if (browsers.safari) |version| { + if (version < 852224) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 852992) { + return false; + } + } + if (browsers.android != null or browsers.chrome != null or browsers.edge != null or browsers.firefox != null or browsers.ie != null or browsers.opera != null or browsers.samsung != null) { + return false; + } + }, + .calc_function => { + if (browsers.edge) |version| { + if (version < 786432) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 1048576) { + return false; + } + } + if (browsers.chrome) |version| { + if (version < 1703936) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 393472) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 983040) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 458752) { + return false; + } + } + if (browsers.android) |version| { + if (version < 8323072) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 262144) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .custom_media_queries, .fit_content_function_size, .stretch_size => { + return false; + }, + .double_position_gradients => { + if (browsers.chrome) |version| { + if (version < 4653056) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 4194304) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 3276800) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 786688) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 786944) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 655360) { + return false; + } + } + if (browsers.android) |version| { + if (version < 4653056) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .clamp_function => { + if (browsers.chrome) |version| { + if (version < 5177344) 
{ + return false; + } + } + if (browsers.edge) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 3735552) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 852224) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 852992) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 786432) { + return false; + } + } + if (browsers.android) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .place_self, .place_items => { + if (browsers.chrome) |version| { + if (version < 3866624) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 2949120) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 2818048) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 720896) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 720896) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 458752) { + return false; + } + } + if (browsers.android) |version| { + if (version < 3866624) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .place_content => { + if (browsers.chrome) |version| { + if (version < 3866624) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 2949120) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 2818048) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 589824) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 589824) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 458752) { + return false; + } + } + if (browsers.android) |version| { + if (version < 3866624) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .overflow_shorthand => { + if (browsers.chrome) |version| { + if (version < 4456448) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 3997696) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 3145728) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 852224) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 852992) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 655360) { + return false; + } + } + if (browsers.android) |version| { + if (version < 4456448) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .media_range_syntax => { + if (browsers.chrome) |version| { + if (version < 6815744) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 6815744) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 4128768) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 4653056) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 1049600) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 1049600) { + return false; + } + } + if (browsers.samsung) 
|version| { + if (version < 1310720) { + return false; + } + } + if (browsers.android) |version| { + if (version < 6815744) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .media_interval_syntax => { + if (browsers.chrome) |version| { + if (version < 6815744) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 6815744) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 6684672) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 4653056) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 1049600) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 1049600) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 1310720) { + return false; + } + } + if (browsers.android) |version| { + if (version < 6815744) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .logical_borders => { + if (browsers.chrome) |version| { + if (version < 4521984) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 2686976) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 3145728) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 786688) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 786944) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 655360) { + return false; + } + } + if (browsers.android) |version| { + if (version < 4521984) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .logical_border_shorthand, .logical_margin_shorthand, .logical_padding_shorthand => { + if (browsers.chrome) |version| { + if (version < 5701632) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 5701632) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 4325376) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 4063232) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 917760) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 918784) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 917504) { + return false; + } + } + if (browsers.android) |version| { + if (version < 5701632) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .logical_border_radius => { + if (browsers.chrome) |version| { + if (version < 5832704) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 5832704) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 4325376) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 4128768) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 983040) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 983040) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 983040) { + return false; + } + } + if (browsers.android) |version| { + if (version < 5832704) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .logical_margin, .logical_padding => { + if (browsers.chrome) |version| { + if (version < 4521984) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 5177344) { + return false; + } + } + if 
(browsers.firefox) |version| { + if (version < 2686976) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 3145728) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 786688) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 786944) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 655360) { + return false; + } + } + if (browsers.android) |version| { + if (version < 5701632) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .logical_inset => { + if (browsers.chrome) |version| { + if (version < 5701632) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 5701632) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 4128768) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 4063232) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 917760) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 918784) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 917504) { + return false; + } + } + if (browsers.android) |version| { + if (version < 5701632) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .logical_size => { + if (browsers.chrome) |version| { + if (version < 3735552) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 2686976) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 2818048) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 786688) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 786944) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 327680) { + return false; + } + } + if (browsers.android) |version| { + if (version < 3735552) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .logical_text_align => { + if (browsers.chrome) |version| { + if (version < 1179648) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 262144) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 917504) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 196864) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 131072) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 65536) { + return false; + } + } + if (browsers.android) |version| { + if (version < 2424832) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .lab_colors => { + if (browsers.chrome) |version| { + if (version < 7274496) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 7274496) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 7405568) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 4915200) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 983040) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 983040) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 1441792) { + return false; + } + } + if (browsers.android) |version| { + if (version < 7274496) { + return false; + } 
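+                // For browsers with no supporting version in the compat data (IE in this branch),
+                // the generator emits a plain null check: targeting that browser marks the feature incompatible.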
+ } + if (browsers.ie != null) { + return false; + } + }, + .oklab_colors => { + if (browsers.chrome) |version| { + if (version < 7274496) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 7274496) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 7405568) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 4915200) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 984064) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 984064) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 1441792) { + return false; + } + } + if (browsers.android) |version| { + if (version < 7274496) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .color_function => { + if (browsers.chrome) |version| { + if (version < 7274496) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 7274496) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 7405568) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 4915200) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 655616) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 656128) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 1441792) { + return false; + } + } + if (browsers.android) |version| { + if (version < 7274496) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .space_separated_color_notation => { + if (browsers.chrome) |version| { + if (version < 4259840) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 3407872) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 3080192) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 786688) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 786944) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 589824) { + return false; + } + } + if (browsers.android) |version| { + if (version < 4259840) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .text_decoration_thickness_percent => { + if (browsers.chrome) |version| { + if (version < 5701632) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 5701632) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 4063232) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 1115136) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 1115136) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 917504) { + return false; + } + } + if (browsers.android) |version| { + if (version < 5701632) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .text_decoration_thickness_shorthand => { + if (browsers.chrome) |version| { + if (version < 5701632) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 5701632) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 4063232) { + return false; + } + } + if 
(browsers.samsung) |version| { + if (version < 917504) { + return false; + } + } + if (browsers.android) |version| { + if (version < 5701632) { + return false; + } + } + if (browsers.ie != null or browsers.ios_saf != null or browsers.safari != null) { + return false; + } + }, + .cue => { + if (browsers.chrome) |version| { + if (version < 1703936) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 3604480) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 917504) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 458752) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 458752) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 66816) { + return false; + } + } + if (browsers.android) |version| { + if (version < 263168) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .cue_function => { + if (browsers.chrome) |version| { + if (version < 1703936) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 917504) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 458752) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 458752) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 66816) { + return false; + } + } + if (browsers.android) |version| { + if (version < 263168) { + return false; + } + } + if (browsers.firefox != null or browsers.ie != null) { + return false; + } + }, + .any_pseudo => { + if (browsers.chrome) |version| { + if (version < 1179648) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 262144) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 917504) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 327680) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 327680) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 65536) { + return false; + } + } + if (browsers.android) |version| { + if (version < 2424832) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .part_pseudo => { + if (browsers.chrome) |version| { + if (version < 4784128) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 3407872) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 852224) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 852992) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 720896) { + return false; + } + } + if (browsers.android) |version| { + if (version < 4784128) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .image_set => { + if (browsers.chrome) |version| { + if (version < 1638400) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 5767168) { + return false; + } + } + if (browsers.opera) |version| { + if 
(version < 917504) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 393216) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 393216) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 66816) { + return false; + } + } + if (browsers.android) |version| { + if (version < 263168) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .x_resolution_unit => { + if (browsers.chrome) |version| { + if (version < 4456448) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 4063232) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 3145728) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 655360) { + return false; + } + } + if (browsers.android) |version| { + if (version < 4456448) { + return false; + } + } + if (browsers.ie != null or browsers.ios_saf != null or browsers.safari != null) { + return false; + } + }, + .nth_child_of => { + if (browsers.chrome) |version| { + if (version < 7274496) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 7274496) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 7405568) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 4915200) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 589824) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 589824) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 1441792) { + return false; + } + } + if (browsers.android) |version| { + if (version < 7274496) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .min_function, .max_function => { + if (browsers.chrome) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 3735552) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 721152) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 721664) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 786432) { + return false; + } + } + if (browsers.android) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .round_function, .rem_function, .mod_function => { + if (browsers.chrome) |version| { + if (version < 8192000) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 8192000) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 7733248) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 5439488) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 984064) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 984064) { + return false; + } + } + if (browsers.android) |version| { + if (version < 8192000) { + return false; + } + } + if (browsers.ie != null or browsers.samsung != null) { + return false; + } + }, + .abs_function, .sign_function => { + if (browsers.firefox) |version| { + if (version < 7733248) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 984064) { + 
return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 984064) { + return false; + } + } + if (browsers.android != null or browsers.chrome != null or browsers.edge != null or browsers.ie != null or browsers.opera != null or browsers.samsung != null) { + return false; + } + }, + .hypot_function => { + if (browsers.chrome) |version| { + if (version < 7864320) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 7864320) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 7733248) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 5242880) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 984064) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 984064) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 1638400) { + return false; + } + } + if (browsers.android) |version| { + if (version < 7864320) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .gradient_interpolation_hints => { + if (browsers.chrome) |version| { + if (version < 2621440) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 2359296) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 1769472) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 458752) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 458752) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 262144) { + return false; + } + } + if (browsers.android) |version| { + if (version < 2621440) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .border_image_repeat_round => { + if (browsers.chrome) |version| { + if (version < 1966080) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 786432) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 983040) { + return false; + } + } + if (browsers.ie) |version| { + if (version < 720896) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 1179648) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 590080) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 590592) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 131072) { + return false; + } + } + if (browsers.android) |version| { + if (version < 263168) { + return false; + } + } + }, + .border_image_repeat_space => { + if (browsers.chrome) |version| { + if (version < 3670016) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 786432) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 3276800) { + return false; + } + } + if (browsers.ie) |version| { + if (version < 720896) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 2818048) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 590080) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 590592) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 393216) { + return false; + } + } + if (browsers.android) |version| { + if (version < 3670016) { + return false; + } + } + }, + .font_size_rem => { + if (browsers.chrome) |version| { + if (version < 2752512) { + return false; + } + } + 
if (browsers.edge) |version| { + if (version < 786432) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 2031616) { + return false; + } + } + if (browsers.ie) |version| { + if (version < 589824) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 1835008) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 458752) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 458752) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 262144) { + return false; + } + } + if (browsers.android) |version| { + if (version < 2752512) { + return false; + } + } + }, + .font_size_x_x_x_large => { + if (browsers.chrome) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 3735552) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 1049600) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 1049600) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 786432) { + return false; + } + } + if (browsers.android) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .font_style_oblique_angle => { + if (browsers.chrome) |version| { + if (version < 4063232) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 3997696) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 3014656) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 721152) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 721664) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 524288) { + return false; + } + } + if (browsers.android) |version| { + if (version < 4063232) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .font_weight_number => { + if (browsers.chrome) |version| { + if (version < 4063232) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 1114112) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 3997696) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 3014656) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 720896) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 720896) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 524288) { + return false; + } + } + if (browsers.android) |version| { + if (version < 4063232) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .font_stretch_percentage => { + if (browsers.chrome) |version| { + if (version < 4063232) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 1179648) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 3997696) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 3014656) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 721152) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 721664) { + return false; + } + } + if 
(browsers.samsung) |version| { + if (version < 524288) { + return false; + } + } + if (browsers.android) |version| { + if (version < 4063232) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .light_dark => { + if (browsers.chrome) |version| { + if (version < 8060928) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 8060928) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 7864320) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 5373952) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 1115392) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 1115392) { + return false; + } + } + if (browsers.android) |version| { + if (version < 8060928) { + return false; + } + } + if (browsers.ie != null or browsers.samsung != null) { + return false; + } + }, + .accent_system_color => { + if (browsers.firefox) |version| { + if (version < 6750208) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 1049856) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 1049856) { + return false; + } + } + if (browsers.android != null or browsers.chrome != null or browsers.edge != null or browsers.ie != null or browsers.opera != null or browsers.samsung != null) { + return false; + } + }, + .animation_timeline_shorthand => { + if (browsers.chrome) |version| { + if (version < 7536640) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 7536640) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 5046272) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 1507328) { + return false; + } + } + if (browsers.android) |version| { + if (version < 7536640) { + return false; + } + } + if (browsers.firefox != null or browsers.ie != null or browsers.ios_saf != null or browsers.safari != null) { + return false; + } + }, + .q_unit => { + if (browsers.chrome) |version| { + if (version < 4128768) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 3211264) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 3014656) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 852224) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 852992) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 524288) { + return false; + } + } + if (browsers.android) |version| { + if (version < 4128768) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .cap_unit => { + if (browsers.chrome) |version| { + if (version < 7733248) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 7733248) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 6356992) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 1114624) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 1114624) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 1638400) { + return false; + } + } + if (browsers.android) |version| { + if (version < 7733248) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .ch_unit => { + if 
(browsers.chrome) |version| { + if (version < 1769472) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 786432) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 262144) { + return false; + } + } + if (browsers.ie) |version| { + if (version < 589824) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 983040) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 458752) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 458752) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 66816) { + return false; + } + } + if (browsers.android) |version| { + if (version < 263168) { + return false; + } + } + }, + .container_query_length_units => { + if (browsers.chrome) |version| { + if (version < 6881280) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 6881280) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 7208960) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 4718592) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 1048576) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 1048576) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 1310720) { + return false; + } + } + if (browsers.android) |version| { + if (version < 6881280) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .em_unit => { + if (browsers.chrome) |version| { + if (version < 1179648) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 786432) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 262144) { + return false; + } + } + if (browsers.ie) |version| { + if (version < 196608) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 655616) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 65536) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 65536) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 65536) { + return false; + } + } + if (browsers.android) |version| { + if (version < 65536) { + return false; + } + } + }, + .ex_unit, .circle_list_style_type, .decimal_list_style_type, .disc_list_style_type, .square_list_style_type => { + if (browsers.chrome) |version| { + if (version < 1179648) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 786432) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 262144) { + return false; + } + } + if (browsers.ie) |version| { + if (version < 262144) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 655616) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 65536) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 65536) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 65536) { + return false; + } + } + if (browsers.android) |version| { + if (version < 263168) { + return false; + } + } + }, + .ic_unit => { + if (browsers.chrome) |version| { + if (version < 6946816) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 6946816) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 6356992) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 4718592) { + return false; + 
} + } + if (browsers.safari) |version| { + if (version < 984064) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 984064) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 1310720) { + return false; + } + } + if (browsers.android) |version| { + if (version < 6946816) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .lh_unit => { + if (browsers.chrome) |version| { + if (version < 7143424) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 7143424) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 7864320) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 4849664) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 1049600) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 1049600) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 1376256) { + return false; + } + } + if (browsers.android) |version| { + if (version < 7143424) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .rcap_unit => { + if (browsers.chrome) |version| { + if (version < 7733248) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 7733248) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 1114624) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 1114624) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 1638400) { + return false; + } + } + if (browsers.android) |version| { + if (version < 7733248) { + return false; + } + } + if (browsers.firefox != null or browsers.ie != null) { + return false; + } + }, + .rch_unit, .rex_unit, .ric_unit => { + if (browsers.chrome) |version| { + if (version < 7274496) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 7274496) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 4915200) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 1114624) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 1114624) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 1441792) { + return false; + } + } + if (browsers.android) |version| { + if (version < 7274496) { + return false; + } + } + if (browsers.firefox != null or browsers.ie != null) { + return false; + } + }, + .rem_unit => { + if (browsers.chrome) |version| { + if (version < 1179648) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 786432) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 262144) { + return false; + } + } + if (browsers.ie) |version| { + if (version < 589824) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 786432) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 327680) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 262144) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 65536) { + return false; + } + } + if (browsers.android) |version| { + if (version < 131072) { + return false; + } + } + }, + .rlh_unit => { + if (browsers.chrome) |version| { + if (version < 7274496) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 7274496) { + return false; + 
} + } + if (browsers.firefox) |version| { + if (version < 7864320) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 4915200) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 1049600) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 1049600) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 1441792) { + return false; + } + } + if (browsers.android) |version| { + if (version < 7274496) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .vb_unit, .vi_unit, .viewport_percentage_units_dynamic, .viewport_percentage_units_large, .viewport_percentage_units_small => { + if (browsers.chrome) |version| { + if (version < 7077888) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 7077888) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 6619136) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 4784128) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 984064) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 984064) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 1376256) { + return false; + } + } + if (browsers.android) |version| { + if (version < 7077888) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .vh_unit, .vw_unit => { + if (browsers.chrome) |version| { + if (version < 1638400) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 786432) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 1245184) { + return false; + } + } + if (browsers.ie) |version| { + if (version < 589824) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 917504) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 393216) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 393216) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 66816) { + return false; + } + } + if (browsers.android) |version| { + if (version < 263168) { + return false; + } + } + }, + .vmax_unit => { + if (browsers.chrome) |version| { + if (version < 1703936) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 1048576) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 1245184) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 917504) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 458752) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 458752) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 66816) { + return false; + } + } + if (browsers.android) |version| { + if (version < 66816) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .vmin_unit => { + if (browsers.chrome) |version| { + if (version < 1703936) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 786432) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 1245184) { + return false; + } + } + if (browsers.ie) |version| { + if (version < 655360) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 917504) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 458752) { + return false; + } + } + if (browsers.ios_saf) 
|version| { + if (version < 458752) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 66816) { + return false; + } + } + if (browsers.android) |version| { + if (version < 263168) { + return false; + } + } + }, + .conic_gradient, .repeating_conic_gradient => { + if (browsers.chrome) |version| { + if (version < 4521984) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 5439488) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 3145728) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 786688) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 786944) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 655360) { + return false; + } + } + if (browsers.android) |version| { + if (version < 4521984) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .linear_gradient, .repeating_linear_gradient => { + if (browsers.chrome) |version| { + if (version < 1179648) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 786432) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 262144) { + return false; + } + } + if (browsers.ie) |version| { + if (version < 655360) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 720896) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 327936) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 327680) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 65536) { + return false; + } + } + if (browsers.android) |version| { + if (version < 2424832) { + return false; + } + } + }, + .radial_gradient => { + if (browsers.chrome) |version| { + if (version < 1179648) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 786432) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 262144) { + return false; + } + } + if (browsers.ie) |version| { + if (version < 655360) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 786432) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 327936) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 327680) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 65536) { + return false; + } + } + if (browsers.android) |version| { + if (version < 2424832) { + return false; + } + } + }, + .repeating_radial_gradient => { + if (browsers.chrome) |version| { + if (version < 1179648) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 786432) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 655360) { + return false; + } + } + if (browsers.ie) |version| { + if (version < 655360) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 786432) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 327936) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 327680) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 65536) { + return false; + } + } + if (browsers.android) |version| { + if (version < 263168) { + return false; + } + } + }, + .afar_list_style_type, .amharic_list_style_type, .amharic_abegede_list_style_type, .ethiopic_list_style_type, 
.ethiopic_abegede_list_style_type, .ethiopic_abegede_am_et_list_style_type, .ethiopic_abegede_gez_list_style_type, .ethiopic_abegede_ti_er_list_style_type, .ethiopic_abegede_ti_et_list_style_type, .ethiopic_halehame_aa_er_list_style_type, .ethiopic_halehame_aa_et_list_style_type, .ethiopic_halehame_am_et_list_style_type, .ethiopic_halehame_gez_list_style_type, .ethiopic_halehame_om_et_list_style_type, .ethiopic_halehame_sid_et_list_style_type, .ethiopic_halehame_so_et_list_style_type, .ethiopic_halehame_tig_list_style_type, .lower_hexadecimal_list_style_type, .lower_norwegian_list_style_type, .upper_hexadecimal_list_style_type, .upper_norwegian_list_style_type => { + if (browsers.chrome) |version| { + if (version < 1179648) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 5963776) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 917504) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 327680) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 262656) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 65536) { + return false; + } + } + if (browsers.android) |version| { + if (version < 196608) { + return false; + } + } + if (browsers.firefox != null or browsers.ie != null) { + return false; + } + }, + .arabic_indic_list_style_type, .bengali_list_style_type, .cjk_earthly_branch_list_style_type, .cjk_heavenly_stem_list_style_type, .devanagari_list_style_type, .gujarati_list_style_type, .gurmukhi_list_style_type, .kannada_list_style_type, .khmer_list_style_type, .lao_list_style_type, .malayalam_list_style_type, .myanmar_list_style_type, .oriya_list_style_type, .persian_list_style_type, .telugu_list_style_type, .thai_list_style_type => { + if (browsers.chrome) |version| { + if (version < 1179648) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 262144) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 917504) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 327680) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 262656) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 65536) { + return false; + } + } + if (browsers.android) |version| { + if (version < 263168) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .armenian_list_style_type, .decimal_leading_zero_list_style_type, .georgian_list_style_type, .lower_alpha_list_style_type, .lower_greek_list_style_type, .lower_roman_list_style_type, .upper_alpha_list_style_type, .upper_latin_list_style_type, .upper_roman_list_style_type => { + if (browsers.chrome) |version| { + if (version < 1179648) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 786432) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 262144) { + return false; + } + } + if (browsers.ie) |version| { + if (version < 524288) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 655616) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 65536) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 65536) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 65536) { + return false; + } + } + if (browsers.android) |version| { + if (version < 263168) { + return 
false; + } + } + }, + .asterisks_list_style_type, .footnotes_list_style_type => { + if (browsers.chrome) |version| { + if (version < 1179648) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 5963776) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 917504) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 327936) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 327680) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 65536) { + return false; + } + } + if (browsers.android) |version| { + if (version < 263168) { + return false; + } + } + if (browsers.firefox != null or browsers.ie != null) { + return false; + } + }, + .binary_list_style_type, .octal_list_style_type, .oromo_list_style_type, .sidama_list_style_type, .somali_list_style_type, .tigre_list_style_type, .tigrinya_er_list_style_type, .tigrinya_er_abegede_list_style_type, .tigrinya_et_list_style_type, .tigrinya_et_abegede_list_style_type => { + if (browsers.chrome) |version| { + if (version < 1179648) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 5963776) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 917504) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 327680) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 262656) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 65536) { + return false; + } + } + if (browsers.android) |version| { + if (version < 263168) { + return false; + } + } + if (browsers.firefox != null or browsers.ie != null) { + return false; + } + }, + .cambodian_list_style_type, .mongolian_list_style_type, .tibetan_list_style_type => { + if (browsers.chrome) |version| { + if (version < 1179648) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 2162688) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 917504) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 327680) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 262656) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 65536) { + return false; + } + } + if (browsers.android) |version| { + if (version < 263168) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .cjk_decimal_list_style_type => { + if (browsers.chrome) |version| { + if (version < 5963776) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 5963776) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 1835008) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 4194304) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 983040) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 983040) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 1048576) { + return false; + } + } + if (browsers.android) |version| { + if (version < 5963776) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .disclosure_closed_list_style_type, .disclosure_open_list_style_type => { + if (browsers.chrome) |version| { + if (version < 5832704) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 5832704) { + 
return false; + } + } + if (browsers.firefox) |version| { + if (version < 2162688) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 4128768) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 983040) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 983040) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 983040) { + return false; + } + } + if (browsers.android) |version| { + if (version < 5832704) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .ethiopic_numeric_list_style_type, .japanese_formal_list_style_type, .japanese_informal_list_style_type, .tamil_list_style_type => { + if (browsers.chrome) |version| { + if (version < 5963776) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 5963776) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 262144) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 4194304) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 983040) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 983040) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 1048576) { + return false; + } + } + if (browsers.android) |version| { + if (version < 5963776) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .hebrew_list_style_type, .hiragana_list_style_type, .hiragana_iroha_list_style_type, .katakana_list_style_type, .katakana_iroha_list_style_type, .auto_size => { + if (browsers.chrome) |version| { + if (version < 1179648) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 786432) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 262144) { + return false; + } + } + if (browsers.ie) |version| { + if (version < 720896) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 917504) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 65536) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 65536) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 65536) { + return false; + } + } + if (browsers.android) |version| { + if (version < 263168) { + return false; + } + } + }, + .korean_hangul_formal_list_style_type, .korean_hanja_formal_list_style_type, .korean_hanja_informal_list_style_type => { + if (browsers.chrome) |version| { + if (version < 2949120) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 1835008) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 2097152) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 983040) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 983040) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 327680) { + return false; + } + } + if (browsers.android) |version| { + if (version < 2949120) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .lower_armenian_list_style_type, .upper_armenian_list_style_type => { + if (browsers.chrome) |version| { + if (version < 1179648) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 
2162688) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 917504) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 327936) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 327680) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 65536) { + return false; + } + } + if (browsers.android) |version| { + if (version < 263168) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .lower_latin_list_style_type => { + if (browsers.chrome) |version| { + if (version < 1179648) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 786432) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 262144) { + return false; + } + } + if (browsers.ie) |version| { + if (version < 524288) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 655616) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 65536) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 65536) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 327680) { + return false; + } + } + if (browsers.android) |version| { + if (version < 263168) { + return false; + } + } + }, + .none_list_style_type => { + if (browsers.chrome) |version| { + if (version < 1179648) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 786432) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.ie) |version| { + if (version < 720896) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 917504) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 65536) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 65536) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 65536) { + return false; + } + } + if (browsers.android) |version| { + if (version < 263168) { + return false; + } + } + }, + .simp_chinese_formal_list_style_type, .simp_chinese_informal_list_style_type, .trad_chinese_formal_list_style_type, .trad_chinese_informal_list_style_type => { + if (browsers.chrome) |version| { + if (version < 2949120) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 262144) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 2097152) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 983040) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 983040) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 327680) { + return false; + } + } + if (browsers.android) |version| { + if (version < 2949120) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .string_list_style_type => { + if (browsers.chrome) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 2555904) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 3735552) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 917760) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 918784) { + return false; + } + 
} + if (browsers.samsung) |version| { + if (version < 786432) { + return false; + } + } + if (browsers.android) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .symbols_list_style_type => { + if (browsers.firefox) |version| { + if (version < 2293760) { + return false; + } + } + if (browsers.android != null or browsers.chrome != null or browsers.edge != null or browsers.ie != null or browsers.ios_saf != null or browsers.opera != null or browsers.safari != null or browsers.samsung != null) { + return false; + } + }, + .anchor_size_size => { + if (browsers.chrome) |version| { + if (version < 8192000) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 8192000) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 5439488) { + return false; + } + } + if (browsers.android) |version| { + if (version < 8192000) { + return false; + } + } + if (browsers.firefox != null or browsers.ie != null or browsers.ios_saf != null or browsers.safari != null or browsers.samsung != null) { + return false; + } + }, + .fit_content_size => { + if (browsers.chrome) |version| { + if (version < 1638400) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 262144) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 917504) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 458752) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 458752) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 66816) { + return false; + } + } + if (browsers.android) |version| { + if (version < 263168) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .is_animatable_size => { + if (browsers.chrome) |version| { + if (version < 1703936) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 786432) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 1048576) { + return false; + } + } + if (browsers.ie) |version| { + if (version < 720896) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 917504) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 458752) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 458752) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 66816) { + return false; + } + } + if (browsers.android) |version| { + if (version < 263168) { + return false; + } + } + }, + .max_content_size => { + if (browsers.chrome) |version| { + if (version < 1638400) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 262144) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 2818048) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 720896) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 720896) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 66816) { + return false; + } + } + if (browsers.android) |version| { + if (version < 263168) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .min_content_size => { + if (browsers.chrome) |version| { + if (version < 3014656) { + return false; + } + } + 
if (browsers.edge) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.firefox) |version| { + if (version < 262144) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 2162688) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 720896) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 720896) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 327680) { + return false; + } + } + if (browsers.android) |version| { + if (version < 3014656) { + return false; + } + } + if (browsers.ie != null) { + return false; + } + }, + .webkit_fill_available_size => { + if (browsers.chrome) |version| { + if (version < 1638400) { + return false; + } + } + if (browsers.edge) |version| { + if (version < 5177344) { + return false; + } + } + if (browsers.opera) |version| { + if (version < 917504) { + return false; + } + } + if (browsers.safari) |version| { + if (version < 458752) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 458752) { + return false; + } + } + if (browsers.samsung) |version| { + if (version < 327680) { + return false; + } + } + if (browsers.android) |version| { + if (version < 263168) { + return false; + } + } + if (browsers.firefox != null or browsers.ie != null) { + return false; + } + }, + .moz_available_size => { + if (browsers.firefox) |version| { + if (version < 262144) { + return false; + } + } + if (browsers.android != null or browsers.chrome != null or browsers.edge != null or browsers.ie != null or browsers.ios_saf != null or browsers.opera != null or browsers.safari != null or browsers.samsung != null) { + return false; + } + }, + .p3_colors, .lang_selector_list => { + if (browsers.safari) |version| { + if (version < 655616) { + return false; + } + } + if (browsers.ios_saf) |version| { + if (version < 656128) { + return false; + } + } + if (browsers.android != null or browsers.chrome != null or browsers.edge != null or browsers.firefox != null or browsers.ie != null or browsers.opera != null or browsers.samsung != null) { + return false; + } + }, + } + return true; + } + + pub fn isPartiallyCompatible(this: *const Feature, targets: Browsers) bool { + var browsers = Browsers{}; + if (targets.android != null) { + browsers.android = targets.android; + if (this.isCompatible(browsers)) { + return true; + } + browsers.android = null; + } + if (targets.chrome != null) { + browsers.chrome = targets.chrome; + if (this.isCompatible(browsers)) { + return true; + } + browsers.chrome = null; + } + if (targets.edge != null) { + browsers.edge = targets.edge; + if (this.isCompatible(browsers)) { + return true; + } + browsers.edge = null; + } + if (targets.firefox != null) { + browsers.firefox = targets.firefox; + if (this.isCompatible(browsers)) { + return true; + } + browsers.firefox = null; + } + if (targets.ie != null) { + browsers.ie = targets.ie; + if (this.isCompatible(browsers)) { + return true; + } + browsers.ie = null; + } + if (targets.ios_saf != null) { + browsers.ios_saf = targets.ios_saf; + if (this.isCompatible(browsers)) { + return true; + } + browsers.ios_saf = null; + } + if (targets.opera != null) { + browsers.opera = targets.opera; + if (this.isCompatible(browsers)) { + return true; + } + browsers.opera = null; + } + if (targets.safari != null) { + browsers.safari = targets.safari; + if (this.isCompatible(browsers)) { + return true; + } + browsers.safari = null; + } + if (targets.samsung != null) { + browsers.samsung = 
targets.samsung; + if (this.isCompatible(browsers)) { + return true; + } + browsers.samsung = null; + } + + return false; + } +}; diff --git a/src/css/context.zig b/src/css/context.zig new file mode 100644 index 0000000000..db6b3964a2 --- /dev/null +++ b/src/css/context.zig @@ -0,0 +1,307 @@ +const std = @import("std"); +const Allocator = std.mem.Allocator; +const bun = @import("root").bun; +const logger = bun.logger; +const Log = logger.Log; + +pub const css = @import("./css_parser.zig"); + +const ArrayList = std.ArrayListUnmanaged; + +const MediaRule = css.css_rules.media.MediaRule; +const MediaQuery = css.media_query.MediaQuery; +const MediaCondition = css.media_query.MediaCondition; +const MediaList = css.media_query.MediaList; +const MediaFeature = css.media_query.MediaFeature; +const MediaFeatureName = css.media_query.MediaFeatureName; +const MediaFeatureValue = css.media_query.MediaFeatureValue; +const MediaFeatureId = css.media_query.MediaFeatureId; + +const UnparsedProperty = css.css_properties.custom.UnparsedProperty; + +pub const SupportsEntry = struct { + condition: css.SupportsCondition, + declarations: ArrayList(css.Property), + important_declarations: ArrayList(css.Property), + + pub fn deinit(this: *@This(), allocator: std.mem.Allocator) void { + _ = this; // autofix + _ = allocator; // autofix + @panic(css.todo_stuff.depth); + } +}; + +pub const DeclarationContext = enum { + none, + style_rule, + keyframes, + style_attribute, +}; + +pub const PropertyHandlerContext = struct { + allocator: Allocator, + targets: css.targets.Targets, + is_important: bool, + supports: ArrayList(SupportsEntry), + ltr: ArrayList(css.Property), + rtl: ArrayList(css.Property), + dark: ArrayList(css.Property), + context: DeclarationContext, + unused_symbols: *const std.StringArrayHashMapUnmanaged(void), + + pub fn new( + allocator: Allocator, + targets: css.targets.Targets, + unused_symbols: *const std.StringArrayHashMapUnmanaged(void), + ) PropertyHandlerContext { + return PropertyHandlerContext{ + .allocator = allocator, + .targets = targets, + .is_important = false, + .supports = ArrayList(SupportsEntry){}, + .ltr = ArrayList(css.Property){}, + .rtl = ArrayList(css.Property){}, + .dark = ArrayList(css.Property){}, + .context = DeclarationContext.none, + .unused_symbols = unused_symbols, + }; + } + + pub fn child(this: *const PropertyHandlerContext, context: DeclarationContext) PropertyHandlerContext { + return PropertyHandlerContext{ + .allocator = this.allocator, + .targets = this.targets, + .is_important = false, + .supports = .{}, + .ltr = .{}, + .rtl = .{}, + .dark = .{}, + .context = context, + .unused_symbols = this.unused_symbols, + }; + } + + pub fn addLogicalRule(this: *@This(), allocator: Allocator, ltr: css.Property, rtl: css.Property) void { + this.ltr.append(allocator, ltr) catch unreachable; + this.rtl.append(allocator, rtl) catch unreachable; + } + + pub fn shouldCompileLogical(this: *const @This(), feature: css.compat.Feature) bool { + // Don't convert logical properties in style attributes because + // our fallbacks rely on extra rules to define --ltr and --rtl. 
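        //
        // Illustrative example of the rule-based fallback (see getAdditionalRules()
        // below, which appends a :dir() pseudo-class to the cloned selectors):
        // inside a style rule, a logical declaration such as
        //
        //   .box { inset-inline-start: 1em }
        //
        // can be lowered to physical rules along the lines of
        //
        //   .box:dir(ltr) { left: 1em }
        //   .box:dir(rtl) { right: 1em }
        //
        // A style attribute has no selector to attach :dir() to and no stylesheet
        // in which to emit the extra rules, hence the early return below.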
+ if (this.context == DeclarationContext.style_attribute) return false; + + return this.targets.shouldCompileLogical(feature); + } + + pub fn getSupportsRules( + this: *const @This(), + comptime T: type, + style_rule: *const css.StyleRule(T), + ) ArrayList(css.CssRule(T)) { + if (this.supports.items.len == 0) { + return .{}; + } + + var dest = ArrayList(css.CssRule(T)).initCapacity( + this.allocator, + this.supports.items.len, + ) catch bun.outOfMemory(); + + for (this.supports.items) |*entry| { + dest.appendAssumeCapacity(css.CssRule(T){ + .supports = css.SupportsRule(T){ + .condition = entry.condition.deepClone(this.allocator), + .rules = css.CssRuleList(T){ + .v = v: { + var v = ArrayList(css.CssRule(T)).initCapacity(this.allocator, 1) catch bun.outOfMemory(); + + v.appendAssumeCapacity(.{ .style = css.StyleRule(T){ + .selectors = style_rule.selectors.deepClone(this.allocator), + .vendor_prefix = css.VendorPrefix{ .none = true }, + .declarations = css.DeclarationBlock{ + .declarations = css.deepClone(css.Property, this.allocator, &entry.declarations), + .important_declarations = css.deepClone(css.Property, this.allocator, &entry.important_declarations), + }, + .rules = css.CssRuleList(T){}, + .loc = style_rule.loc, + } }); + + break :v v; + }, + }, + .loc = style_rule.loc, + }, + }); + } + + return dest; + } + + pub fn getAdditionalRules( + this: *const @This(), + comptime T: type, + style_rule: *const css.StyleRule(T), + ) ArrayList(css.CssRule(T)) { + // TODO: :dir/:lang raises the specificity of the selector. Use :where to lower it? + var dest = ArrayList(css.CssRule(T)){}; + + if (this.ltr.items.len > 0) { + getAdditionalRulesHelper(this, T, "ltr", "ltr", style_rule, &dest); + } + + if (this.rtl.items.len > 0) { + getAdditionalRulesHelper(this, T, "rtl", "rtl", style_rule, &dest); + } + + if (this.dark.items.len > 0) { + dest.append(this.allocator, css.CssRule(T){ + .media = MediaRule(T){ + .query = MediaList{ + .media_queries = brk: { + var list = ArrayList(MediaQuery).initCapacity( + this.allocator, + 1, + ) catch bun.outOfMemory(); + + list.appendAssumeCapacity(MediaQuery{ + .qualifier = null, + .media_type = .all, + .condition = MediaCondition{ + .feature = MediaFeature{ + .plain = .{ + .name = .{ .standard = MediaFeatureId.@"prefers-color-scheme" }, + .value = .{ .ident = .{ .v = "dark " } }, + }, + }, + }, + }); + + break :brk list; + }, + }, + .rules = brk: { + var list: css.CssRuleList(T) = .{}; + + list.v.append(this.allocator, css.CssRule(T){ + .style = css.StyleRule(T){ + .selectors = style_rule.selectors.deepClone(this.allocator), + .vendor_prefix = css.VendorPrefix{ .none = true }, + .declarations = css.DeclarationBlock{ + .declarations = css.deepClone(css.Property, this.allocator, &this.dark), + .important_declarations = .{}, + }, + .rules = .{}, + .loc = style_rule.loc, + }, + }) catch bun.outOfMemory(); + + break :brk list; + }, + .loc = style_rule.loc, + }, + }) catch bun.outOfMemory(); + } + + return dest; + } + pub fn getAdditionalRulesHelper( + this: *const @This(), + comptime T: type, + comptime dir: []const u8, + comptime decls: []const u8, + sty: *const css.StyleRule(T), + dest: *ArrayList(css.CssRule(T)), + ) void { + var selectors = sty.selectors.deepClone(this.allocator); + for (selectors.v.slice_mut()) |*selector| { + selector.append(this.allocator, css.Component{ + .non_ts_pseudo_class = css.PseudoClass{ + .dir = .{ .direction = @field(css.selector.parser.Direction, dir) }, + }, + }); + } + + const rule = css.StyleRule(T){ + .selectors = selectors, + 
.vendor_prefix = css.VendorPrefix{ .none = true }, + .declarations = css.DeclarationBlock{ + .declarations = css.deepClone(css.Property, this.allocator, &@field(this, decls)), + .important_declarations = .{}, + }, + .rules = .{}, + .loc = sty.loc, + }; + + dest.append(this.allocator, .{ .style = rule }) catch bun.outOfMemory(); + } + + pub fn reset(this: *@This()) void { + for (this.supports.items) |*supp| { + supp.deinit(this.allocator); + } + this.supports.clearRetainingCapacity(); + + for (this.ltr.items) |*ltr| { + ltr.deinit(this.allocator); + } + this.ltr.clearRetainingCapacity(); + + for (this.rtl.items) |*rtl| { + rtl.deinit(this.allocator); + } + this.rtl.clearRetainingCapacity(); + + for (this.dark.items) |*dark| { + dark.deinit(this.allocator); + } + this.dark.clearRetainingCapacity(); + } + + pub fn addConditionalProperty(this: *@This(), condition: css.SupportsCondition, property: css.Property) void { + if (this.context != DeclarationContext.style_rule) return; + + if (brk: { + for (this.supports.items) |*supp| { + if (condition.eql(&supp.condition)) break :brk supp; + } + break :brk null; + }) |entry| { + if (this.is_important) { + entry.important_declarations.append(this.allocator, property) catch bun.outOfMemory(); + } else { + entry.declarations.append(this.allocator, property) catch bun.outOfMemory(); + } + } else { + var important_declarations = ArrayList(css.Property){}; + var declarations = ArrayList(css.Property){}; + if (this.is_important) { + important_declarations.append(this.allocator, property) catch bun.outOfMemory(); + } else { + declarations.append(this.allocator, property) catch bun.outOfMemory(); + } + this.supports.append(this.allocator, SupportsEntry{ + .condition = condition, + .declarations = declarations, + .important_declarations = important_declarations, + }) catch bun.outOfMemory(); + } + } + + pub fn addUnparsedFallbacks(this: *@This(), unparsed: *UnparsedProperty) void { + if (this.context != DeclarationContext.style_rule and this.context != DeclarationContext.style_attribute) { + return; + } + + const fallbacks = unparsed.value.getFallbacks(this.allocator, this.targets); + + for (fallbacks.slice()) |condition_and_fallback| { + this.addConditionalProperty(condition_and_fallback[0], css.Property{ + .unparsed = UnparsedProperty{ + .property_id = unparsed.property_id.deepClone(this.allocator), + .value = condition_and_fallback[1], + }, + }); + } + } +}; diff --git a/src/css/css_internals.zig b/src/css/css_internals.zig new file mode 100644 index 0000000000..578a6dfa99 --- /dev/null +++ b/src/css/css_internals.zig @@ -0,0 +1,277 @@ +const bun = @import("root").bun; +const std = @import("std"); +const builtin = @import("builtin"); +const Arena = @import("../mimalloc_arena.zig").Arena; +const Allocator = std.mem.Allocator; +const ArrayList = std.ArrayList; +const JSC = bun.JSC; +const JSValue = bun.JSC.JSValue; +const JSPromise = bun.JSC.JSPromise; +const JSGlobalObject = bun.JSC.JSGlobalObject; + +threadlocal var arena_: ?Arena = null; + +const TestKind = enum { + normal, + minify, + prefix, +}; + +pub fn minifyTestWithOptions(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { + return testingImpl(globalThis, callframe, .minify); +} + +pub fn prefixTestWithOptions(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { + return testingImpl(globalThis, callframe, .prefix); +} + +pub fn testWithOptions(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { + 
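    // The three exported wrappers differ only in the TestKind passed to
    // testingImpl(): .minify turns on PrinterOptions.minify, while .normal and
    // .prefix both print un-minified output (.prefix is intended for the
    // vendor-prefixing tests).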
return testingImpl(globalThis, callframe, .normal); +} + +pub fn testingImpl(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame, comptime test_kind: TestKind) JSC.JSValue { + var arena = arena_ orelse brk: { + break :brk Arena.init() catch @panic("oopsie arena no good"); + }; + defer arena.reset(); + const alloc = arena.allocator(); + + const arguments_ = callframe.arguments(3); + var arguments = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments_.slice()); + const source_arg: JSC.JSValue = arguments.nextEat() orelse { + globalThis.throw("minifyTestWithOptions: expected 2 arguments, got 0", .{}); + return .undefined; + }; + if (!source_arg.isString()) { + globalThis.throw("minifyTestWithOptions: expected source to be a string", .{}); + return .undefined; + } + const source_bunstr = source_arg.toBunString(globalThis); + defer source_bunstr.deref(); + const source = source_bunstr.toUTF8(bun.default_allocator); + defer source.deinit(); + + const expected_arg = arguments.nextEat() orelse { + globalThis.throw("minifyTestWithOptions: expected 2 arguments, got 1", .{}); + return .undefined; + }; + if (!expected_arg.isString()) { + globalThis.throw("minifyTestWithOptions: expected `expected` arg to be a string", .{}); + return .undefined; + } + const expected_bunstr = expected_arg.toBunString(globalThis); + defer expected_bunstr.deref(); + const expected = expected_bunstr.toUTF8(bun.default_allocator); + defer expected.deinit(); + + const options_arg = arguments.nextEat(); + + var log = bun.logger.Log.init(alloc); + defer log.deinit(); + + const parser_options = parser_options: { + const opts = bun.css.ParserOptions.default(alloc, &log); + // if (test_kind == .prefix) break :parser_options opts; + + if (options_arg) |optargs| { + if (optargs.isObject()) { + // minify_options.targets.browsers = targetsFromJS(globalThis, optarg); + } + } + + break :parser_options opts; + }; + + var import_records = bun.BabyList(bun.ImportRecord){}; + switch (bun.css.StyleSheet(bun.css.DefaultAtRule).parse( + alloc, + source.slice(), + parser_options, + &import_records, + )) { + .result => |stylesheet_| { + var stylesheet = stylesheet_; + var minify_options: bun.css.MinifyOptions = bun.css.MinifyOptions.default(); + if (options_arg) |optarg| { + if (optarg.isObject()) { + minify_options.targets.browsers = targetsFromJS(globalThis, optarg); + } + } + _ = stylesheet.minify(alloc, minify_options).assert(); + + const result = stylesheet.toCss(alloc, bun.css.PrinterOptions{ + .minify = switch (test_kind) { + .minify => true, + .normal => false, + .prefix => false, + }, + .targets = .{ + .browsers = minify_options.targets.browsers, + }, + }, &import_records) catch |e| { + bun.handleErrorReturnTrace(e, @errorReturnTrace()); + return .undefined; + }; + + return bun.String.fromBytes(result.code).toJS(globalThis); + }, + .err => |err| { + if (log.hasAny()) { + return log.toJS(globalThis, bun.default_allocator, "parsing failed:"); + } + globalThis.throw("parsing failed: {}", .{err.kind}); + return .undefined; + }, + } +} + +fn targetsFromJS(globalThis: *JSC.JSGlobalObject, jsobj: JSValue) bun.css.targets.Browsers { + var targets = bun.css.targets.Browsers{}; + + if (jsobj.getTruthy(globalThis, "android")) |val| { + if (val.isInt32()) { + if (val.getNumber()) |value| { + targets.android = @intFromFloat(value); + } + } + } + if (jsobj.getTruthy(globalThis, "chrome")) |val| { + if (val.isInt32()) { + if (val.getNumber()) |value| { + targets.chrome = @intFromFloat(value); + } + } + } + if 
(jsobj.getTruthy(globalThis, "edge")) |val| { + if (val.isInt32()) { + if (val.getNumber()) |value| { + targets.edge = @intFromFloat(value); + } + } + } + if (jsobj.getTruthy(globalThis, "firefox")) |val| { + if (val.isInt32()) { + if (val.getNumber()) |value| { + targets.firefox = @intFromFloat(value); + } + } + } + if (jsobj.getTruthy(globalThis, "ie")) |val| { + if (val.isInt32()) { + if (val.getNumber()) |value| { + targets.ie = @intFromFloat(value); + } + } + } + if (jsobj.getTruthy(globalThis, "ios_saf")) |val| { + if (val.isInt32()) { + if (val.getNumber()) |value| { + targets.ios_saf = @intFromFloat(value); + } + } + } + if (jsobj.getTruthy(globalThis, "opera")) |val| { + if (val.isInt32()) { + if (val.getNumber()) |value| { + targets.opera = @intFromFloat(value); + } + } + } + if (jsobj.getTruthy(globalThis, "safari")) |val| { + if (val.isInt32()) { + if (val.getNumber()) |value| { + targets.safari = @intFromFloat(value); + } + } + } + if (jsobj.getTruthy(globalThis, "samsung")) |val| { + if (val.isInt32()) { + if (val.getNumber()) |value| { + targets.samsung = @intFromFloat(value); + } + } + } + + return targets; +} + +pub fn attrTest(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { + var arena = arena_ orelse brk: { + break :brk Arena.init() catch @panic("oopsie arena no good"); + }; + defer arena.reset(); + const alloc = arena.allocator(); + + const arguments_ = callframe.arguments(4); + var arguments = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments_.slice()); + const source_arg: JSC.JSValue = arguments.nextEat() orelse { + globalThis.throw("attrTest: expected 3 arguments, got 0", .{}); + return .undefined; + }; + if (!source_arg.isString()) { + globalThis.throw("attrTest: expected source to be a string", .{}); + return .undefined; + } + const source_bunstr = source_arg.toBunString(globalThis); + defer source_bunstr.deref(); + const source = source_bunstr.toUTF8(bun.default_allocator); + defer source.deinit(); + + const expected_arg = arguments.nextEat() orelse { + globalThis.throw("attrTest: expected 3 arguments, got 1", .{}); + return .undefined; + }; + if (!expected_arg.isString()) { + globalThis.throw("attrTest: expected `expected` arg to be a string", .{}); + return .undefined; + } + const expected_bunstr = expected_arg.toBunString(globalThis); + defer expected_bunstr.deref(); + const expected = expected_bunstr.toUTF8(bun.default_allocator); + defer expected.deinit(); + + const minify_arg: JSC.JSValue = arguments.nextEat() orelse { + globalThis.throw("attrTest: expected 3 arguments, got 2", .{}); + return .undefined; + }; + const minify = minify_arg.isBoolean() and minify_arg.toBoolean(); + + var targets: bun.css.targets.Targets = .{}; + if (arguments.nextEat()) |arg| { + if (arg.isObject()) { + targets.browsers = targetsFromJS(globalThis, arg); + } + } + + var log = bun.logger.Log.init(alloc); + defer log.deinit(); + + const parser_options = bun.css.ParserOptions.default(alloc, &log); + + var import_records = bun.BabyList(bun.ImportRecord){}; + switch (bun.css.StyleAttribute.parse(alloc, source.slice(), parser_options, &import_records)) { + .result => |stylesheet_| { + var stylesheet = stylesheet_; + var minify_options: bun.css.MinifyOptions = bun.css.MinifyOptions.default(); + minify_options.targets = targets; + stylesheet.minify(alloc, minify_options); + + const result = stylesheet.toCss(alloc, bun.css.PrinterOptions{ + .minify = minify, + .targets = targets, + }, &import_records) catch |e| { + 
bun.handleErrorReturnTrace(e, @errorReturnTrace()); + return .undefined; + }; + + return bun.String.fromBytes(result.code).toJS(globalThis); + }, + .err => |err| { + if (log.hasAny()) { + return log.toJS(globalThis, bun.default_allocator, "parsing failed:"); + } + globalThis.throw("parsing failed: {}", .{err.kind}); + return .undefined; + }, + } +} diff --git a/src/css/css_modules.zig b/src/css/css_modules.zig new file mode 100644 index 0000000000..14767a4a8c --- /dev/null +++ b/src/css/css_modules.zig @@ -0,0 +1,406 @@ +const std = @import("std"); +const Allocator = std.mem.Allocator; +const bun = @import("root").bun; +const logger = bun.logger; +const Log = logger.Log; + +pub const css = @import("./css_parser.zig"); +pub const css_values = @import("./values/values.zig"); +const DashedIdent = css_values.ident.DashedIdent; +const Ident = css_values.ident.Ident; +pub const Error = css.Error; +const PrintErr = css.PrintErr; + +const ArrayList = std.ArrayListUnmanaged; + +pub const CssModule = struct { + config: *const Config, + sources: *const ArrayList([]const u8), + hashes: ArrayList([]const u8), + exports_by_source_index: ArrayList(CssModuleExports), + references: *CssModuleReferences, + + pub fn new( + allocator: Allocator, + config: *const Config, + sources: *const ArrayList([]const u8), + project_root: ?[]const u8, + references: *CssModuleReferences, + ) CssModule { + const hashes = hashes: { + var hashes = ArrayList([]const u8).initCapacity(allocator, sources.items.len) catch bun.outOfMemory(); + for (sources.items) |path| { + var alloced = false; + const source = source: { + if (project_root) |root| { + if (bun.path.Platform.auto.isAbsolute(root)) { + alloced = true; + // TODO: should we use this allocator or something else + break :source allocator.dupe(u8, bun.path.relative(root, path)) catch bun.outOfMemory(); + } + } + break :source path; + }; + defer if (alloced) allocator.free(source); + hashes.appendAssumeCapacity(hash( + allocator, + "{s}", + .{source}, + config.pattern.segments.at(0).* == .hash, + )); + } + break :hashes hashes; + }; + const exports_by_source_index = exports_by_source_index: { + var exports_by_source_index = ArrayList(CssModuleExports).initCapacity(allocator, sources.items.len) catch bun.outOfMemory(); + exports_by_source_index.appendNTimesAssumeCapacity(CssModuleExports{}, sources.items.len); + break :exports_by_source_index exports_by_source_index; + }; + return CssModule{ + .config = config, + .sources = sources, + .references = references, + .hashes = hashes, + .exports_by_source_index = exports_by_source_index, + }; + } + + pub fn deinit(this: *CssModule) void { + _ = this; // autofix + // TODO: deinit + } + + pub fn referenceDashed( + this: *CssModule, + name: []const u8, + from: *const ?css.css_properties.css_modules.Specifier, + source_index: u32, + ) ?[]const u8 { + _ = this; // autofix + _ = name; // autofix + _ = from; // autofix + _ = source_index; // autofix + @panic(css.todo_stuff.depth); + } + + pub fn handleComposes( + this: *CssModule, + allocator: Allocator, + selectors: *const css.selector.parser.SelectorList, + composes: *const css.css_properties.css_modules.Composes, + source_index: u32, + ) css.Maybe(void, css.PrinterErrorKind) { + for (selectors.v.slice()) |*sel| { + if (sel.len() == 1) { + const component: *const css.selector.parser.Component = &sel.components.items[0]; + switch (component.*) { + .class => |id| { + for (composes.names.slice()) |name| { + const reference: CssModuleReference = if (composes.from) |*specifier| + switch 
(specifier.*) { + .source_index => |dep_source_index| { + if (this.exports_by_source_index.items[dep_source_index].get(name.v)) |entry| { + const entry_name = entry.name; + const composes2 = &entry.composes; + const @"export" = this.exports_by_source_index.items[source_index].getPtr(id.v).?; + + @"export".composes.append(allocator, .{ .local = .{ .name = entry_name } }) catch bun.outOfMemory(); + @"export".composes.appendSlice(allocator, composes2.items) catch bun.outOfMemory(); + } + continue; + }, + .global => CssModuleReference{ .global = .{ .name = name.v } }, + .file => |file| CssModuleReference{ + .dependency = .{ + .name = name.v, + .specifier = file, + }, + }, + } + else + CssModuleReference{ + .local = .{ + .name = this.config.pattern.writeToString( + allocator, + ArrayList(u8){}, + this.hashes.items[source_index], + this.sources.items[source_index], + name.v, + ), + }, + }; + + const export_value = this.exports_by_source_index.items[source_index].getPtr(id.v) orelse unreachable; + export_value.composes.append(allocator, reference) catch bun.outOfMemory(); + + const contains_reference = brk: { + for (export_value.composes.items) |*compose_| { + const compose: *const CssModuleReference = compose_; + if (compose.eql(&reference)) { + break :brk true; + } + } + break :brk false; + }; + if (!contains_reference) { + export_value.composes.append(allocator, reference) catch bun.outOfMemory(); + } + } + }, + else => {}, + } + } + + // The composes property can only be used within a simple class selector. + return .{ .err = css.PrinterErrorKind.invalid_composes_selector }; + } + + return .{ .result = {} }; + } + + pub fn addDashed(this: *CssModule, allocator: Allocator, local: []const u8, source_index: u32) void { + const gop = this.exports_by_source_index.items[source_index].getOrPut(allocator, local) catch bun.outOfMemory(); + if (!gop.found_existing) { + gop.value_ptr.* = CssModuleExport{ + // todo_stuff.depth + .name = this.config.pattern.writeToStringWithPrefix( + allocator, + "--", + this.hashes.items[source_index], + this.sources.items[source_index], + local[2..], + ), + .composes = .{}, + .is_referenced = false, + }; + } + } + + pub fn addLocal(this: *CssModule, allocator: Allocator, exported: []const u8, local: []const u8, source_index: u32) void { + const gop = this.exports_by_source_index.items[source_index].getOrPut(allocator, exported) catch bun.outOfMemory(); + if (!gop.found_existing) { + gop.value_ptr.* = CssModuleExport{ + // todo_stuff.depth + .name = this.config.pattern.writeToString( + allocator, + .{}, + this.hashes.items[source_index], + this.sources.items[source_index], + local, + ), + .composes = .{}, + .is_referenced = false, + }; + } + } +}; + +/// Configuration for CSS modules. +pub const Config = struct { + /// The name pattern to use when renaming class names and other identifiers. + /// Default is `[hash]_[local]`. + pattern: Pattern, + + /// Whether to rename dashed identifiers, e.g. custom properties. + dashed_idents: bool, + + /// Whether to scope animation names. + /// Default is `true`. + animation: bool, + + /// Whether to scope grid names. + /// Default is `true`. + grid: bool, + + /// Whether to scope custom identifiers + /// Default is `true`. + custom_idents: bool, +}; + +/// A CSS modules class name pattern. +pub const Pattern = struct { + /// The list of segments in the pattern. + segments: css.SmallList(Segment, 2), + + /// Write the substituted pattern to a destination. 
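+ ///
+ /// Each segment is resolved against `hash_`, `path`, and `local` and passed to
+ /// `writefn` together with `closure`; `replace_dots` is set when the file stem
+ /// contains a `.`, so the string-building helpers below can swap it for `-`.
+ ///
+ /// As a hypothetical illustration, with the default `[hash]_[local]` pattern a
+ /// hash of `abc123` and a local name of `button` would come out as `abc123_button`.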
+ pub fn write( + this: *const Pattern, + hash_: []const u8, + path: []const u8, + local: []const u8, + closure: anytype, + comptime writefn: *const fn (@TypeOf(closure), []const u8, replace_dots: bool) void, + ) void { + for (this.segments.slice()) |*segment| { + switch (segment.*) { + .literal => |s| { + writefn(closure, s, false); + }, + .name => { + const stem = std.fs.path.stem(path); + if (std.mem.indexOf(u8, stem, ".")) |_| { + writefn(closure, stem, true); + } else { + writefn(closure, stem, false); + } + }, + .local => { + writefn(closure, local, false); + }, + .hash => { + writefn(closure, hash_, false); + }, + } + } + } + + pub fn writeToStringWithPrefix( + this: *const Pattern, + allocator: Allocator, + comptime prefix: []const u8, + hash_: []const u8, + path: []const u8, + local: []const u8, + ) []const u8 { + const Closure = struct { res: ArrayList(u8), allocator: Allocator }; + var closure = Closure{ .res = .{}, .allocator = allocator }; + this.write( + hash_, + path, + local, + &closure, + struct { + pub fn writefn(self: *Closure, slice: []const u8, replace_dots: bool) void { + self.res.appendSlice(self.allocator, prefix) catch bun.outOfMemory(); + if (replace_dots) { + const start = self.res.items.len; + self.res.appendSlice(self.allocator, slice) catch bun.outOfMemory(); + const end = self.res.items.len; + for (self.res.items[start..end]) |*c| { + if (c.* == '.') { + c.* = '-'; + } + } + return; + } + self.res.appendSlice(self.allocator, slice) catch bun.outOfMemory(); + } + }.writefn, + ); + return closure.res.items; + } + + pub fn writeToString( + this: *const Pattern, + allocator: Allocator, + res_: ArrayList(u8), + hash_: []const u8, + path: []const u8, + local: []const u8, + ) []const u8 { + var res = res_; + const Closure = struct { res: *ArrayList(u8), allocator: Allocator }; + var closure = Closure{ .res = &res, .allocator = allocator }; + this.write( + hash_, + path, + local, + &closure, + struct { + pub fn writefn(self: *Closure, slice: []const u8, replace_dots: bool) void { + if (replace_dots) { + const start = self.res.items.len; + self.res.appendSlice(self.allocator, slice) catch bun.outOfMemory(); + const end = self.res.items.len; + for (self.res.items[start..end]) |*c| { + if (c.* == '.') { + c.* = '-'; + } + } + return; + } + self.res.appendSlice(self.allocator, slice) catch bun.outOfMemory(); + return; + } + }.writefn, + ); + + return res.items; + } +}; + +/// A segment in a CSS modules class name pattern. +/// +/// See [Pattern](Pattern). +pub const Segment = union(enum) { + /// A literal string segment. + literal: []const u8, + + /// The base file name. + name, + + /// The original class name. + local, + + /// A hash of the file name. + hash, +}; + +/// A map of exported names to values. +pub const CssModuleExports = std.StringArrayHashMapUnmanaged(CssModuleExport); + +/// A map of placeholders to references. +pub const CssModuleReferences = std.StringArrayHashMapUnmanaged(CssModuleReference); + +/// An exported value from a CSS module. +pub const CssModuleExport = struct { + /// The local (compiled) name for this export. + name: []const u8, + /// Other names that are composed by this export. + composes: ArrayList(CssModuleReference), + /// Whether the export is referenced in this file. + is_referenced: bool, +}; + +/// A referenced name within a CSS module, e.g. via the `composes` property. +/// +/// See [CssModuleExport](CssModuleExport). +pub const CssModuleReference = union(enum) { + /// A local reference. 
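+ /// That is, a name compiled (renamed) within the same file.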
+ local: struct { + /// The local (compiled) name for the reference. + name: []const u8, + }, + /// A global reference. + global: struct { + /// The referenced global name. + name: []const u8, + }, + /// A reference to an export in a different file. + dependency: struct { + /// The name to reference within the dependency. + name: []const u8, + /// The dependency specifier for the referenced file. + specifier: []const u8, + }, + + pub fn eql(this: *const @This(), other: *const @This()) bool { + if (@intFromEnum(this.*) != @intFromEnum(other.*)) return false; + + return switch (this.*) { + .local => |v| bun.strings.eql(v.name, other.local.name), + .global => |v| bun.strings.eql(v.name, other.global.name), + .dependency => |v| bun.strings.eql(v.name, other.dependency.name) and bun.strings.eql(v.specifier, other.dependency.specifier), + }; + } +}; + +// TODO: replace with bun's hash +pub fn hash(allocator: Allocator, comptime fmt: []const u8, args: anytype, at_start: bool) []const u8 { + _ = fmt; // autofix + _ = args; // autofix + _ = allocator; // autofix + _ = at_start; // autofix + // @compileError(css.todo_stuff.depth); + @panic(css.todo_stuff.depth); +} diff --git a/src/css/css_parser.zig b/src/css/css_parser.zig new file mode 100644 index 0000000000..edac851099 --- /dev/null +++ b/src/css/css_parser.zig @@ -0,0 +1,6830 @@ +const std = @import("std"); +const Allocator = std.mem.Allocator; +const bun = @import("root").bun; +const logger = bun.logger; +const Log = logger.Log; + +const ArrayList = std.ArrayListUnmanaged; + +const ImportRecord = bun.ImportRecord; +const ImportKind = bun.ImportKind; + +pub const prefixes = @import("./prefixes.zig"); + +pub const dependencies = @import("./dependencies.zig"); +pub const Dependency = dependencies.Dependency; + +pub const css_modules = @import("./css_modules.zig"); +pub const CssModuleExports = css_modules.CssModuleExports; +pub const CssModule = css_modules.CssModule; +pub const CssModuleReferences = css_modules.CssModuleReferences; +pub const CssModuleReference = css_modules.CssModuleReference; + +pub const css_rules = @import("./rules/rules.zig"); +pub const CssRule = css_rules.CssRule; +pub const CssRuleList = css_rules.CssRuleList; +pub const LayerName = css_rules.layer.LayerName; +pub const SupportsCondition = css_rules.supports.SupportsCondition; +pub const CustomMedia = css_rules.custom_media.CustomMediaRule; +pub const NamespaceRule = css_rules.namespace.NamespaceRule; +pub const UnknownAtRule = css_rules.unknown.UnknownAtRule; +pub const ImportRule = css_rules.import.ImportRule; +pub const StyleRule = css_rules.style.StyleRule; +pub const StyleContext = css_rules.StyleContext; +pub const SupportsRule = css_rules.supports.SupportsRule; +pub const TailwindAtRule = css_rules.tailwind.TailwindAtRule; + +pub const MinifyContext = css_rules.MinifyContext; + +pub const media_query = @import("./media_query.zig"); +pub const MediaList = media_query.MediaList; +pub const MediaFeatureType = media_query.MediaFeatureType; + +pub const css_values = @import("./values/values.zig"); +pub const DashedIdent = css_values.ident.DashedIdent; +pub const DashedIdentFns = css_values.ident.DashedIdentFns; +pub const CssColor = css_values.color.CssColor; +pub const ColorFallbackKind = css_values.color.ColorFallbackKind; +pub const CSSString = css_values.string.CSSString; +pub const CSSStringFns = css_values.string.CSSStringFns; +pub const CSSInteger = css_values.number.CSSInteger; +pub const CSSIntegerFns = css_values.number.CSSIntegerFns; +pub const CSSNumber = 
css_values.number.CSSNumber; +pub const CSSNumberFns = css_values.number.CSSNumberFns; +pub const Ident = css_values.ident.Ident; +pub const IdentFns = css_values.ident.IdentFns; +pub const CustomIdent = css_values.ident.CustomIdent; +pub const CustomIdentFns = css_values.ident.CustomIdentFns; +pub const Url = css_values.url.Url; + +pub const declaration = @import("./declaration.zig"); + +pub const css_properties = @import("./properties/properties.zig"); +pub const Property = css_properties.Property; +pub const PropertyId = css_properties.PropertyId; +pub const PropertyIdTag = css_properties.PropertyIdTag; +pub const TokenList = css_properties.custom.TokenList; +pub const TokenListFns = css_properties.custom.TokenListFns; + +const css_decls = @import("./declaration.zig"); +pub const DeclarationList = css_decls.DeclarationList; +pub const DeclarationBlock = css_decls.DeclarationBlock; + +pub const selector = @import("./selectors/selector.zig"); +pub const SelectorList = selector.parser.SelectorList; +pub const Selector = selector.parser.Selector; +pub const Component = selector.parser.Component; +pub const PseudoClass = selector.parser.PseudoClass; +pub const PseudoElement = selector.parser.PseudoElement; + +pub const logical = @import("./logical.zig"); +pub const PropertyCategory = logical.PropertyCategory; +pub const LogicalGroup = logical.LogicalGroup; + +pub const css_printer = @import("./printer.zig"); +pub const Printer = css_printer.Printer; +pub const PrinterOptions = css_printer.PrinterOptions; +pub const targets = @import("./targets.zig"); +pub const Targets = css_printer.Targets; +// pub const Features = css_printer.Features; + +const context = @import("./context.zig"); +pub const PropertyHandlerContext = context.PropertyHandlerContext; +pub const DeclarationHandler = declaration.DeclarationHandler; + +pub const Maybe = bun.JSC.Node.Maybe; +// TODO: Remove existing Error defined here and replace it with these +const errors_ = @import("./error.zig"); +pub const Err = errors_.Err; +pub const PrinterErrorKind = errors_.PrinterErrorKind; +pub const PrinterError = errors_.PrinterError; +pub const ErrorLocation = errors_.ErrorLocation; +pub const ParseError = errors_.ParseError; +pub const ParserError = errors_.ParserError; +pub const BasicParseError = errors_.BasicParseError; +pub const BasicParseErrorKind = errors_.BasicParseErrorKind; +pub const SelectorError = errors_.SelectorError; +pub const MinifyErrorKind = errors_.MinifyErrorKind; +pub const MinifyError = errors_.MinifyError; +pub const MinifyErr = errors_.MinifyErr; + +pub const generic = @import("./generics.zig"); +pub const HASH_SEED = generic.HASH_SEED; + +pub const ImportConditions = css_rules.import.ImportConditions; + +pub const compat = @import("./compat.zig"); + +pub const Features = targets.Features; +pub const Feature = compat.Feature; + +pub const fmtPrinterError = errors_.fmtPrinterError; + +pub const PrintErr = error{ + lol, +}; + +pub fn OOM(e: anyerror) noreturn { + if (comptime bun.Environment.isDebug) { + std.debug.assert(e == std.mem.Allocator.Error.OutOfMemory); + } + bun.outOfMemory(); +} + +pub const SmallList = @import("./small_list.zig").SmallList; +pub const Bitflags = bun.Bitflags; + +pub const todo_stuff = struct { + pub const think_mem_mgmt = "TODO: think about memory management"; + + pub const depth = "TODO: we need to go deeper"; + + pub const match_ignore_ascii_case = "TODO: implement match_ignore_ascii_case"; + + pub const enum_property = "TODO: implement enum_property!"; + + pub const 
match_byte = "TODO: implement match_byte!"; + + pub const warn = "TODO: implement warning"; +}; + +pub const VendorPrefix = packed struct(u8) { + /// No vendor prefixes. + /// 0b00000001 + none: bool = false, + /// The `-webkit` vendor prefix. + /// 0b00000010 + webkit: bool = false, + /// The `-moz` vendor prefix. + /// 0b00000100 + moz: bool = false, + /// The `-ms` vendor prefix. + /// 0b00001000 + ms: bool = false, + /// The `-o` vendor prefix. + /// 0b00010000 + o: bool = false, + __unused: u3 = 0, + + pub const NONE = VendorPrefix{ .none = true }; + pub const WEBKIT = VendorPrefix{ .webkit = true }; + pub const MOZ = VendorPrefix{ .moz = true }; + + /// Fields listed here so we can iterate them in the order we want + pub const FIELDS: []const []const u8 = &.{ "webkit", "moz", "ms", "o", "none" }; + + pub usingnamespace Bitflags(@This()); + + pub fn toCss(this: *const VendorPrefix, comptime W: type, dest: *Printer(W)) PrintErr!void { + return switch (this.asBits()) { + VendorPrefix.asBits(.{ .webkit = true }) => dest.writeStr("-webkit-"), + VendorPrefix.asBits(.{ .moz = true }) => dest.writeStr("-moz-"), + VendorPrefix.asBits(.{ .ms = true }) => dest.writeStr("-ms-"), + VendorPrefix.asBits(.{ .o = true }) => dest.writeStr("-o-"), + else => {}, + }; + } + + /// Returns VendorPrefix::None if empty. + pub fn orNone(this: VendorPrefix) VendorPrefix { + return this.bitwiseOr(VendorPrefix{ .none = true }); + } +}; + +pub const SourceLocation = struct { + line: u32, + column: u32, + + /// Create a new BasicParseError at this location for an unexpected token + pub fn newBasicUnexpectedTokenError(this: SourceLocation, token: Token) ParseError(ParserError) { + return BasicParseError.intoDefaultParseError(.{ + .kind = .{ .unexpected_token = token }, + .location = this, + }); + } + + /// Create a new ParseError at this location for an unexpected token + pub fn newUnexpectedTokenError(this: SourceLocation, token: Token) ParseError(ParserError) { + return ParseError(ParserError){ + .kind = .{ .basic = .{ .unexpected_token = token } }, + .location = this, + }; + } + + pub fn newCustomError(this: SourceLocation, err: anytype) ParseError(ParserError) { + return switch (@TypeOf(err)) { + ParserError => .{ + .kind = .{ .custom = err }, + .location = this, + }, + BasicParseError => .{ + .kind = .{ .custom = BasicParseError.intoDefaultParseError(err) }, + .location = this, + }, + selector.parser.SelectorParseErrorKind => .{ + .kind = .{ .custom = selector.parser.SelectorParseErrorKind.intoDefaultParserError(err) }, + .location = this, + }, + else => @compileError("TODO implement this for: " ++ @typeName(@TypeOf(err))), + }; + } +}; +pub const Location = css_rules.Location; + +pub const Error = Err(ParserError); + +pub fn Result(comptime T: type) type { + return Maybe(T, ParseError(ParserError)); +} + +pub fn PrintResult(comptime T: type) type { + return Maybe(T, PrinterError); +} + +pub fn todo(comptime fmt: []const u8, args: anytype) noreturn { + bun.Analytics.Features.todo_panic = 1; + std.debug.panic("TODO: " ++ fmt, args); +} + +pub fn voidWrap(comptime T: type, comptime parsefn: *const fn (*Parser) Result(T)) *const fn (void, *Parser) Result(T) { + const Wrapper = struct { + fn wrapped(_: void, p: *Parser) Result(T) { + return parsefn(p); + } + }; + return Wrapper.wrapped; +} + +pub fn DefineListShorthand(comptime T: type) type { + _ = T; // autofix + // TODO: implement this when we implement visit? 
+ // does nothing now + return struct {}; +} + +pub fn DefineShorthand(comptime T: type, comptime property_name: PropertyIdTag) type { + _ = property_name; // autofix + // TODO: validate map, make sure each field is set + // make sure each field is same index as in T + _ = T.PropertyFieldMap; + + return struct { + /// Returns a shorthand from the longhand properties defined in the given declaration block. + pub fn fromLonghands(allocator: Allocator, decls: *const DeclarationBlock, vendor_prefix: VendorPrefix) ?struct { T, bool } { + _ = allocator; // autofix + _ = decls; // autofix + _ = vendor_prefix; // autofix + // var count: usize = 0; + // var important_count: usize = 0; + // var this: T = undefined; + // var set_fields = std.StaticBitSet(std.meta.fields(T).len).initEmpty(); + // const all_fields_set = std.StaticBitSet(std.meta.fields(T).len).initFull(); + + // // Loop through each property in `decls.declarations` and then `decls.important_declarations` + // // The inline for loop is so we can share the code for both + // const DECL_FIELDS = &.{ "declarations", "important_declarations" }; + // inline for (DECL_FIELDS) |decl_field_name| { + // const decl_list: *const ArrayList(css_properties.Property) = &@field(decls, decl_field_name); + // const important = comptime std.mem.eql(u8, decl_field_name, "important_declarations"); + + // // Now loop through each property in the list + // main_loop: for (decl_list.items) |*property| { + // // The property field map maps each field in `T` to a tag of `Property` + // // Here we do `inline for` to basically switch on the tag of `property` to see + // // if it matches a field in `T` which maps to the same tag + // // + // // Basically, check that `@as(PropertyIdTag, property.*)` equals `T.PropertyFieldMap[field.name]` + // inline for (std.meta.fields(@TypeOf(T.PropertyFieldMap))) |field| { + // const tag: PropertyIdTag = @as(?*const PropertyIdTag, field.default_value).?.*; + + // if (@intFromEnum(@as(PropertyIdTag, property.*)) == tag) { + // if (@hasField(T.VendorPrefixMap, field.name)) { + // if (@hasField(T.VendorPrefixMap, field.name) and + // !VendorPrefix.eq(@field(property, field.name)[1], vendor_prefix)) + // { + // return null; + // } + + // @field(this, field.name) = if (@hasDecl(@TypeOf(@field(property, field.name)[0]), "clone")) + // @field(property, field.name)[0].deepClone(allocator) + // else + // @field(property, field.name)[0]; + // } else { + // @field(this, field.name) = if (@hasDecl(@TypeOf(@field(property, field.name)), "clone")) + // @field(property, field.name).deepClone(allocator) + // else + // @field(property, field.name); + // } + + // set_fields.set(std.meta.fieldIndex(T, field.name)); + // count += 1; + // if (important) { + // important_count += 1; + // } + + // continue :main_loop; + // } + // } + + // // If `property` matches none of the tags in `T.PropertyFieldMap` then let's try + // // if it matches the tag specified by `property_name` + // if (@as(PropertyIdTag, property.*) == property_name) { + // inline for (std.meta.fields(@TypeOf(T.PropertyFieldMap))) |field| { + // if (@hasField(T.VendorPrefixMap, field.name)) { + // @field(this, field.name) = if (@hasDecl(@TypeOf(@field(property, field.name)[0]), "clone")) + // @field(property, field.name)[0].deepClone(allocator) + // else + // @field(property, field.name)[0]; + // } else { + // @field(this, field.name) = if (@hasDecl(@TypeOf(@field(property, field.name)), "clone")) + // @field(property, field.name).deepClone(allocator) + // else + // @field(property, 
field.name); + // } + + // set_fields.set(std.meta.fieldIndex(T, field.name)); + // count += 1; + // if (important) { + // important_count += 1; + // } + // } + // continue :main_loop; + // } + + // // Otherwise, try to convert to te fields using `.longhand()` + // inline for (std.meta.fields(@TypeOf(T.PropertyFieldMap))) |field| { + // const property_id = @unionInit( + // PropertyId, + // field.name, + // if (@hasDecl(T.VendorPrefixMap, field.name)) vendor_prefix else {}, + // ); + // const value = property.longhand(&property_id); + // if (@as(PropertyIdTag, value) == @as(PropertyIdTag, property_id)) { + // @field(this, field.name) = if (@hasDecl(T.VendorPrefixMap, field.name)) + // @field(value, field.name)[0] + // else + // @field(value, field.name); + // set_fields.set(std.meta.fieldIndex(T, field.name)); + // count += 1; + // if (important) { + // important_count += 1; + // } + // } + // } + // } + // } + + // if (important_count > 0 and important_count != count) { + // return null; + // } + + // // All properties in the group must have a matching value to produce a shorthand. + // if (set_fields.eql(all_fields_set)) { + // return .{ this, important_count > 0 }; + // } + + // return null; + @panic(todo_stuff.depth); + } + + /// Returns a shorthand from the longhand properties defined in the given declaration block. + pub fn longhands(vendor_prefix: VendorPrefix) []const PropertyId { + _ = vendor_prefix; // autofix + // const out: []const PropertyId = comptime out: { + // var out: [std.meta.fields(@TypeOf(T.PropertyFieldMap)).len]PropertyId = undefined; + + // for (std.meta.fields(@TypeOf(T.PropertyFieldMap)), 0..) |field, i| { + // out[i] = @unionInit( + // PropertyId, + // field.name, + // if (@hasField(T.VendorPrefixMap, field.name)) vendor_prefix else {}, + // ); + // } + + // break :out out; + // }; + // return out; + + @panic(todo_stuff.depth); + } + + /// Returns a longhand property for this shorthand. + pub fn longhand(this: *const T, allocator: Allocator, property_id: *const PropertyId) ?Property { + _ = this; // autofix + _ = allocator; // autofix + _ = property_id; // autofix + // inline for (std.meta.fields(@TypeOf(T.PropertyFieldMap))) |field| { + // if (@as(PropertyIdTag, property_id.*) == @field(T.PropertyFieldMap, field.name)) { + // const val = if (@hasDecl(@TypeOf(@field(T, field.namee)), "clone")) + // @field(this, field.name).deepClone(allocator) + // else + // @field(this, field.name); + // return @unionInit( + // Property, + // field.name, + // if (@field(T.VendorPrefixMap, field.name)) + // .{ val, @field(property_id, field.name)[1] } + // else + // val, + // ); + // } + // } + // return null; + @panic(todo_stuff.depth); + } + + /// Updates this shorthand from a longhand property. 
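+ ///
+ /// Returns `true` if `property` is a longhand belonging to this shorthand and was
+ /// applied, `false` otherwise.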
+ pub fn setLonghand(this: *T, allocator: Allocator, property: *const Property) bool { + _ = this; // autofix + _ = allocator; // autofix + _ = property; // autofix + // inline for (std.meta.fields(T.PropertyFieldMap)) |field| { + // if (@as(PropertyIdTag, property.*) == @field(T.PropertyFieldMap, field.name)) { + // const val = if (@hasDecl(@TypeOf(@field(T, field.name)), "clone")) + // @field(this, field.name).deepClone(allocator) + // else + // @field(this, field.name); + + // @field(this, field.name) = val; + + // return true; + // } + // } + // return false; + @panic(todo_stuff.depth); + } + }; +} + +pub fn DefineRectShorthand(comptime T: type, comptime V: type) type { + return struct { + pub fn parse(input: *Parser) Result(T) { + const rect = switch (css_values.rect.Rect(V).parse(input)) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + + return .{ + .result = .{ + .top = rect.top, + .right = rect.right, + .bottom = rect.bottom, + .left = rect.left, + }, + }; + } + + pub fn toCss(this: *const T, comptime W: type, dest: *Printer(W)) PrintErr!void { + const rect = css_values.rect.Rect(V){ + .top = this.top, + .right = this.right, + .bottom = this.bottom, + .left = this.left, + }; + return rect.toCss(W, dest); + } + }; +} + +pub fn DefineSizeShorthand(comptime T: type, comptime V: type) type { + if (std.meta.fields(T).len != 2) @compileError("DefineSizeShorthand must be used on a struct with 2 fields"); + return struct { + pub fn toCss(this: *const T, comptime W: type, dest: *Printer(W)) PrintErr!void { + const size: css_values.size.Size2D(V) = .{ + .a = @field(this, std.meta.fields(T)[0].name), + .b = @field(this, std.meta.fields(T)[1].name), + }; + return size.toCss(W, dest); + // TODO: unfuck this + // @panic(todo_stuff.depth); + } + + pub fn parse(input: *Parser) Result(T) { + const size = switch (css_values.size.Size2D(V).parse(input)) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + + var this: T = undefined; + @field(this, std.meta.fields(T)[0].name) = size.a; + @field(this, std.meta.fields(T)[1].name) = size.b; + + return .{ .result = this }; + // TODO: unfuck this + // @panic(todo_stuff.depth); + } + }; +} + +pub fn DeriveParse(comptime T: type) type { + const tyinfo = @typeInfo(T); + const is_union_enum = tyinfo == .Union; + const enum_type = if (comptime is_union_enum) @typeInfo(tyinfo.Union.tag_type.?) else tyinfo; + const enum_actual_type = if (comptime is_union_enum) tyinfo.Union.tag_type.? else T; + + const Map = bun.ComptimeEnumMap(enum_actual_type); + + return struct { + pub fn parse(input: *Parser) Result(T) { + if (comptime is_union_enum) { + const payload_count, const first_payload_index, const void_count, const first_void_index = comptime counts: { + var first_void_index: ?usize = null; + var first_payload_index: ?usize = null; + var payload_count: usize = 0; + var void_count: usize = 0; + for (tyinfo.Union.fields, 0..) |field, i| { + if (field.type == void) { + void_count += 1; + if (first_void_index == null) first_void_index = i; + } else { + payload_count += 1; + if (first_payload_index == null) first_payload_index = i; + } + } + if (first_payload_index == null) { + @compileError("Type defined as `union(enum)` but no variant carries a payload. Make it an `enum` instead."); + } + if (first_void_index) |void_index| { + // Check if they overlap + if (first_payload_index.? < void_index and void_index < first_payload_index.? 
+ payload_count) @compileError("Please put all the fields with data together and all the fields with no data together."); + if (first_payload_index.? > void_index and first_payload_index.? < void_index + void_count) @compileError("Please put all the fields with data together and all the fields with no data together."); + } + break :counts .{ payload_count, first_payload_index.?, void_count, first_void_index }; + }; + + return gnerateCode(input, first_payload_index, first_void_index, void_count, payload_count); + } + + const location = input.currentSourceLocation(); + const ident = switch (input.expectIdent()) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + if (Map.getCaseInsensitiveWithEql(ident, bun.strings.eqlComptimeIgnoreLen)) |matched| { + inline for (bun.meta.EnumFields(enum_actual_type)) |field| { + if (field.value == @intFromEnum(matched)) { + if (comptime is_union_enum) return .{ .result = @unionInit(T, field.name, void) }; + return .{ .result = @enumFromInt(field.value) }; + } + } + unreachable; + } + return .{ .err = location.newUnexpectedTokenError(.{ .ident = ident }) }; + } + + /// Comptime code which constructs the parsing code for a union(enum) which could contain + /// void fields (fields with no associated data) and payload fields (fields which carry data), + /// for example: + /// + /// ```zig + /// /// A value for the [border-width](https://www.w3.org/TR/css-backgrounds-3/#border-width) property. + /// pub const BorderSideWidth = union(enum) { + /// /// A UA defined `thin` value. + /// thin, + /// /// A UA defined `medium` value. + /// medium, + /// /// A UA defined `thick` value. + /// thick, + /// /// An explicit width. + /// length: Length, + /// } + /// ``` + /// + /// During parsing, we can check if it is one of the void fields (in this case `thin`, `medium`, or `thick`) by reading a single + /// identifier from the Parser, and checking if it matches any of the void field names. We already constructed a ComptimeEnumMap (see above) + /// to make this super cheap. + /// + /// If we don't get an identifier that matches any of the void fields, we can then try to parse the payload fields. + /// + /// This function is made more complicated by the fact that it tries to parse in order of the fields that were declared in the union(enum). + /// If, for example, all the void fields were declared after the `length: Length` field, this function will try to parse the `length` field first, + /// and then try to parse the void fields. + /// + /// This parsing order is a detail copied from LightningCSS. I'm not sure if it is necessary. But it could be. + inline fn gnerateCode( + input: *Parser, + comptime first_payload_index: usize, + comptime maybe_first_void_index: ?usize, + comptime void_count: usize, + comptime payload_count: usize, + ) Result(T) { + const last_payload_index = first_payload_index + payload_count - 1; + if (comptime maybe_first_void_index == null) { + inline for (tyinfo.Union.fields[first_payload_index .. first_payload_index + payload_count], first_payload_index..) |field, i| { + if (comptime (i == last_payload_index)) { + return .{ .result = switch (generic.parseFor(field.type)(input)) { + .result => |v| @unionInit(T, field.name, v), + .err => |e| return .{ .err = e }, + } }; + } + if (input.tryParse(generic.parseFor(field.type), .{}).asValue()) |v| { + return .{ .result = @unionInit(T, field.name, v) }; + } + } + } + + const first_void_index = maybe_first_void_index.?; + + const void_fields = bun.meta.EnumFields(T)[first_void_index .. 
first_void_index + void_count]; + + if (comptime void_count == 1) { + const void_field = enum_type.Enum.fields[first_void_index]; + // The field is declared before the payload fields. + // So try to parse an ident matching the name of the field, then fallthrough + // to parsing the payload fields. + if (comptime first_void_index < first_payload_index) { + if (input.tryParse(Parser.expectIdentMatching, .{void_field.name}).isOk()) { + if (comptime is_union_enum) return .{ .result = @unionInit(T, void_field.name, {}) }; + return .{ .result = @enumFromInt(void_field.value) }; + } + + inline for (tyinfo.Union.fields[first_payload_index .. first_payload_index + payload_count], first_payload_index..) |field, i| { + if (comptime (i == last_payload_index and last_payload_index > first_void_index)) { + return .{ .result = switch (generic.parseFor(field.type)(input)) { + .result => |v| @unionInit(T, field.name, v), + .err => |e| return .{ .err = e }, + } }; + } + if (input.tryParse(generic.parseFor(field.type), .{}).asValue()) |v| { + return .{ .result = @unionInit(T, field.name, v) }; + } + } + } else { + inline for (tyinfo.Union.fields[first_payload_index .. first_payload_index + payload_count], first_payload_index..) |field, i| { + if (comptime (i == last_payload_index and last_payload_index > first_void_index)) { + return .{ .result = switch (generic.parseFor(field.type)(input)) { + .result => |v| @unionInit(T, field.name, v), + .err => |e| return .{ .err = e }, + } }; + } + if (input.tryParse(generic.parseFor(field.type), .{}).asValue()) |v| { + return .{ .result = @unionInit(T, field.name, v) }; + } + } + + // We can generate this as the last statements of the function, avoiding the `input.tryParse` routine above + if (input.expectIdentMatching(void_field.name).asErr()) |e| return .{ .err = e }; + if (comptime is_union_enum) return .{ .result = @unionInit(T, void_field.name, {}) }; + return .{ .result = @enumFromInt(void_field.value) }; + } + } else if (comptime first_void_index < first_payload_index) { + // Multiple fields declared before the payload fields, use tryParse + const state = input.state(); + if (input.tryParse(Parser.expectIdent, .{}).asValue()) |ident| { + if (Map.getCaseInsensitiveWithEql(ident, bun.strings.eqlComptimeIgnoreLen)) |matched| { + inline for (void_fields) |field| { + if (field.value == @intFromEnum(matched)) { + if (comptime is_union_enum) return .{ .result = @unionInit(T, field.name, {}) }; + return .{ .result = @enumFromInt(field.value) }; + } + } + unreachable; + } + input.reset(&state); + } + + inline for (tyinfo.Union.fields[first_payload_index .. first_payload_index + payload_count], first_payload_index..) |field, i| { + if (comptime (i == last_payload_index and last_payload_index > first_void_index)) { + return .{ .result = switch (generic.parseFor(field.type)(input)) { + .result => |v| @unionInit(T, field.name, v), + .err => |e| return .{ .err = e }, + } }; + } + if (input.tryParse(generic.parseFor(field.type), .{}).asValue()) |v| { + return .{ .result = @unionInit(T, field.name, v) }; + } + } + } else if (comptime first_void_index > first_payload_index) { + inline for (tyinfo.Union.fields[first_payload_index .. first_payload_index + payload_count], first_payload_index..) 
|field, i| { + if (comptime (i == last_payload_index and last_payload_index > first_void_index)) { + return .{ .result = switch (generic.parseFor(field.type)(input)) { + .result => |v| @unionInit(T, field.name, v), + .err => |e| return .{ .err = e }, + } }; + } + if (input.tryParse(generic.parseFor(field.type), .{}).asValue()) |v| { + return .{ .result = @unionInit(T, field.name, v) }; + } + } + + const location = input.currentSourceLocation(); + const ident = switch (input.expectIdent()) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + if (Map.getCaseInsensitiveWithEql(ident, bun.strings.eqlComptimeIgnoreLen)) |matched| { + inline for (void_fields) |field| { + if (field.value == @intFromEnum(matched)) { + if (comptime is_union_enum) return .{ .result = @unionInit(T, field.name, {}) }; + return .{ .result = @enumFromInt(field.value) }; + } + } + unreachable; + } + return .{ .err = location.newUnexpectedTokenError(.{ .ident = ident }) }; + } + @compileError("SHOULD BE UNREACHABLE!"); + } + + // inline fn generatePayloadBranches( + // input: *Parser, + // comptime first_payload_index: usize, + // comptime first_void_index: usize, + // comptime payload_count: usize, + // ) Result(T) { + // const last_payload_index = first_payload_index + payload_count - 1; + // inline for (tyinfo.Union.fields[first_payload_index..], first_payload_index..) |field, i| { + // if (comptime (i == last_payload_index and last_payload_index > first_void_index)) { + // return generic.parseFor(field.type)(input); + // } + // if (input.tryParse(generic.parseFor(field.type), .{}).asValue()) |v| { + // return .{ .result = @unionInit(T, field.name, v) }; + // } + // } + // // The last field will return so this is never reachable + // unreachable; + // } + + // pub fn parse(this: *const T, comptime W: type, dest: *Printer(W)) PrintErr!void { + // // to implement this, we need to cargo expand the derive macro + // _ = this; // autofix + // _ = dest; // autofix + // @compileError(todo_stuff.depth); + // } + }; +} + +/// This uses comptime reflection to generate a `toCss` function enums and union(enum)s. +/// +/// Supported payload types for union(enum)s are: +/// - any type that has a `toCss` function +/// - void types (stringifies the identifier) +/// - optional types (unwraps the optional) +/// - anonymous structs, will automatically serialize it if it has a `__generateToCss` function +pub fn DeriveToCss(comptime T: type) type { + const tyinfo = @typeInfo(T); + const enum_fields = bun.meta.EnumFields(T); + const is_enum_or_union_enum = tyinfo == .Union or tyinfo == .Enum; + + return struct { + pub fn toCss(this: *const T, comptime W: type, dest: *Printer(W)) PrintErr!void { + if (comptime is_enum_or_union_enum) { + inline for (std.meta.fields(T), 0..) |field, i| { + if (@intFromEnum(this.*) == enum_fields[i].value) { + if (comptime field.type == void) { + return dest.writeStr(enum_fields[i].name); + } else if (comptime generic.hasToCss(field.type)) { + return generic.toCss(field.type, &@field(this, field.name), W, dest); + } else if (@hasDecl(field.type, "__generateToCss") and @typeInfo(field.type) == .Struct) { + const variant_fields = std.meta.fields(field.type); + if (variant_fields.len > 1) { + const last = variant_fields.len - 1; + inline for (variant_fields, 0..) 
|variant_field, j| { + // Unwrap it from the optional + if (@typeInfo(variant_field.type) == .Optional) { + if (@field(@field(this, field.name), variant_field.name)) |*value| { + try value.toCss(W, dest); + } + } else { + try @field(@field(this, field.name), variant_field.name).toCss(W, dest); + } + + // Emit a space if there are more fields after + if (comptime j != last) { + try dest.writeChar(' '); + } + } + } else { + const variant_field = variant_fields[0]; + try @field(variant_field.type, "toCss")(@field(@field(this, field.name), variant_field.name), W, dest); + } + } else { + @compileError("Don't know how to serialize this variant: " ++ @typeName(field.type) ++ ", on " ++ @typeName(T) ++ ".\n\nYou probably want to implement a `toCss` function for this type, or add a dummy `fn __generateToCss() void {}` to the type signal that it is okay for it to be auto-generated by this function.."); + } + } + } + } else { + @compileError("Unsupported type: " ++ @typeName(T)); + } + return; + } + }; +} + +pub const enum_property_util = struct { + pub fn asStr(comptime T: type, this: *const T) []const u8 { + const tag = @intFromEnum(this.*); + inline for (bun.meta.EnumFields(T)) |field| { + if (tag == field.value) return field.name; + } + unreachable; + } + + pub inline fn parse(comptime T: type, input: *Parser) Result(T) { + const location = input.currentSourceLocation(); + const ident = switch (input.expectIdent()) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + + const Map = comptime bun.ComptimeEnumMap(T); + if (Map.getASCIIICaseInsensitive(ident)) |x| return .{ .result = x }; + // inline for (std.meta.fields(T)) |field| { + // if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(ident, field.name)) return .{ .result = @enumFromInt(field.value) }; + // } + + return .{ .err = location.newUnexpectedTokenError(.{ .ident = ident }) }; + } + + pub fn toCss(comptime T: type, this: *const T, comptime W: type, dest: *Printer(W)) PrintErr!void { + return dest.writeStr(asStr(T, this)); + } +}; + +pub fn DefineEnumProperty(comptime T: type) type { + const fields: []const std.builtin.Type.EnumField = std.meta.fields(T); + + return struct { + pub fn eql(lhs: *const T, rhs: *const T) bool { + return @intFromEnum(lhs.*) == @intFromEnum(rhs.*); + } + + pub fn asStr(this: *const T) []const u8 { + const tag = @intFromEnum(this.*); + inline for (fields) |field| { + if (tag == field.value) return field.name; + } + unreachable; + } + + pub fn parse(input: *Parser) Result(T) { + const location = input.currentSourceLocation(); + const ident = switch (input.expectIdent()) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + + // todo_stuff.match_ignore_ascii_case + inline for (fields) |field| { + if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(ident, field.name)) return .{ .result = @enumFromInt(field.value) }; + } + + return .{ .err = location.newUnexpectedTokenError(.{ .ident = ident }) }; + // @panic("TODO renable this"); + } + + pub fn toCss(this: *const T, comptime W: type, dest: *Printer(W)) PrintErr!void { + return dest.writeStr(asStr(this)); + } + + pub inline fn deepClone(this: *const T, _: std.mem.Allocator) T { + return this.*; + } + + pub fn hash(this: *const T, hasher: *std.hash.Wyhash) void { + const tag = @intFromEnum(this.*); + hasher.update(std.mem.asBytes(&tag)); + } + }; +} + +pub fn DeriveValueType(comptime T: type) type { + _ = @typeInfo(T).Enum; + + const ValueTypeMap = T.ValueTypeMap; + const field_values: []const MediaFeatureType = field_values: { + const fields 
= std.meta.fields(T); + var mapping: [fields.len]MediaFeatureType = undefined; + for (fields, 0..) |field, i| { + // Check that it exists in the type map + mapping[i] = @field(ValueTypeMap, field.name); + } + const mapping_final = mapping; + break :field_values mapping_final[0..]; + }; + + return struct { + pub fn valueType(this: *const T) MediaFeatureType { + inline for (std.meta.fields(T), 0..) |field, i| { + if (field.value == @intFromEnum(this.*)) { + return field_values[i]; + } + } + unreachable; + } + }; +} + +fn consume_until_end_of_block(block_type: BlockType, tokenizer: *Tokenizer) void { + const StackCount = 16; + var sfb = std.heap.stackFallback(@sizeOf(BlockType) * StackCount, tokenizer.allocator); + const alloc = sfb.get(); + var stack = std.ArrayList(BlockType).initCapacity(alloc, StackCount) catch unreachable; + defer stack.deinit(); + + stack.appendAssumeCapacity(block_type); + + while (switch (tokenizer.next()) { + .result => |v| v, + .err => null, + }) |tok| { + if (BlockType.closing(&tok)) |b| { + if (stack.getLast() == b) { + _ = stack.pop(); + if (stack.items.len == 0) return; + } + } + + if (BlockType.opening(&tok)) |bt| stack.append(bt) catch unreachable; + } +} + +fn parse_at_rule( + allocator: Allocator, + start: *const ParserState, + name: []const u8, + input: *Parser, + comptime P: type, + parser: *P, +) Result(P.AtRuleParser.AtRule) { + _ = allocator; // autofix + ValidAtRuleParser(P); + const delimiters = Delimiters{ .semicolon = true, .curly_bracket = true }; + const Closure = struct { + name: []const u8, + parser: *P, + + pub fn parsefn(this: *@This(), input2: *Parser) Result(P.AtRuleParser.Prelude) { + return P.AtRuleParser.parsePrelude(this.parser, this.name, input2); + } + }; + var closure = Closure{ .name = name, .parser = parser }; + const prelude: P.AtRuleParser.Prelude = switch (input.parseUntilBefore(delimiters, P.AtRuleParser.Prelude, &closure, Closure.parsefn)) { + .result => |vvv| vvv, + .err => |e| { + // const end_position = input.position(); + // _ = end_position; k + out: { + const tok = switch (input.next()) { + .result => |v| v, + .err => break :out, + }; + if (tok.* != .open_curly and tok.* != .semicolon) bun.unreachablePanic("Should have consumed these delimiters", .{}); + break :out; + } + return .{ .err = e }; + }, + }; + const next = switch (input.next()) { + .result => |v| v.*, + .err => { + return switch (P.AtRuleParser.ruleWithoutBlock(parser, prelude, start)) { + .result => |v| { + return .{ .result = v }; + }, + .err => { + return .{ .err = input.newUnexpectedTokenError(.semicolon) }; + }, + }; + }, + }; + switch (next) { + .semicolon => { + switch (P.AtRuleParser.ruleWithoutBlock(parser, prelude, start)) { + .result => |v| { + return .{ .result = v }; + }, + .err => { + return .{ .err = input.newUnexpectedTokenError(.semicolon) }; + }, + } + }, + .open_curly => { + const AnotherClosure = struct { + prelude: P.AtRuleParser.Prelude, + start: *const ParserState, + parser: *P, + pub fn parsefn(this: *@This(), input2: *Parser) Result(P.AtRuleParser.AtRule) { + return P.AtRuleParser.parseBlock(this.parser, this.prelude, this.start, input2); + } + }; + var another_closure = AnotherClosure{ + .prelude = prelude, + .start = start, + .parser = parser, + }; + return parse_nested_block(input, P.AtRuleParser.AtRule, &another_closure, AnotherClosure.parsefn); + }, + else => { + bun.unreachablePanic("", .{}); + }, + } +} + +fn parse_custom_at_rule_prelude(name: []const u8, input: *Parser, options: *const ParserOptions, comptime T: type, 
at_rule_parser: *T) Result(AtRulePrelude(T.CustomAtRuleParser.Prelude)) { + ValidCustomAtRuleParser(T); + switch (T.CustomAtRuleParser.parsePrelude(at_rule_parser, name, input, options)) { + .result => |prelude| { + return .{ .result = .{ .custom = prelude } }; + }, + .err => |e| { + if (e.kind == .basic and e.kind.basic == .at_rule_invalid) { + // do nothing + } else return .{ + .err = input.newCustomError( + ParserError{ .at_rule_prelude_invalid = {} }, + ), + }; + }, + } + + options.warn(input.newError(.{ .at_rule_invalid = name })); + input.skipWhitespace(); + const tokens = switch (TokenListFns.parse(input, options, 0)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + return .{ .result = .{ .unknown = .{ + .name = name, + .tokens = tokens, + } } }; +} + +fn parse_custom_at_rule_without_block( + comptime T: type, + prelude: T.CustomAtRuleParser.Prelude, + start: *const ParserState, + options: *const ParserOptions, + at_rule_parser: *T, + is_nested: bool, +) Maybe(CssRule(T.CustomAtRuleParser.AtRule), void) { + return switch (T.CustomAtRuleParser.ruleWithoutBlock(at_rule_parser, prelude, start, options, is_nested)) { + .result => |v| .{ .result = CssRule(T.CustomAtRuleParser.AtRule){ .custom = v } }, + .err => |e| .{ .err = e }, + }; +} + +fn parse_custom_at_rule_body( + comptime T: type, + prelude: T.CustomAtRuleParser.Prelude, + input: *Parser, + start: *const ParserState, + options: *const ParserOptions, + at_rule_parser: *T, + is_nested: bool, +) Result(T.CustomAtRuleParser.AtRule) { + const result = switch (T.CustomAtRuleParser.parseBlock(at_rule_parser, prelude, start, input, options, is_nested)) { + .result => |vv| vv, + .err => |e| { + _ = e; // autofix + // match &err.kind { + // ParseErrorKind::Basic(kind) => ParseError { + // kind: ParseErrorKind::Basic(kind.clone()), + // location: err.location, + // }, + // _ => input.new_error(BasicParseErrorKind::AtRuleBodyInvalid), + // } + todo("This part here", .{}); + }, + }; + return .{ .result = result }; +} + +fn parse_qualified_rule( + start: *const ParserState, + input: *Parser, + comptime P: type, + parser: *P, + delimiters: Delimiters, +) Result(P.QualifiedRuleParser.QualifiedRule) { + ValidQualifiedRuleParser(P); + const prelude_result = brk: { + const prelude = input.parseUntilBefore(delimiters, P.QualifiedRuleParser.Prelude, parser, P.QualifiedRuleParser.parsePrelude); + break :brk prelude; + }; + if (input.expectCurlyBracketBlock().asErr()) |e| return .{ .err = e }; + const prelude = switch (prelude_result) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + const Closure = struct { + start: *const ParserState, + prelude: P.QualifiedRuleParser.Prelude, + parser: *P, + + pub fn parsefn(this: *@This(), input2: *Parser) Result(P.QualifiedRuleParser.QualifiedRule) { + return P.QualifiedRuleParser.parseBlock(this.parser, this.prelude, this.start, input2); + } + }; + var closure = Closure{ + .start = start, + .prelude = prelude, + .parser = parser, + }; + return parse_nested_block(input, P.QualifiedRuleParser.QualifiedRule, &closure, Closure.parsefn); +} + +fn parse_until_before( + parser: *Parser, + delimiters_: Delimiters, + error_behavior: ParseUntilErrorBehavior, + comptime T: type, + closure: anytype, + comptime parse_fn: *const fn (@TypeOf(closure), *Parser) Result(T), +) Result(T) { + const delimiters = parser.stop_before.bitwiseOr(delimiters_); + const result = result: { + var delimited_parser = Parser{ + .input = parser.input, + .at_start_of = if (parser.at_start_of) |block_type| brk: { + 
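+ // Move the pending block type onto the delimited parser so it is the one that consumes it.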
parser.at_start_of = null; + break :brk block_type; + } else null, + .stop_before = delimiters, + .import_records = parser.import_records, + }; + const result = delimited_parser.parseEntirely(T, closure, parse_fn); + if (error_behavior == .stop and result.isErr()) { + return result; + } + if (delimited_parser.at_start_of) |block_type| { + consume_until_end_of_block(block_type, &delimited_parser.input.tokenizer); + } + break :result result; + }; + + // FIXME: have a special-purpose tokenizer method for this that does less work. + while (true) { + if (delimiters.contains(Delimiters.fromByte(parser.input.tokenizer.nextByte()))) break; + + switch (parser.input.tokenizer.next()) { + .result => |token| { + if (BlockType.opening(&token)) |block_type| { + consume_until_end_of_block(block_type, &parser.input.tokenizer); + } + }, + else => break, + } + } + + return result; +} + +// fn parse_until_before_impl(parser: *Parser, delimiters: Delimiters, error_behavior: Parse + +pub fn parse_until_after( + parser: *Parser, + delimiters: Delimiters, + error_behavior: ParseUntilErrorBehavior, + comptime T: type, + closure: anytype, + comptime parsefn: *const fn (@TypeOf(closure), *Parser) Result(T), +) Result(T) { + const result = parse_until_before(parser, delimiters, error_behavior, T, closure, parsefn); + const is_err = result.isErr(); + if (error_behavior == .stop and is_err) { + return result; + } + const next_byte = parser.input.tokenizer.nextByte(); + if (next_byte != null and !parser.stop_before.contains(Delimiters.fromByte(next_byte))) { + bun.debugAssert(delimiters.contains(Delimiters.fromByte(next_byte))); + // We know this byte is ASCII. + parser.input.tokenizer.advance(1); + if (next_byte == '{') { + consume_until_end_of_block(BlockType.curly_bracket, &parser.input.tokenizer); + } + } + return result; +} + +fn parse_nested_block(parser: *Parser, comptime T: type, closure: anytype, comptime parsefn: *const fn (@TypeOf(closure), *Parser) Result(T)) Result(T) { + const block_type: BlockType = if (parser.at_start_of) |block_type| brk: { + parser.at_start_of = null; + break :brk block_type; + } else @panic( + \\ + \\A nested parser can only be created when a Function, + \\ParenthisisBlock, SquareBracketBlock, or CurlyBracketBlock + \\token was just consumed. + ); + + const closing_delimiter = switch (block_type) { + .curly_bracket => Delimiters{ .close_curly_bracket = true }, + .square_bracket => Delimiters{ .close_square_bracket = true }, + .parenthesis => Delimiters{ .close_parenthesis = true }, + }; + var nested_parser = Parser{ + .input = parser.input, + .stop_before = closing_delimiter, + .import_records = parser.import_records, + }; + const result = nested_parser.parseEntirely(T, closure, parsefn); + if (nested_parser.at_start_of) |block_type2| { + consume_until_end_of_block(block_type2, &nested_parser.input.tokenizer); + } + consume_until_end_of_block(block_type, &parser.input.tokenizer); + return result; +} + +pub fn ValidQualifiedRuleParser(comptime T: type) void { + // The intermediate representation of a qualified rule prelude. + _ = T.QualifiedRuleParser.Prelude; + + // The finished representation of a qualified rule. + _ = T.QualifiedRuleParser.QualifiedRule; + + // Parse the prelude of a qualified rule. For style rules, this is as Selector list. + // + // Return the representation of the prelude, + // or `Err(())` to ignore the entire at-rule as invalid. + // + // The prelude is the part before the `{ /* ... */ }` block. 
+ // + // The given `input` is a "delimited" parser + // that ends where the prelude should end (before the next `{`). + // + // fn parsePrelude(this: *T, input: *Parser) Error!T.QualifiedRuleParser.Prelude; + _ = T.QualifiedRuleParser.parsePrelude; + + // Parse the content of a `{ /* ... */ }` block for the body of the qualified rule. + // + // The location passed in is source location of the start of the prelude. + // + // Return the finished representation of the qualified rule + // as returned by `RuleListParser::next`, + // or `Err(())` to ignore the entire at-rule as invalid. + // + // fn parseBlock(this: *T, prelude: P.QualifiedRuleParser.Prelude, start: *const ParserState, input: *Parser) Error!P.QualifiedRuleParser.QualifiedRule; + _ = T.QualifiedRuleParser.parseBlock; +} + +pub const DefaultAtRule = struct { + pub fn toCss(_: *const @This(), comptime W: type, dest: *Printer(W)) PrintErr!void { + return dest.newError(.fmt_error, null); + } + + pub fn deepClone(_: *const @This(), _: std.mem.Allocator) @This() { + return .{}; + } +}; + +pub const DefaultAtRuleParser = struct { + const This = @This(); + + pub const CustomAtRuleParser = struct { + pub const Prelude = void; + pub const AtRule = DefaultAtRule; + + pub fn parsePrelude(_: *This, name: []const u8, input: *Parser, _: *const ParserOptions) Result(Prelude) { + return .{ .err = input.newError(BasicParseErrorKind{ .at_rule_invalid = name }) }; + } + + pub fn parseBlock(_: *This, _: CustomAtRuleParser.Prelude, _: *const ParserState, input: *Parser, _: *const ParserOptions, _: bool) Result(CustomAtRuleParser.AtRule) { + return .{ .err = input.newError(BasicParseErrorKind.at_rule_body_invalid) }; + } + + pub fn ruleWithoutBlock(_: *This, _: CustomAtRuleParser.Prelude, _: *const ParserState, _: *const ParserOptions, _: bool) Maybe(CustomAtRuleParser.AtRule, void) { + return .{ .err = {} }; + } + + pub fn onImportRule(_: *This, _: *ImportRule, _: u32, _: u32) void {} + }; +}; + +/// We may want to enable this later +pub const ENABLE_TAILWIND_PARSING = false; + +pub const BundlerAtRule = if (ENABLE_TAILWIND_PARSING) TailwindAtRule else DefaultAtRule; +pub const BundlerAtRuleParser = struct { + const This = @This(); + allocator: Allocator, + import_records: *bun.BabyList(ImportRecord), + options: *const ParserOptions, + + pub const CustomAtRuleParser = struct { + pub const Prelude = if (ENABLE_TAILWIND_PARSING) union(enum) { + tailwind: TailwindAtRule, + } else void; + pub const AtRule = if (ENABLE_TAILWIND_PARSING) TailwindAtRule else DefaultAtRule; + + pub fn parsePrelude(this: *This, name: []const u8, input: *Parser, _: *const ParserOptions) Result(Prelude) { + if (comptime ENABLE_TAILWIND_PARSING) { + const PreludeNames = enum { + tailwind, + }; + const Map = comptime bun.ComptimeEnumMap(PreludeNames); + if (Map.getASCIIICaseInsensitive(name)) |prelude| return switch (prelude) { + .tailwind => { + const loc_ = input.currentSourceLocation(); + const loc = css_rules.Location{ + .source_index = this.options.source_index, + .line = loc_.line, + .column = loc_.column, + }; + const style_name = switch (css_rules.tailwind.TailwindStyleName.parse(input)) { + .result => |v| v, + .err => return .{ .err = input.newError(BasicParseErrorKind{ .at_rule_invalid = name }) }, + }; + return .{ .result = .{ + .tailwind = .{ + .style_name = style_name, + .loc = loc, + }, + } }; + }, + }; + } + return .{ .err = input.newError(BasicParseErrorKind{ .at_rule_invalid = name }) }; + } + + pub fn parseBlock(_: *This, _: CustomAtRuleParser.Prelude, _: *const 
ParserState, input: *Parser, _: *const ParserOptions, _: bool) Result(CustomAtRuleParser.AtRule) { + return .{ .err = input.newError(BasicParseErrorKind.at_rule_body_invalid) }; + } + + pub fn ruleWithoutBlock(_: *This, prelude: CustomAtRuleParser.Prelude, _: *const ParserState, _: *const ParserOptions, _: bool) Maybe(CustomAtRuleParser.AtRule, void) { + if (comptime ENABLE_TAILWIND_PARSING) { + return switch (prelude) { + .tailwind => |v| return .{ .result = v }, + }; + } + return .{ .err = {} }; + } + + pub fn onImportRule(this: *This, import_rule: *ImportRule, start_position: u32, end_position: u32) void { + const import_record_index = this.import_records.len; + import_rule.import_record_idx = import_record_index; + this.import_records.push(this.allocator, ImportRecord{ + .path = bun.fs.Path.init(import_rule.url), + .kind = if (import_rule.supports != null) .at_conditional else .at, + .range = bun.logger.Range{ + .loc = bun.logger.Loc{ .start = @intCast(start_position) }, + .len = @intCast(end_position - start_position), + }, + }) catch bun.outOfMemory(); + } + }; +}; + +/// Same as `ValidAtRuleParser` but modified to provide parser options +/// +/// Also added: +/// - onImportRule to handle @import rules +pub fn ValidCustomAtRuleParser(comptime T: type) void { + // The intermediate representation of prelude of an at-rule. + _ = T.CustomAtRuleParser.Prelude; + + // The finished representation of an at-rule. + _ = T.CustomAtRuleParser.AtRule; + + // Parse the prelude of an at-rule with the given `name`. + // + // Return the representation of the prelude and the type of at-rule, + // or `Err(())` to ignore the entire at-rule as invalid. + // + // The prelude is the part after the at-keyword + // and before the `;` semicolon or `{ /* ... */ }` block. + // + // At-rule name matching should be case-insensitive in the ASCII range. + // This can be done with `std::ascii::Ascii::eq_ignore_ascii_case`, + // or with the `match_ignore_ascii_case!` macro. + // + // The given `input` is a "delimited" parser + // that ends wherever the prelude should end. + // (Before the next semicolon, the next `{`, or the end of the current block.) + // + // pub fn parsePrelude(this: *T, allocator: Allocator, name: []const u8, *Parser, options: *ParserOptions) Result(T.CustomAtRuleParser.Prelude) {} + _ = T.CustomAtRuleParser.parsePrelude; + + // End an at-rule which doesn't have block. Return the finished + // representation of the at-rule. + // + // The location passed in is source location of the start of the prelude. + // `is_nested` indicates whether the rule is nested inside a style rule. + // + // This is only called when either the `;` semicolon indeed follows the prelude, + // or parser is at the end of the input. + _ = T.CustomAtRuleParser.ruleWithoutBlock; + + // Parse the content of a `{ /* ... */ }` block for the body of the at-rule. + // + // The location passed in is source location of the start of the prelude. + // `is_nested` indicates whether the rule is nested inside a style rule. + // + // Return the finished representation of the at-rule + // as returned by `RuleListParser::next` or `DeclarationListParser::next`, + // or `Err(())` to ignore the entire at-rule as invalid. + // + // This is only called when a block was found following the prelude. 
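+ //
+ // `DefaultAtRuleParser` and `BundlerAtRuleParser` above are minimal implementations of this interface.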
+ _ = T.CustomAtRuleParser.parseBlock; + + _ = T.CustomAtRuleParser.onImportRule; +} + +pub fn ValidAtRuleParser(comptime T: type) void { + _ = T.AtRuleParser.AtRule; + _ = T.AtRuleParser.Prelude; + + // Parse the prelude of an at-rule with the given `name`. + // + // Return the representation of the prelude and the type of at-rule, + // or `Err(())` to ignore the entire at-rule as invalid. + // + // The prelude is the part after the at-keyword + // and before the `;` semicolon or `{ /* ... */ }` block. + // + // At-rule name matching should be case-insensitive in the ASCII range. + // This can be done with `std::ascii::Ascii::eq_ignore_ascii_case`, + // or with the `match_ignore_ascii_case!` macro. + // + // The given `input` is a "delimited" parser + // that ends wherever the prelude should end. + // (Before the next semicolon, the next `{`, or the end of the current block.) + // + // pub fn parsePrelude(this: *T, allocator: Allocator, name: []const u8, *Parser) Result(T.AtRuleParser.Prelude) {} + _ = T.AtRuleParser.parsePrelude; + + // End an at-rule which doesn't have block. Return the finished + // representation of the at-rule. + // + // The location passed in is source location of the start of the prelude. + // + // This is only called when `parse_prelude` returned `WithoutBlock`, and + // either the `;` semicolon indeed follows the prelude, or parser is at + // the end of the input. + // fn ruleWithoutBlock(this: *T, allocator: Allocator, prelude: T.AtRuleParser.Prelude, state: *const ParserState) Maybe(T.AtRuleParser.AtRule, void) + _ = T.AtRuleParser.ruleWithoutBlock; + + // Parse the content of a `{ /* ... */ }` block for the body of the at-rule. + // + // The location passed in is source location of the start of the prelude. + // + // Return the finished representation of the at-rule + // as returned by `RuleListParser::next` or `DeclarationListParser::next`, + // or `Err(())` to ignore the entire at-rule as invalid. + // + // This is only called when `parse_prelude` returned `WithBlock`, and a block + // was indeed found following the prelude. 
+ // + // fn parseBlock(this: *T, prelude: T.AtRuleParser.Prelude, start: *const ParserState, input: *Parser) Error!T.AtRuleParser.AtRule + _ = T.AtRuleParser.parseBlock; +} + +pub fn AtRulePrelude(comptime T: type) type { + return union(enum) { + font_face, + font_feature_values, + font_palette_values: DashedIdent, + counter_style: CustomIdent, + import: struct { + []const u8, + MediaList, + ?SupportsCondition, + ?struct { value: ?LayerName }, + }, + namespace: struct { + ?[]const u8, + []const u8, + }, + charset, + custom_media: struct { + DashedIdent, + MediaList, + }, + property: struct { + DashedIdent, + }, + media: MediaList, + supports: SupportsCondition, + viewport: VendorPrefix, + keyframes: struct { + name: css_rules.keyframes.KeyframesName, + prefix: VendorPrefix, + }, + page: ArrayList(css_rules.page.PageSelector), + moz_document, + layer: ArrayList(LayerName), + container: struct { + name: ?css_rules.container.ContainerName, + condition: css_rules.container.ContainerCondition, + }, + starting_style, + nest: selector.parser.SelectorList, + scope: struct { + scope_start: ?selector.parser.SelectorList, + scope_end: ?selector.parser.SelectorList, + }, + unknown: struct { + name: []const u8, + /// The tokens of the prelude + tokens: TokenList, + }, + custom: T, + + pub fn allowedInStyleRule(this: *const @This()) bool { + return switch (this.*) { + .media, .supports, .container, .moz_document, .layer, .starting_style, .scope, .nest, .unknown, .custom => true, + .namespace, .font_face, .font_feature_values, .font_palette_values, .counter_style, .keyframes, .page, .property, .import, .custom_media, .viewport, .charset => false, + }; + } + }; +} + +pub fn TopLevelRuleParser(comptime AtRuleParserT: type) type { + ValidCustomAtRuleParser(AtRuleParserT); + const AtRuleT = AtRuleParserT.CustomAtRuleParser.AtRule; + const AtRulePreludeT = AtRulePrelude(AtRuleParserT.CustomAtRuleParser.Prelude); + + return struct { + allocator: Allocator, + options: *const ParserOptions, + state: State, + at_rule_parser: *AtRuleParserT, + // TODO: think about memory management + rules: *CssRuleList(AtRuleT), + + const State = enum(u8) { + start = 1, + layers = 2, + imports = 3, + namespaces = 4, + body = 5, + }; + + const This = @This(); + + pub const AtRuleParser = struct { + pub const Prelude = AtRulePreludeT; + pub const AtRule = void; + + pub fn parsePrelude(this: *This, name: []const u8, input: *Parser) Result(Prelude) { + const PreludeEnum = enum { + import, + charset, + namespace, + @"custom-media", + property, + }; + const Map = comptime bun.ComptimeEnumMap(PreludeEnum); + + if (Map.getASCIIICaseInsensitive(name)) |prelude| { + switch (prelude) { + .import => { + if (@intFromEnum(this.state) > @intFromEnum(State.imports)) { + return .{ .err = input.newCustomError(@as(ParserError, ParserError.unexpected_import_rule)) }; + } + const url_str = switch (input.expectUrlOrString()) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + + const layer: ?struct { value: ?LayerName } = + if (input.tryParse(Parser.expectIdentMatching, .{"layer"}) == .result) + .{ .value = null } + else if (input.tryParse(Parser.expectFunctionMatching, .{"layer"}) == .result) brk: { + break :brk .{ + .value = switch (input.parseNestedBlock(LayerName, {}, voidWrap(LayerName, LayerName.parse))) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }, + }; + } else null; + + const supports = if (input.tryParse(Parser.expectFunctionMatching, .{"supports"}) == .result) brk: { + const Func = struct { + pub fn do(_: 
void, p: *Parser) Result(SupportsCondition) { + const result = p.tryParse(SupportsCondition.parse, .{}); + if (result == .err) return SupportsCondition.parseDeclaration(p); + return result; + } + }; + break :brk switch (input.parseNestedBlock(SupportsCondition, {}, Func.do)) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + } else null; + + const media = switch (MediaList.parse(input)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + + return .{ + .result = .{ + .import = .{ + url_str, + media, + supports, + if (layer) |l| .{ .value = if (l.value) |ll| ll else null } else null, + }, + }, + }; + }, + .namespace => { + if (@intFromEnum(this.state) > @intFromEnum(State.namespaces)) { + return .{ .err = input.newCustomError(ParserError{ .unexpected_namespace_rule = {} }) }; + } + + const prefix = switch (input.tryParse(Parser.expectIdent, .{})) { + .result => |v| v, + .err => null, + }; + const namespace = switch (input.expectUrlOrString()) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + return .{ .result = .{ .namespace = .{ prefix, namespace } } }; + }, + .charset => { + // @charset is removed by rust-cssparser if it's the first rule in the stylesheet. + // Anything left is technically invalid, however, users often concatenate CSS files + // together, so we are more lenient and simply ignore @charset rules in the middle of a file. + if (input.expectString().asErr()) |e| return .{ .err = e }; + return .{ .result = .charset }; + }, + .@"custom-media" => { + const custom_media_name = switch (DashedIdentFns.parse(input)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + const media = switch (MediaList.parse(input)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + return .{ + .result = .{ + .custom_media = .{ + custom_media_name, + media, + }, + }, + }; + }, + .property => { + const property_name = switch (DashedIdentFns.parse(input)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + return .{ .result = .{ .property = .{property_name} } }; + }, + } + } + + const Nested = NestedRuleParser(AtRuleParserT); + var nested_rule_parser: Nested = this.nested(); + return Nested.AtRuleParser.parsePrelude(&nested_rule_parser, name, input); + } + + pub fn parseBlock(this: *This, prelude: AtRuleParser.Prelude, start: *const ParserState, input: *Parser) Result(AtRuleParser.AtRule) { + this.state = .body; + var nested_parser = this.nested(); + return NestedRuleParser(AtRuleParserT).AtRuleParser.parseBlock(&nested_parser, prelude, start, input); + } + + pub fn ruleWithoutBlock(this: *This, prelude: AtRuleParser.Prelude, start: *const ParserState) Maybe(AtRuleParser.AtRule, void) { + const loc_ = start.sourceLocation(); + const loc = css_rules.Location{ + .source_index = this.options.source_index, + .line = loc_.line, + .column = loc_.column, + }; + + switch (prelude) { + .import => { + this.state = State.imports; + var import_rule = ImportRule{ + .url = prelude.import[0], + .media = prelude.import[1], + .supports = prelude.import[2], + .layer = if (prelude.import[3]) |v| .{ .v = v.value } else null, + .loc = loc, + }; + AtRuleParserT.CustomAtRuleParser.onImportRule(this.at_rule_parser, &import_rule, @intCast(start.position), @intCast(start.position + 1)); + this.rules.v.append(this.allocator, .{ + .import = import_rule, + }) catch bun.outOfMemory(); + return .{ .result = {} }; + }, + .namespace => { + this.state = State.namespaces; + + const prefix = prelude.namespace[0]; + const url = prelude.namespace[1]; + + 
this.rules.v.append(this.allocator, .{ + .namespace = NamespaceRule{ + .prefix = if (prefix) |p| .{ .v = p } else null, + .url = url, + .loc = loc, + }, + }) catch bun.outOfMemory(); + + return .{ .result = {} }; + }, + .custom_media => { + const name = prelude.custom_media[0]; + const query = prelude.custom_media[1]; + this.state = State.body; + this.rules.v.append( + this.allocator, + .{ + .custom_media = css_rules.custom_media.CustomMediaRule{ + .name = name, + .query = query, + .loc = loc, + }, + }, + ) catch bun.outOfMemory(); + return .{ .result = {} }; + }, + .layer => { + if (@intFromEnum(this.state) <= @intFromEnum(State.layers)) { + this.state = .layers; + } else { + this.state = .body; + } + var nested_parser = this.nested(); + return NestedRuleParser(AtRuleParserT).AtRuleParser.ruleWithoutBlock(&nested_parser, prelude, start); + }, + .charset => return .{ .result = {} }, + .unknown => { + const name = prelude.unknown.name; + const prelude2 = prelude.unknown.tokens; + this.rules.v.append(this.allocator, .{ .unknown = UnknownAtRule{ + .name = name, + .prelude = prelude2, + .block = null, + .loc = loc, + } }) catch bun.outOfMemory(); + return .{ .result = {} }; + }, + .custom => { + this.state = .body; + var nested_parser = this.nested(); + return NestedRuleParser(AtRuleParserT).AtRuleParser.ruleWithoutBlock(&nested_parser, prelude, start); + }, + else => return .{ .err = {} }, + } + } + }; + + pub const QualifiedRuleParser = struct { + pub const Prelude = selector.parser.SelectorList; + pub const QualifiedRule = void; + + pub fn parsePrelude(this: *This, input: *Parser) Result(Prelude) { + this.state = .body; + var nested_parser = this.nested(); + const N = @TypeOf(nested_parser); + return N.QualifiedRuleParser.parsePrelude(&nested_parser, input); + } + + pub fn parseBlock(this: *This, prelude: Prelude, start: *const ParserState, input: *Parser) Result(QualifiedRule) { + var nested_parser = this.nested(); + const N = @TypeOf(nested_parser); + return N.QualifiedRuleParser.parseBlock(&nested_parser, prelude, start, input); + } + }; + + pub fn new(allocator: Allocator, options: *const ParserOptions, at_rule_parser: *AtRuleParserT, rules: *CssRuleList(AtRuleT)) @This() { + return .{ + .options = options, + .state = .start, + .at_rule_parser = at_rule_parser, + .rules = rules, + .allocator = allocator, + }; + } + + pub fn nested(this: *This) NestedRuleParser(AtRuleParserT) { + return NestedRuleParser(AtRuleParserT){ + .options = this.options, + .at_rule_parser = this.at_rule_parser, + .declarations = DeclarationList{}, + .important_declarations = DeclarationList{}, + .rules = this.rules, + .is_in_style_rule = false, + .allow_declarations = false, + .allocator = this.allocator, + }; + } + }; +} + +pub fn NestedRuleParser(comptime T: type) type { + ValidCustomAtRuleParser(T); + + const AtRuleT = T.CustomAtRuleParser.AtRule; + + return struct { + options: *const ParserOptions, + at_rule_parser: *T, + // todo_stuff.think_mem_mgmt + declarations: DeclarationList, + // todo_stuff.think_mem_mgmt + important_declarations: DeclarationList, + // todo_stuff.think_mem_mgmt + rules: *CssRuleList(T.CustomAtRuleParser.AtRule), + is_in_style_rule: bool, + allow_declarations: bool, + allocator: Allocator, + + const This = @This(); + + pub fn getLoc(this: *This, start: *const ParserState) Location { + const loc = start.sourceLocation(); + return Location{ + .source_index = this.options.source_index, + .line = loc.line, + .column = loc.column, + }; + } + + pub const AtRuleParser = struct { + pub const 
Prelude = AtRulePrelude(T.CustomAtRuleParser.Prelude); + pub const AtRule = void; + + pub fn parsePrelude(this: *This, name: []const u8, input: *Parser) Result(Prelude) { + const result: Prelude = brk: { + const PreludeEnum = enum { + media, + supports, + @"font-face", + @"font-palette-values", + @"counter-style", + viewport, + keyframes, + @"-ms-viewport", + @"-moz-keyframes", + @"-o-keyframes", + @"-ms-keyframes", + page, + @"-moz-document", + layer, + container, + @"starting-style", + scope, + nest, + }; + const Map = comptime bun.ComptimeEnumMap(PreludeEnum); + if (Map.getASCIIICaseInsensitive(name)) |kind| switch (kind) { + .media => { + const media = switch (MediaList.parse(input)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + break :brk .{ .media = media }; + }, + .supports => { + const cond = switch (SupportsCondition.parse(input)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + break :brk .{ .supports = cond }; + }, + .@"font-face" => break :brk .font_face, + .@"font-palette-values" => { + const dashed_ident_name = switch (DashedIdentFns.parse(input)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + break :brk .{ .font_palette_values = dashed_ident_name }; + }, + .@"counter-style" => { + const custom_name = switch (CustomIdentFns.parse(input)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + break :brk .{ .counter_style = custom_name }; + }, + .viewport, .@"-ms-viewport" => { + const prefix: VendorPrefix = if (bun.strings.startsWithCaseInsensitiveAscii(name, "-ms")) VendorPrefix{ .ms = true } else VendorPrefix{ .none = true }; + break :brk .{ .viewport = prefix }; + }, + .keyframes, .@"-moz-keyframes", .@"-o-keyframes", .@"-ms-keyframes" => { + const prefix: VendorPrefix = if (bun.strings.startsWithCaseInsensitiveAscii(name, "-webkit")) + VendorPrefix{ .webkit = true } + else if (bun.strings.startsWithCaseInsensitiveAscii(name, "-moz-")) + VendorPrefix{ .moz = true } + else if (bun.strings.startsWithCaseInsensitiveAscii(name, "-o-")) + VendorPrefix{ .o = true } + else if (bun.strings.startsWithCaseInsensitiveAscii(name, "-ms-")) VendorPrefix{ .ms = true } else VendorPrefix{ .none = true }; + + const keyframes_name = switch (input.tryParse(css_rules.keyframes.KeyframesName.parse, .{})) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + break :brk .{ .keyframes = .{ .name = keyframes_name, .prefix = prefix } }; + }, + .page => { + const Fn = struct { + pub fn parsefn(input2: *Parser) Result(ArrayList(css_rules.page.PageSelector)) { + return input2.parseCommaSeparated(css_rules.page.PageSelector, css_rules.page.PageSelector.parse); + } + }; + const selectors = switch (input.tryParse(Fn.parsefn, .{})) { + .result => |v| v, + .err => ArrayList(css_rules.page.PageSelector){}, + }; + break :brk .{ .page = selectors }; + }, + .@"-moz-document" => { + // Firefox only supports the url-prefix() function with no arguments as a legacy CSS hack. + // See https://css-tricks.com/snippets/css/css-hacks-targeting-firefox/ + if (input.expectFunctionMatching("url-prefix").asErr()) |e| return .{ .err = e }; + const Fn = struct { + pub fn parsefn(_: void, input2: *Parser) Result(void) { + // Firefox also allows an empty string as an argument... 
+ // https://github.com/mozilla/gecko-dev/blob/0077f2248712a1b45bf02f0f866449f663538164/servo/components/style/stylesheets/document_rule.rs#L303 + _ = input2.tryParse(parseInner, .{}); + if (input2.expectExhausted().asErr()) |e| return .{ .err = e }; + return .{ .result = {} }; + } + fn parseInner(input2: *Parser) Result(void) { + const s = switch (input2.expectString()) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + if (s.len > 0) { + return .{ .err = input2.newCustomError(ParserError.invalid_value) }; + } + return .{ .result = {} }; + } + }; + if (input.parseNestedBlock(void, {}, Fn.parsefn).asErr()) |e| return .{ .err = e }; + break :brk .moz_document; + }, + .layer => { + const names = switch (input.parseList(LayerName, LayerName.parse)) { + .result => |vv| vv, + .err => |e| names: { + if (e.kind == .basic and e.kind.basic == .end_of_input) { + break :names ArrayList(LayerName){}; + } + return .{ .err = e }; + }, + }; + + break :brk .{ .layer = names }; + }, + .container => { + const container_name = switch (input.tryParse(css_rules.container.ContainerName.parse, .{})) { + .result => |vv| vv, + .err => null, + }; + const condition = switch (css_rules.container.ContainerCondition.parse(input)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + break :brk .{ .container = .{ .name = container_name, .condition = condition } }; + }, + .@"starting-style" => break :brk .starting_style, + .scope => { + var selector_parser = selector.parser.SelectorParser{ + .is_nesting_allowed = true, + .options = this.options, + .allocator = input.allocator(), + }; + const Closure = struct { + selector_parser: *selector.parser.SelectorParser, + pub fn parsefn(self: *@This(), input2: *Parser) Result(selector.parser.SelectorList) { + return selector.parser.SelectorList.parseRelative(self.selector_parser, input2, .ignore_invalid_selector, .none); + } + }; + var closure = Closure{ + .selector_parser = &selector_parser, + }; + + const scope_start = if (input.tryParse(Parser.expectParenthesisBlock, .{}).isOk()) scope_start: { + break :scope_start switch (input.parseNestedBlock(selector.parser.SelectorList, &closure, Closure.parsefn)) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + } else null; + + const scope_end = if (input.tryParse(Parser.expectIdentMatching, .{"to"}).isOk()) scope_end: { + if (input.expectParenthesisBlock().asErr()) |e| return .{ .err = e }; + break :scope_end switch (input.parseNestedBlock(selector.parser.SelectorList, &closure, Closure.parsefn)) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + } else null; + + break :brk .{ + .scope = .{ + .scope_start = scope_start, + .scope_end = scope_end, + }, + }; + }, + .nest => { + if (this.is_in_style_rule) { + this.options.warn(input.newCustomError(ParserError{ .deprecated_nest_rule = {} })); + var selector_parser = selector.parser.SelectorParser{ + .is_nesting_allowed = true, + .options = this.options, + .allocator = input.allocator(), + }; + const selectors = switch (selector.parser.SelectorList.parse(&selector_parser, input, .discard_list, .contained)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + break :brk .{ .nest = selectors }; + } + }, + }; + + switch (parse_custom_at_rule_prelude( + name, + input, + this.options, + T, + this.at_rule_parser, + )) { + .result => |v| break :brk v, + .err => |e| return .{ .err = e }, + } + }; + + if (this.is_in_style_rule and !result.allowedInStyleRule()) { + return .{ .err = input.newError(BasicParseErrorKind{ .at_rule_invalid = 
name }) }; + } + + return .{ .result = result }; + } + + pub fn parseBlock(this: *This, prelude: AtRuleParser.Prelude, start: *const ParserState, input: *Parser) Result(AtRuleParser.AtRule) { + const loc = this.getLoc(start); + switch (prelude) { + .font_face => { + var decl_parser = css_rules.font_face.FontFaceDeclarationParser{}; + var parser = RuleBodyParser(css_rules.font_face.FontFaceDeclarationParser).new(input, &decl_parser); + // todo_stuff.think_mem_mgmt + var properties: ArrayList(css_rules.font_face.FontFaceProperty) = .{}; + + while (parser.next()) |result| { + if (result.asValue()) |decl| { + properties.append( + input.allocator(), + decl, + ) catch bun.outOfMemory(); + } + } + + this.rules.v.append( + input.allocator(), + .{ + .font_face = css_rules.font_face.FontFaceRule{ + .properties = properties, + .loc = loc, + }, + }, + ) catch bun.outOfMemory(); + return .{ .result = {} }; + }, + .font_palette_values => { + const name = prelude.font_palette_values; + const rule = switch (css_rules.font_palette_values.FontPaletteValuesRule.parse(name, input, loc)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + this.rules.v.append( + input.allocator(), + .{ .font_palette_values = rule }, + ) catch bun.outOfMemory(); + return .{ .result = {} }; + }, + .counter_style => { + const name = prelude.counter_style; + this.rules.v.append( + input.allocator(), + .{ + .counter_style = css_rules.counter_style.CounterStyleRule{ + .name = name, + .declarations = switch (DeclarationBlock.parse(input, this.options)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }, + .loc = loc, + }, + }, + ) catch bun.outOfMemory(); + return .{ .result = {} }; + }, + .media => { + const query = prelude.media; + const rules = switch (this.parseStyleBlock(input)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + this.rules.v.append( + input.allocator(), + .{ + .media = css_rules.media.MediaRule(T.CustomAtRuleParser.AtRule){ + .query = query, + .rules = rules, + .loc = loc, + }, + }, + ) catch bun.outOfMemory(); + return .{ .result = {} }; + }, + .supports => { + const condition = prelude.supports; + const rules = switch (this.parseStyleBlock(input)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + this.rules.v.append(input.allocator(), .{ + .supports = css_rules.supports.SupportsRule(T.CustomAtRuleParser.AtRule){ + .condition = condition, + .rules = rules, + .loc = loc, + }, + }) catch bun.outOfMemory(); + return .{ .result = {} }; + }, + .container => { + const rules = switch (this.parseStyleBlock(input)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + this.rules.v.append( + input.allocator(), + .{ + .container = css_rules.container.ContainerRule(T.CustomAtRuleParser.AtRule){ + .name = prelude.container.name, + .condition = prelude.container.condition, + .rules = rules, + .loc = loc, + }, + }, + ) catch bun.outOfMemory(); + return .{ .result = {} }; + }, + .scope => { + const rules = switch (this.parseStyleBlock(input)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + this.rules.v.append( + input.allocator(), + .{ + .scope = css_rules.scope.ScopeRule(T.CustomAtRuleParser.AtRule){ + .scope_start = prelude.scope.scope_start, + .scope_end = prelude.scope.scope_end, + .rules = rules, + .loc = loc, + }, + }, + ) catch bun.outOfMemory(); + return .{ .result = {} }; + }, + .viewport => { + this.rules.v.append(input.allocator(), .{ + .viewport = css_rules.viewport.ViewportRule{ + .vendor_prefix = prelude.viewport, + .declarations = 
switch (DeclarationBlock.parse(input, this.options)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }, + .loc = loc, + }, + }) catch bun.outOfMemory(); + return .{ .result = {} }; + }, + .keyframes => { + var parser = css_rules.keyframes.KeyframesListParser{}; + var iter = RuleBodyParser(css_rules.keyframes.KeyframesListParser).new(input, &parser); + // todo_stuff.think_mem_mgmt + var keyframes = ArrayList(css_rules.keyframes.Keyframe){}; + + while (iter.next()) |result| { + if (result.asValue()) |keyframe| { + keyframes.append( + input.allocator(), + keyframe, + ) catch bun.outOfMemory(); + } + } + + this.rules.v.append(input.allocator(), .{ + .keyframes = css_rules.keyframes.KeyframesRule{ + .name = prelude.keyframes.name, + .keyframes = keyframes, + .vendor_prefix = prelude.keyframes.prefix, + .loc = loc, + }, + }) catch bun.outOfMemory(); + return .{ .result = {} }; + }, + .page => { + const selectors = prelude.page; + const rule = switch (css_rules.page.PageRule.parse(selectors, input, loc, this.options)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + this.rules.v.append( + input.allocator(), + .{ .page = rule }, + ) catch bun.outOfMemory(); + return .{ .result = {} }; + }, + .moz_document => { + const rules = switch (this.parseStyleBlock(input)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + this.rules.v.append(input.allocator(), .{ + .moz_document = css_rules.document.MozDocumentRule(T.CustomAtRuleParser.AtRule){ + .rules = rules, + .loc = loc, + }, + }) catch bun.outOfMemory(); + return .{ .result = {} }; + }, + .layer => { + const name = if (prelude.layer.items.len == 0) null else if (prelude.layer.items.len == 1) names: { + var out: LayerName = .{}; + std.mem.swap(LayerName, &out, &prelude.layer.items[0]); + break :names out; + } else return .{ .err = input.newError(.at_rule_body_invalid) }; + + const rules = switch (this.parseStyleBlock(input)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + + this.rules.v.append(input.allocator(), .{ + .layer_block = css_rules.layer.LayerBlockRule(T.CustomAtRuleParser.AtRule){ .name = name, .rules = rules, .loc = loc }, + }) catch bun.outOfMemory(); + return .{ .result = {} }; + }, + .property => { + const name = prelude.property[0]; + this.rules.v.append(input.allocator(), .{ + .property = switch (css_rules.property.PropertyRule.parse(name, input, loc)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }, + }) catch bun.outOfMemory(); + return .{ .result = {} }; + }, + .import, .namespace, .custom_media, .charset => { + // These rules don't have blocks + return .{ .err = input.newUnexpectedTokenError(.open_curly) }; + }, + .starting_style => { + const rules = switch (this.parseStyleBlock(input)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + this.rules.v.append( + input.allocator(), + .{ + .starting_style = css_rules.starting_style.StartingStyleRule(T.CustomAtRuleParser.AtRule){ + .rules = rules, + .loc = loc, + }, + }, + ) catch bun.outOfMemory(); + return .{ .result = {} }; + }, + .nest => { + const selectors = prelude.nest; + const result = switch (this.parseNested(input, true)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + const declarations = result[0]; + const rules = result[1]; + this.rules.v.append( + input.allocator(), + .{ + .nesting = css_rules.nesting.NestingRule(T.CustomAtRuleParser.AtRule){ + .style = css_rules.style.StyleRule(T.CustomAtRuleParser.AtRule){ + .selectors = selectors, + .declarations = declarations, + 
.vendor_prefix = VendorPrefix.empty(), + .rules = rules, + .loc = loc, + }, + .loc = loc, + }, + }, + ) catch bun.outOfMemory(); + return .{ .result = {} }; + }, + .font_feature_values => bun.unreachablePanic("", .{}), + .unknown => { + this.rules.v.append( + input.allocator(), + .{ + .unknown = css_rules.unknown.UnknownAtRule{ + .name = prelude.unknown.name, + .prelude = prelude.unknown.tokens, + .block = switch (TokenListFns.parse(input, this.options, 0)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }, + .loc = loc, + }, + }, + ) catch bun.outOfMemory(); + return .{ .result = {} }; + }, + .custom => { + this.rules.v.append( + input.allocator(), + .{ + .custom = switch (parse_custom_at_rule_body(T, prelude.custom, input, start, this.options, this.at_rule_parser, this.is_in_style_rule)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }, + }, + ) catch bun.outOfMemory(); + return .{ .result = {} }; + }, + } + } + + pub fn ruleWithoutBlock(this: *This, prelude: AtRuleParser.Prelude, start: *const ParserState) Maybe(AtRuleParser.AtRule, void) { + const loc = this.getLoc(start); + switch (prelude) { + .layer => { + if (this.is_in_style_rule or prelude.layer.items.len == 0) { + return .{ .err = {} }; + } + + this.rules.v.append( + this.allocator, + .{ + .layer_statement = css_rules.layer.LayerStatementRule{ + .names = prelude.layer, + .loc = loc, + }, + }, + ) catch bun.outOfMemory(); + return .{ .result = {} }; + }, + .unknown => { + this.rules.v.append( + this.allocator, + .{ + .unknown = css_rules.unknown.UnknownAtRule{ + .name = prelude.unknown.name, + .prelude = prelude.unknown.tokens, + .block = null, + .loc = loc, + }, + }, + ) catch bun.outOfMemory(); + return .{ .result = {} }; + }, + .custom => { + this.rules.v.append(this.allocator, switch (parse_custom_at_rule_without_block( + T, + prelude.custom, + start, + this.options, + this.at_rule_parser, + this.is_in_style_rule, + )) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }) catch bun.outOfMemory(); + return .{ .result = {} }; + }, + else => return .{ .err = {} }, + } + } + }; + + pub const QualifiedRuleParser = struct { + pub const Prelude = selector.parser.SelectorList; + pub const QualifiedRule = void; + + pub fn parsePrelude(this: *This, input: *Parser) Result(Prelude) { + var selector_parser = selector.parser.SelectorParser{ + .is_nesting_allowed = true, + .options = this.options, + .allocator = input.allocator(), + }; + + if (this.is_in_style_rule) { + return selector.parser.SelectorList.parseRelative(&selector_parser, input, .discard_list, .implicit); + } else { + return selector.parser.SelectorList.parse(&selector_parser, input, .discard_list, .none); + } + } + + pub fn parseBlock(this: *This, selectors: Prelude, start: *const ParserState, input: *Parser) Result(QualifiedRule) { + const loc = this.getLoc(start); + const result = switch (this.parseNested(input, true)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + const declarations = result[0]; + const rules = result[1]; + + this.rules.v.append(this.allocator, .{ + .style = StyleRule(AtRuleT){ + .selectors = selectors, + .vendor_prefix = VendorPrefix{}, + .declarations = declarations, + .rules = rules, + .loc = loc, + }, + }) catch bun.outOfMemory(); + + return Result(QualifiedRule).success; + } + }; + + pub const RuleBodyItemParser = struct { + pub fn parseQualified(this: *This) bool { + _ = this; // autofix + return true; + } + + pub fn parseDeclarations(this: *This) bool { + return this.allow_declarations; + } + }; + 
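+ // Note: `RuleBodyParser(This).next()` consults `RuleBodyItemParser.parseDeclarations` and
+ // `RuleBodyItemParser.parseQualified` above when it sees an `ident` token: if declarations are
+ // allowed it first tries to parse a declaration, and if that fails and qualified rules are
+ // allowed it rewinds and retries the input as a qualified (nested style) rule.
+ 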
+ pub const DeclarationParser = struct { + pub const Declaration = void; + + pub fn parseValue(this: *This, name: []const u8, input: *Parser) Result(Declaration) { + return css_decls.parse_declaration( + name, + input, + &this.declarations, + &this.important_declarations, + this.options, + ); + } + }; + + pub fn parseNested(this: *This, input: *Parser, is_style_rule: bool) Result(struct { DeclarationBlock, CssRuleList(T.CustomAtRuleParser.AtRule) }) { + // TODO: think about memory management in error cases + var rules = CssRuleList(T.CustomAtRuleParser.AtRule){}; + var nested_parser = This{ + .allocator = input.allocator(), + .options = this.options, + .at_rule_parser = this.at_rule_parser, + .declarations = DeclarationList{}, + .important_declarations = DeclarationList{}, + .rules = &rules, + .is_in_style_rule = this.is_in_style_rule or is_style_rule, + .allow_declarations = this.allow_declarations or this.is_in_style_rule or is_style_rule, + }; + + const parse_declarations = This.RuleBodyItemParser.parseDeclarations(&nested_parser); + // TODO: think about memory management + var errors = ArrayList(ParseError(ParserError)){}; + var iter = RuleBodyParser(This).new(input, &nested_parser); + + while (iter.next()) |result| { + if (result.asErr()) |e| { + if (parse_declarations) { + iter.parser.declarations.clearRetainingCapacity(); + iter.parser.important_declarations.clearRetainingCapacity(); + errors.append( + this.allocator, + e, + ) catch bun.outOfMemory(); + } else { + if (iter.parser.options.error_recovery) { + iter.parser.options.warn(e); + continue; + } + return .{ .err = e }; + } + } + } + + if (parse_declarations) { + if (errors.items.len > 0) { + if (this.options.error_recovery) { + for (errors.items) |e| { + this.options.warn(e); + } + } else { + return .{ .err = errors.orderedRemove(0) }; + } + } + } + + return .{ + .result = .{ + DeclarationBlock{ + .declarations = nested_parser.declarations, + .important_declarations = nested_parser.important_declarations, + }, + rules, + }, + }; + } + + pub fn parseStyleBlock(this: *This, input: *Parser) Result(CssRuleList(T.CustomAtRuleParser.AtRule)) { + const srcloc = input.currentSourceLocation(); + const loc = Location{ + .source_index = this.options.source_index, + .line = srcloc.line, + .column = srcloc.column, + }; + + // Declarations can be immediately within @media and @supports blocks that are nested within a parent style rule. + // These act the same way as if they were nested within a `& { ... }` block. 
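+ //
+ // For example (illustrative):
+ //
+ //   .a { @media screen { color: red } }
+ //
+ // behaves the same as:
+ //
+ //   .a { @media screen { & { color: red } } }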
+ const declarations, var rules = switch (this.parseNested(input, false)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + + if (declarations.len() > 0) { + rules.v.insert( + input.allocator(), + 0, + .{ + .style = StyleRule(T.CustomAtRuleParser.AtRule){ + .selectors = selector.parser.SelectorList.fromSelector( + input.allocator(), + selector.parser.Selector.fromComponent(input.allocator(), .nesting), + ), + .declarations = declarations, + .vendor_prefix = VendorPrefix.empty(), + .rules = .{}, + .loc = loc, + }, + }, + ) catch unreachable; + } + + return .{ .result = rules }; + } + }; +} + +pub fn StyleSheetParser(comptime P: type) type { + ValidAtRuleParser(P); + ValidQualifiedRuleParser(P); + + if (P.QualifiedRuleParser.QualifiedRule != P.AtRuleParser.AtRule) { + @compileError("StyleSheetParser: P.QualifiedRuleParser.QualifiedRule != P.AtRuleParser.AtRule"); + } + + const Item = P.AtRuleParser.AtRule; + + return struct { + input: *Parser, + parser: *P, + any_rule_so_far: bool = false, + + pub fn new(input: *Parser, parser: *P) @This() { + return .{ + .input = input, + .parser = parser, + }; + } + + pub fn next(this: *@This(), allocator: Allocator) ?(Result(Item)) { + while (true) { + this.input.@"skip cdc and cdo"(); + + const start = this.input.state(); + const at_keyword: ?[]const u8 = switch (this.input.nextByte() orelse return null) { + '@' => brk: { + const at_keyword: *Token = switch (this.input.nextIncludingWhitespaceAndComments()) { + .result => |vv| vv, + .err => { + this.input.reset(&start); + break :brk null; + }, + }; + + if (at_keyword.* == .at_keyword) break :brk at_keyword.at_keyword; + this.input.reset(&start); + break :brk null; + }, + else => null, + }; + + if (at_keyword) |name| { + const first_stylesheet_rule = !this.any_rule_so_far; + this.any_rule_so_far = true; + + if (first_stylesheet_rule and bun.strings.eqlCaseInsensitiveASCII(name, "charset", true)) { + const delimiters = Delimiters{ + .semicolon = true, + .close_curly_bracket = true, + }; + _ = this.input.parseUntilAfter(delimiters, void, {}, voidWrap(void, Parser.parseEmpty)); + } else { + return parse_at_rule(allocator, &start, name, this.input, P, this.parser); + } + } else { + this.any_rule_so_far = true; + const result = parse_qualified_rule(&start, this.input, P, this.parser, Delimiters{ .curly_bracket = true }); + return result; + } + } + } + }; +} + +/// A result returned from `to_css`, including the serialized CSS +/// and other metadata depending on the input options. +pub const ToCssResult = struct { + /// Serialized CSS code. + code: []const u8, + /// A map of CSS module exports, if the `css_modules` option was + /// enabled during parsing. + exports: ?CssModuleExports, + /// A map of CSS module references, if the `css_modules` config + /// had `dashed_idents` enabled. + references: ?CssModuleReferences, + /// A list of dependencies (e.g. `@import` or `url()`) found in + /// the style sheet, if the `analyze_dependencies` option is enabled. + dependencies: ?ArrayList(Dependency), +}; + +pub const ToCssResultInternal = struct { + /// A map of CSS module exports, if the `css_modules` option was + /// enabled during parsing. + exports: ?CssModuleExports, + /// A map of CSS module references, if the `css_modules` config + /// had `dashed_idents` enabled. + references: ?CssModuleReferences, + /// A list of dependencies (e.g. `@import` or `url()`) found in + /// the style sheet, if the `analyze_dependencies` option is enabled. 
+ dependencies: ?ArrayList(Dependency), +}; + +pub const MinifyOptions = struct { + /// Targets to compile the CSS for. + targets: targets.Targets, + /// A list of known unused symbols, including CSS class names, + /// ids, and `@keyframe` names. The declarations of these will be removed. + unused_symbols: std.StringArrayHashMapUnmanaged(void), + + pub fn default() MinifyOptions { + return MinifyOptions{ + .targets = .{}, + .unused_symbols = .{}, + }; + } +}; + +pub const BundlerStyleSheet = StyleSheet(BundlerAtRule); +pub const BundlerCssRuleList = CssRuleList(BundlerAtRule); +pub const BundlerCssRule = CssRule(BundlerAtRule); +pub const BundlerLayerBlockRule = css_rules.layer.LayerBlockRule(BundlerAtRule); +pub const BundlerTailwindState = struct { + source: []const u8, + index: bun.bundle_v2.Index, + output_from_tailwind: ?[]const u8 = null, +}; + +pub fn StyleSheet(comptime AtRule: type) type { + return struct { + /// A list of top-level rules within the style sheet. + rules: CssRuleList(AtRule), + sources: ArrayList([]const u8), + source_map_urls: ArrayList(?[]const u8), + license_comments: ArrayList([]const u8), + options: ParserOptions, + tailwind: if (AtRule == BundlerAtRule) ?*BundlerTailwindState else u0 = if (AtRule == BundlerAtRule) null else 0, + + const This = @This(); + + pub fn empty(allocator: Allocator) This { + return This{ + .rules = .{}, + .sources = .{}, + .source_map_urls = .{}, + .license_comments = .{}, + .options = ParserOptions.default(allocator, null), + }; + } + + /// Minify and transform the style sheet for the provided browser targets. + pub fn minify(this: *@This(), allocator: Allocator, options: MinifyOptions) Maybe(void, Err(MinifyErrorKind)) { + const ctx = PropertyHandlerContext.new(allocator, options.targets, &options.unused_symbols); + var handler = declaration.DeclarationHandler.default(); + var important_handler = declaration.DeclarationHandler.default(); + + // @custom-media rules may be defined after they are referenced, but may only be defined at the top level + // of a stylesheet. Do a pre-scan here and create a lookup table by name. 
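+ //
+ // For example (illustrative), this is valid even though the definition comes second:
+ //
+ //   @media (--modern) { .a { color: red } }
+ //   @custom-media --modern (color), (hover);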
+ var custom_media: ?std.StringArrayHashMapUnmanaged(css_rules.custom_media.CustomMediaRule) = if (this.options.flags.contains(ParserFlags{ .custom_media = true }) and options.targets.shouldCompileSame(.custom_media_queries)) brk: { + var custom_media = std.StringArrayHashMapUnmanaged(css_rules.custom_media.CustomMediaRule){}; + + for (this.rules.v.items) |*rule| { + if (rule.* == .custom_media) { + custom_media.put(allocator, rule.custom_media.name.v, rule.custom_media.deepClone(allocator)) catch bun.outOfMemory(); + } + } + + break :brk custom_media; + } else null; + defer if (custom_media) |*media| media.deinit(allocator); + + var minify_ctx = MinifyContext{ + .allocator = allocator, + .targets = &options.targets, + .handler = &handler, + .important_handler = &important_handler, + .handler_context = ctx, + .unused_symbols = &options.unused_symbols, + .custom_media = custom_media, + .css_modules = this.options.css_modules != null, + }; + + this.rules.minify(&minify_ctx, false) catch { + @panic("TODO: Handle"); + }; + + return .{ .result = {} }; + } + + pub fn toCssWithWriter(this: *const @This(), allocator: Allocator, writer: anytype, options: css_printer.PrinterOptions, import_records: ?*const bun.BabyList(ImportRecord)) PrintErr!ToCssResultInternal { + const W = @TypeOf(writer); + const project_root = options.project_root; + var printer = Printer(@TypeOf(writer)).new(allocator, std.ArrayList(u8).init(allocator), writer, options, import_records); + + // #[cfg(feature = "sourcemap")] + // { + // printer.sources = Some(&self.sources); + // } + + // #[cfg(feature = "sourcemap")] + // if printer.source_map.is_some() { + // printer.source_maps = self.sources.iter().enumerate().map(|(i, _)| self.source_map(i)).collect(); + // } + + for (this.license_comments.items) |comment| { + try printer.writeStr("/*"); + try printer.writeComment(comment); + try printer.writeStr("*/"); + try printer.newline(); + } + + if (this.options.css_modules) |*config| { + var references = CssModuleReferences{}; + printer.css_module = CssModule.new(allocator, config, &this.sources, project_root, &references); + + try this.rules.toCss(W, &printer); + try printer.newline(); + + return ToCssResultInternal{ + .dependencies = printer.dependencies, + .exports = exports: { + const val = printer.css_module.?.exports_by_source_index.items[0]; + printer.css_module.?.exports_by_source_index.items[0] = .{}; + break :exports val; + }, + // .code = dest.items, + .references = references, + }; + } else { + try this.rules.toCss(W, &printer); + try printer.newline(); + return ToCssResultInternal{ + .dependencies = printer.dependencies, + // .code = dest.items, + .exports = null, + .references = null, + }; + } + } + + pub fn toCss(this: *const @This(), allocator: Allocator, options: css_printer.PrinterOptions, import_records: ?*const bun.BabyList(ImportRecord)) PrintErr!ToCssResult { + // TODO: this is not necessary + // Make sure we always have capacity > 0: https://github.com/napi-rs/napi-rs/issues/1124. 
+ var dest = ArrayList(u8).initCapacity(allocator, 1) catch unreachable; + const writer = dest.writer(allocator); + const result = try toCssWithWriter(this, allocator, writer, options, import_records); + return ToCssResult{ + .code = dest.items, + .dependencies = result.dependencies, + .exports = result.exports, + .references = result.references, + }; + } + + pub fn parse(allocator: Allocator, code: []const u8, options: ParserOptions, import_records: ?*bun.BabyList(ImportRecord)) Maybe(This, Err(ParserError)) { + var default_at_rule_parser = DefaultAtRuleParser{}; + return parseWith(allocator, code, options, DefaultAtRuleParser, &default_at_rule_parser, import_records); + } + + pub fn parseBundler(allocator: Allocator, code: []const u8, options: ParserOptions, import_records: *bun.BabyList(ImportRecord)) Maybe(This, Err(ParserError)) { + var at_rule_parser = BundlerAtRuleParser{ + .import_records = import_records, + .allocator = allocator, + .options = &options, + }; + return parseWith(allocator, code, options, BundlerAtRuleParser, &at_rule_parser, import_records); + } + + /// Parse a style sheet from a string. + pub fn parseWith( + allocator: Allocator, + code: []const u8, + options: ParserOptions, + comptime P: type, + at_rule_parser: *P, + import_records: ?*bun.BabyList(ImportRecord), + ) Maybe(This, Err(ParserError)) { + var input = ParserInput.new(allocator, code); + var parser = Parser.new(&input, import_records); + + var license_comments = ArrayList([]const u8){}; + var state = parser.state(); + while (switch (parser.nextIncludingWhitespaceAndComments()) { + .result => |v| v, + .err => null, + }) |token| { + switch (token.*) { + .whitespace => {}, + .comment => |comment| { + if (bun.strings.startsWithChar(comment, '!')) { + license_comments.append(allocator, comment) catch bun.outOfMemory(); + } + }, + else => break, + } + state = parser.state(); + } + parser.reset(&state); + + var rules = CssRuleList(AtRule){}; + var rule_parser = TopLevelRuleParser(P).new(allocator, &options, at_rule_parser, &rules); + var rule_list_parser = StyleSheetParser(TopLevelRuleParser(P)).new(&parser, &rule_parser); + + while (rule_list_parser.next(allocator)) |result| { + if (result.asErr()) |e| { + const result_options = rule_list_parser.parser.options; + if (result_options.error_recovery) { + // todo_stuff.warn + continue; + } + + return .{ .err = Err(ParserError).fromParseError(e, options.filename) }; + } + } + + var sources = ArrayList([]const u8){}; + sources.append(allocator, options.filename) catch bun.outOfMemory(); + var source_map_urls = ArrayList(?[]const u8){}; + source_map_urls.append(allocator, parser.currentSourceMapUrl()) catch bun.outOfMemory(); + + return .{ + .result = This{ + .rules = rules, + .sources = sources, + .source_map_urls = source_map_urls, + .license_comments = license_comments, + .options = options, + }, + }; + } + + pub fn containsTailwindDirectives(this: *const @This()) bool { + if (comptime AtRule != BundlerAtRule) @compileError("Expected BundlerAtRule for this function."); + var found_import: bool = false; + for (this.rules.v.items) |*rule| { + switch (rule.*) { + .custom => { + return true; + }, + // .charset => {}, + // TODO: layer + .layer_block => {}, + .import => { + found_import = true; + }, + else => { + return false; + }, + } + } + return false; + } + + pub fn newFromTailwindImports( + allocator: Allocator, + options: ParserOptions, + imports_from_tailwind: CssRuleList(AtRule), + ) @This() { + _ = allocator; // autofix + if (comptime AtRule != BundlerAtRule) 
@compileError("Expected BundlerAtRule for this function."); + + const stylesheet = This{ + .rules = imports_from_tailwind, + .sources = .{}, + .source_map_urls = .{}, + .license_comments = .{}, + .options = options, + }; + + return stylesheet; + } + + /// *NOTE*: Used for Tailwind stylesheets only + /// + /// This plucks out the import rules from the Tailwind stylesheet into a separate rule list, + /// replacing them with `.ignored` rules. + /// + /// We do this because Tailwind's compiler pipeline does not bundle imports, so we handle that + /// ourselves in the bundler. + pub fn pluckImports(this: *const @This(), allocator: Allocator, out: *CssRuleList(AtRule), new_import_records: *bun.BabyList(ImportRecord)) void { + if (comptime AtRule != BundlerAtRule) @compileError("Expected BundlerAtRule for this function."); + const State = enum { count, exec }; + + const STATES = comptime [_]State{ .count, .exec }; + + var count: u32 = 0; + inline for (STATES[0..]) |state| { + if (comptime state == .exec) { + out.v.ensureUnusedCapacity(allocator, count) catch bun.outOfMemory(); + } + var saw_imports = false; + for (this.rules.v.items) |*rule| { + switch (rule.*) { + // TODO: layer, might have imports + .layer_block => {}, + .import => { + if (!saw_imports) saw_imports = true; + switch (state) { + .count => count += 1, + .exec => { + const import_rule = &rule.import; + out.v.appendAssumeCapacity(rule.*); + const import_record_idx = new_import_records.len; + import_rule.import_record_idx = import_record_idx; + new_import_records.push(allocator, ImportRecord{ + .path = bun.fs.Path.init(import_rule.url), + .kind = if (import_rule.supports != null) .at_conditional else .at, + .range = bun.logger.Range.None, + }) catch bun.outOfMemory(); + rule.* = .ignored; + }, + } + }, + .unknown => { + if (bun.strings.eqlComptime(rule.unknown.name, "tailwind")) { + continue; + } + }, + else => {}, + } + if (saw_imports) break; + } + } + } + }; +} + +pub const StyleAttribute = struct { + declarations: DeclarationBlock, + sources: ArrayList([]const u8), + + pub fn parse(allocator: Allocator, code: []const u8, options: ParserOptions, import_records: *bun.BabyList(ImportRecord)) Maybe(StyleAttribute, Err(ParserError)) { + var input = ParserInput.new(allocator, code); + var parser = Parser.new(&input, import_records); + const sources = sources: { + var s = ArrayList([]const u8).initCapacity(allocator, 1) catch bun.outOfMemory(); + s.appendAssumeCapacity(options.filename); + break :sources s; + }; + return .{ .result = StyleAttribute{ + .declarations = switch (DeclarationBlock.parse(&parser, &options)) { + .result => |v| v, + .err => |e| return .{ .err = Err(ParserError).fromParseError(e, "") }, + }, + .sources = sources, + } }; + } + + pub fn toCss(this: *const StyleAttribute, allocator: Allocator, options: PrinterOptions, import_records: *bun.BabyList(ImportRecord)) PrintErr!ToCssResult { + // #[cfg(feature = "sourcemap")] + // assert!( + // options.source_map.is_none(), + // "Source maps are not supported for style attributes" + // ); + + var dest = ArrayList(u8){}; + const writer = dest.writer(allocator); + var printer = Printer(@TypeOf(writer)).new(allocator, std.ArrayList(u8).init(allocator), writer, options, import_records); + printer.sources = &this.sources; + + try this.declarations.toCss(@TypeOf(writer), &printer); + + return ToCssResult{ + .dependencies = printer.dependencies, + .code = dest.items, + .exports = null, + .references = null, + }; + } + + pub fn minify(this: *@This(), allocator: Allocator, options: 
MinifyOptions) void { + _ = allocator; // autofix + _ = this; // autofix + _ = options; // autofix + // TODO: IMPLEMENT THIS! + } +}; + +pub fn ValidDeclarationParser(comptime P: type) void { + // The finished representation of a declaration. + _ = P.DeclarationParser.Declaration; + + // Parse the value of a declaration with the given `name`. + // + // Return the finished representation for the declaration + // as returned by `DeclarationListParser::next`, + // or `Err(())` to ignore the entire declaration as invalid. + // + // Declaration name matching should be case-insensitive in the ASCII range. + // This can be done with `std::ascii::Ascii::eq_ignore_ascii_case`, + // or with the `match_ignore_ascii_case!` macro. + // + // The given `input` is a "delimited" parser + // that ends wherever the declaration value should end. + // (In declaration lists, before the next semicolon or end of the current block.) + // + // If `!important` can be used in a given context, + // `input.try_parse(parse_important).is_ok()` should be used at the end + // of the implementation of this method and the result should be part of the return value. + // + // fn parseValue(this: *T, name: []const u8, input: *Parser) Error!T.DeclarationParser.Declaration + _ = P.DeclarationParser.parseValue; +} + +/// Also checks that P is: +/// - ValidDeclarationParser(P) +/// - ValidQualifiedRuleParser(P) +/// - ValidAtRuleParser(P) +pub fn ValidRuleBodyItemParser(comptime P: type) void { + ValidDeclarationParser(P); + ValidQualifiedRuleParser(P); + ValidAtRuleParser(P); + + // Whether we should attempt to parse declarations. If you know you won't, returning false + // here is slightly faster. + _ = P.RuleBodyItemParser.parseDeclarations; + + // Whether we should attempt to parse qualified rules. If you know you won't, returning false + // would be slightly faster. 
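+ //
+ // fn parseQualified(this: *T) bool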
+ _ = P.RuleBodyItemParser.parseQualified; + + // We should have: + // P.DeclarationParser.Declaration == P.QualifiedRuleParser.QualifiedRule == P.AtRuleParser.AtRule + if (P.DeclarationParser.Declaration != P.QualifiedRuleParser.QualifiedRule or + P.DeclarationParser.Declaration != P.AtRuleParser.AtRule) + { + @compileError("ValidRuleBodyItemParser: P.DeclarationParser.Declaration != P.QualifiedRuleParser.QualifiedRule or\n P.DeclarationParser.Declaration != P.AtRuleParser.AtRule"); + } +} + +pub fn RuleBodyParser(comptime P: type) type { + ValidRuleBodyItemParser(P); + // Same as P.AtRuleParser.AtRule and P.DeclarationParser.Declaration + const I = P.QualifiedRuleParser.QualifiedRule; + + return struct { + input: *Parser, + parser: *P, + + const This = @This(); + + pub fn new(input: *Parser, parser: *P) This { + return .{ + .input = input, + .parser = parser, + }; + } + + /// TODO: result is actually: + /// type Item = Result, &'i str)>; + /// + /// but nowhere in the source do i actually see it using the string part of the tuple + pub fn next(this: *This) ?(Result(I)) { + while (true) { + this.input.skipWhitespace(); + const start = this.input.state(); + + const tok: *Token = switch (this.input.nextIncludingWhitespaceAndComments()) { + .err => |_| return null, + .result => |vvv| vvv, + }; + + switch (tok.*) { + .close_curly, .whitespace, .semicolon, .comment => continue, + .at_keyword => { + const name = tok.at_keyword; + return parse_at_rule( + this.input.allocator(), + &start, + name, + this.input, + P, + this.parser, + ); + }, + .ident => { + if (P.RuleBodyItemParser.parseDeclarations(this.parser)) { + const name = tok.ident; + const parse_qualified = P.RuleBodyItemParser.parseQualified(this.parser); + const result: Result(I) = result: { + const error_behavior: ParseUntilErrorBehavior = if (parse_qualified) .stop else .consume; + const Closure = struct { + parser: *P, + name: []const u8, + pub fn parsefn(self: *@This(), input: *Parser) Result(I) { + if (input.expectColon().asErr()) |e| return .{ .err = e }; + return P.DeclarationParser.parseValue(self.parser, self.name, input); + } + }; + var closure = Closure{ + .parser = this.parser, + .name = name, + }; + break :result parse_until_after(this.input, Delimiters{ .semicolon = true }, error_behavior, I, &closure, Closure.parsefn); + }; + if (result.isErr() and parse_qualified) { + this.input.reset(&start); + if (parse_qualified_rule( + &start, + this.input, + P, + this.parser, + Delimiters{ .semicolon = true, .curly_bracket = true }, + ).asValue()) |qual| { + return .{ .result = qual }; + } + } + + return result; + } + }, + else => {}, + } + + const result: Result(I) = if (P.RuleBodyItemParser.parseQualified(this.parser)) result: { + this.input.reset(&start); + const delimiters = if (P.RuleBodyItemParser.parseDeclarations(this.parser)) Delimiters{ + .semicolon = true, + .curly_bracket = true, + } else Delimiters{ .curly_bracket = true }; + break :result parse_qualified_rule(&start, this.input, P, this.parser, delimiters); + } else result: { + const token = tok.*; + + const Closure = struct { token: Token, start: ParserState }; + break :result this.input.parseUntilAfter(Delimiters{ .semicolon = true }, I, &Closure{ .token = token, .start = start }, struct { + pub fn parseFn(closure: *const Closure, i: *Parser) Result(I) { + _ = i; // autofix + return .{ .err = closure.start.sourceLocation().newUnexpectedTokenError(closure.token) }; + } + }.parseFn); + }; + + return result; + } + } + }; +} + +pub const ParserOptions = struct { + /// 
Filename to use in error messages. + filename: []const u8, + /// Whether the enable [CSS modules](https://github.com/css-modules/css-modules). + css_modules: ?css_modules.Config, + /// The source index to assign to all parsed rules. Impacts the source map when + /// the style sheet is serialized. + source_index: u32, + /// Whether to ignore invalid rules and declarations rather than erroring. + error_recovery: bool, + /// A list that will be appended to when a warning occurs. + logger: ?*Log = null, + /// Feature flags to enable. + flags: ParserFlags, + allocator: Allocator, + + pub fn warn(this: *const ParserOptions, warning: ParseError(ParserError)) void { + if (this.logger) |lg| { + lg.addWarningFmtLineCol( + this.filename, + warning.location.line, + warning.location.column, + this.allocator, + "{}", + .{warning.kind}, + ) catch unreachable; + } + } + + pub fn default(allocator: std.mem.Allocator, log: ?*Log) ParserOptions { + return ParserOptions{ + .filename = "", + .css_modules = null, + .source_index = 0, + .error_recovery = false, + .logger = log, + .flags = ParserFlags{}, + .allocator = allocator, + }; + } +}; + +/// Parser feature flags to enable. +pub const ParserFlags = packed struct(u8) { + /// Whether the enable the [CSS nesting](https://www.w3.org/TR/css-nesting-1/) draft syntax. + nesting: bool = false, + /// Whether to enable the [custom media](https://drafts.csswg.org/mediaqueries-5/#custom-mq) draft syntax. + custom_media: bool = false, + /// Whether to enable the non-standard >>> and /deep/ selector combinators used by Vue and Angular. + deep_selector_combinator: bool = false, + __unused: u5 = 0, + + pub usingnamespace Bitflags(@This()); +}; + +const ParseUntilErrorBehavior = enum { + consume, + stop, +}; + +// const ImportRecordHandler = union(enum) { +// list: *bun.BabyList(ImportRecord), +// // dummy: u32, + +// pub fn add(this: *ImportRecordHandler, allocator: Allocator, record: ImportRecord) u32 { +// return switch (this.*) { +// .list => |list| { +// const len = list.len; +// list.push(allocator, record) catch bun.outOfMemory(); +// return len; +// }, +// // .dummy => |*d| { +// // const val = d.*; +// // d.* += 1; +// // return val; +// // }, +// }; +// } +// }; + +pub const Parser = struct { + input: *ParserInput, + at_start_of: ?BlockType = null, + stop_before: Delimiters = Delimiters.NONE, + import_records: ?*bun.BabyList(ImportRecord), + + // TODO: dedupe import records?? + pub fn addImportRecordForUrl(this: *Parser, url: []const u8, start_position: usize) Result(u32) { + if (this.import_records) |import_records| { + const idx = import_records.len; + import_records.push(this.allocator(), ImportRecord{ + .path = bun.fs.Path.init(url), + .kind = .url, + .range = bun.logger.Range{ + .loc = bun.logger.Loc{ .start = @intCast(start_position) }, + .len = @intCast(url.len), // TODO: technically this is not correct because the url could be escaped + }, + }) catch bun.outOfMemory(); + return .{ .result = idx }; + } else { + return .{ .err = this.newBasicUnexpectedTokenError(.{ .unquoted_url = url }) }; + } + } + + pub inline fn allocator(self: *Parser) Allocator { + return self.input.tokenizer.allocator; + } + + /// Create a new Parser + /// + /// Pass in `import_records` to track imports (`@import` rules, `url()` tokens). If this + /// is `null`, calling `Parser.addImportRecordForUrl` will error. 
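+ ///
+ /// Typical usage (as in `StyleSheet.parseWith` above):
+ ///
+ ///   var input = ParserInput.new(allocator, code);
+ ///   var parser = Parser.new(&input, import_records);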
+ pub fn new(input: *ParserInput, import_records: ?*bun.BabyList(ImportRecord)) Parser { + return Parser{ + .input = input, + .import_records = import_records, + }; + } + + pub fn newCustomError(this: *const Parser, err: ParserError) ParseError(ParserError) { + return this.currentSourceLocation().newCustomError(err); + } + + pub fn newBasicError(this: *const Parser, kind: BasicParseErrorKind) BasicParseError { + return BasicParseError{ + .kind = kind, + .location = this.currentSourceLocation(), + }; + } + + pub fn newError(this: *const Parser, kind: BasicParseErrorKind) ParseError(ParserError) { + return .{ + .kind = .{ .basic = kind }, + .location = this.currentSourceLocation(), + }; + } + + pub fn newUnexpectedTokenError(this: *const Parser, token: Token) ParseError(ParserError) { + return this.newError(.{ .unexpected_token = token }); + } + + pub fn newBasicUnexpectedTokenError(this: *const Parser, token: Token) ParseError(ParserError) { + return this.newBasicError(.{ .unexpected_token = token }).intoDefaultParseError(); + } + + pub fn currentSourceLocation(this: *const Parser) SourceLocation { + return this.input.tokenizer.currentSourceLocation(); + } + + pub fn currentSourceMapUrl(this: *const Parser) ?[]const u8 { + return this.input.tokenizer.currentSourceMapUrl(); + } + + /// Return a slice of the CSS input, from the given position to the current one. + pub fn sliceFrom(this: *const Parser, start_position: usize) []const u8 { + return this.input.tokenizer.sliceFrom(start_position); + } + + /// Implementation of Vec::::parse + pub fn parseList(this: *Parser, comptime T: type, comptime parse_one: *const fn (*Parser) Result(T)) Result(ArrayList(T)) { + return this.parseCommaSeparated(T, parse_one); + } + + /// Parse a list of comma-separated values, all with the same syntax. + /// + /// The given closure is called repeatedly with a "delimited" parser + /// (see the `Parser::parse_until_before` method) so that it can over + /// consume the input past a comma at this block/function nesting level. + /// + /// Successful results are accumulated in a vector. + /// + /// This method returns `Err(())` the first time that a closure call does, + /// or if a closure call leaves some input before the next comma or the end + /// of the input. + pub fn parseCommaSeparated( + this: *Parser, + comptime T: type, + comptime parse_one: *const fn (*Parser) Result(T), + ) Result(ArrayList(T)) { + return this.parseCommaSeparatedInternal(T, {}, voidWrap(T, parse_one), false); + } + + pub fn parseCommaSeparatedWithCtx( + this: *Parser, + comptime T: type, + closure: anytype, + comptime parse_one: *const fn (@TypeOf(closure), *Parser) Result(T), + ) Result(ArrayList(T)) { + return this.parseCommaSeparatedInternal(T, closure, parse_one, false); + } + + fn parseCommaSeparatedInternal( + this: *Parser, + comptime T: type, + closure: anytype, + comptime parse_one: *const fn (@TypeOf(closure), *Parser) Result(T), + ignore_errors: bool, + ) Result(ArrayList(T)) { + // Vec grows from 0 to 4 by default on first push(). So allocate with + // capacity 1, so in the somewhat common case of only one item we don't + // way overallocate. Note that we always push at least one item if + // parsing succeeds. 
+ // + // TODO(zack): might be faster to use stack fallback here + // in the common case we may have just 1, but I feel like it is also very common to have >1 + // which means every time we have >1 items we will always incur 1 more additional allocation + var sfb = std.heap.stackFallback(@sizeOf(T), this.allocator()); + const alloc = sfb.get(); + var values = ArrayList(T).initCapacity(alloc, 1) catch unreachable; + + while (true) { + this.skipWhitespace(); // Unnecessary for correctness, but may help try() in parse_one rewind less. + switch (this.parseUntilBefore(Delimiters{ .comma = true }, T, closure, parse_one)) { + .result => |v| { + values.append(alloc, v) catch unreachable; + }, + .err => |e| { + if (!ignore_errors) return .{ .err = e }; + }, + } + + const tok = switch (this.next()) { + .result => |v| v, + .err => { + // need to clone off the stack + const needs_clone = values.items.len == 1; + if (needs_clone) return .{ .result = values.clone(this.allocator()) catch bun.outOfMemory() }; + return .{ .result = values }; + }, + }; + if (tok.* != .comma) bun.unreachablePanic("", .{}); + } + } + + /// Execute the given closure, passing it the parser. + /// If the result (returned unchanged) is `Err`, + /// the internal state of the parser (including position within the input) + /// is restored to what it was before the call. + /// + /// func needs to be a funtion like this: `fn func(*Parser, ...@TypeOf(args_)) T` + pub inline fn tryParse(this: *Parser, comptime func: anytype, args_: anytype) bun.meta.ReturnOf(func) { + const start = this.state(); + const result = result: { + const args = brk: { + var args: std.meta.ArgsTuple(@TypeOf(func)) = undefined; + args[0] = this; + + inline for (args_, 1..) |a, i| { + args[i] = a; + } + + break :brk args; + }; + + break :result @call(.auto, func, args); + }; + if (result == .err) { + this.reset(&start); + } + return result; + } + + pub inline fn tryParseImpl(this: *Parser, comptime Ret: type, comptime func: anytype, args: anytype) Ret { + const start = this.state(); + const result = result: { + break :result @call(.auto, func, args); + }; + if (result == .err) { + this.reset(&start); + } + return result; + } + + pub inline fn parseNestedBlock(this: *Parser, comptime T: type, closure: anytype, comptime parsefn: *const fn (@TypeOf(closure), *Parser) Result(T)) Result(T) { + return parse_nested_block(this, T, closure, parsefn); + } + + pub fn isExhausted(this: *Parser) bool { + return this.expectExhausted().isOk(); + } + + /// Parse the input until exhaustion and check that it contains no “error” token. + /// + /// See `Token::is_parse_error`. This also checks nested blocks and functions recursively. 
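+    // Example (sketch): a delimited sub-parser over a value can be vetted with
+    //
+    //     if (parser.expectNoErrorToken().asErr()) |e| return .{ .err = e };
+    //
+    // A string literal broken by a raw newline tokenizes as `.bad_string`, which
+    // `isParseError` flags, so the check above rejects it; well-formed input scans
+    // to end-of-input and yields `.{ .result = {} }`.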
+ pub fn expectNoErrorToken(this: *Parser) Result(void) { + while (true) { + const tok = switch (this.nextIncludingWhitespaceAndComments()) { + .err => return .{ .result = {} }, + .result => |v| v, + }; + switch (tok.*) { + .function, .open_paren, .open_square, .open_curly => { + if (this.parseNestedBlock(void, {}, struct { + pub fn parse(_: void, i: *Parser) Result(void) { + if (i.expectNoErrorToken().asErr()) |e| { + return .{ .err = e }; + } + return .{ .result = {} }; + } + }.parse).asErr()) |err| { + return .{ .err = err }; + } + return .{ .result = {} }; + }, + else => { + if (tok.isParseError()) { + return .{ .err = this.newUnexpectedTokenError(tok.*) }; + } + }, + } + } + } + + pub fn expectPercentage(this: *Parser) Result(f32) { + const start_location = this.currentSourceLocation(); + const tok = switch (this.next()) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + if (tok.* == .percentage) return .{ .result = tok.percentage.unit_value }; + return .{ .err = start_location.newUnexpectedTokenError(tok.*) }; + } + + pub fn expectComma(this: *Parser) Result(void) { + const start_location = this.currentSourceLocation(); + const tok = switch (this.next()) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + switch (tok.*) { + .comma => return .{ .result = {} }, + else => {}, + } + return .{ .err = start_location.newUnexpectedTokenError(tok.*) }; + } + + /// Parse a that does not have a fractional part, and return the integer value. + pub fn expectInteger(this: *Parser) Result(i32) { + const start_location = this.currentSourceLocation(); + const tok = switch (this.next()) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + if (tok.* == .number and tok.number.int_value != null) return .{ .result = tok.number.int_value.? }; + return .{ .err = start_location.newUnexpectedTokenError(tok.*) }; + } + + /// Parse a and return the integer value. 
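+    // Example (sketch): for the input `1.5` this returns `.{ .result = 1.5 }` (the value
+    // of a `<number>` token); `12px` tokenizes as a `.dimension` and `40%` as a
+    // `.percentage`, so both produce an unexpected-token error here.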
+ pub fn expectNumber(this: *Parser) Result(f32) { + const start_location = this.currentSourceLocation(); + const tok = switch (this.next()) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + if (tok.* == .number) return .{ .result = tok.number.value }; + return .{ .err = start_location.newUnexpectedTokenError(tok.*) }; + } + + pub fn expectDelim(this: *Parser, delim: u8) Result(void) { + const start_location = this.currentSourceLocation(); + const tok = switch (this.next()) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + if (tok.* == .delim and tok.delim == delim) return .{ .result = {} }; + return .{ .err = start_location.newUnexpectedTokenError(tok.*) }; + } + + pub fn expectParenthesisBlock(this: *Parser) Result(void) { + const start_location = this.currentSourceLocation(); + const tok = switch (this.next()) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + if (tok.* == .open_paren) return .{ .result = {} }; + return .{ .err = start_location.newUnexpectedTokenError(tok.*) }; + } + + pub fn expectColon(this: *Parser) Result(void) { + const start_location = this.currentSourceLocation(); + const tok = switch (this.next()) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + if (tok.* == .colon) return .{ .result = {} }; + return .{ .err = start_location.newUnexpectedTokenError(tok.*) }; + } + + pub fn expectString(this: *Parser) Result([]const u8) { + const start_location = this.currentSourceLocation(); + const tok = switch (this.next()) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + if (tok.* == .quoted_string) return .{ .result = tok.quoted_string }; + return .{ .err = start_location.newUnexpectedTokenError(tok.*) }; + } + + pub fn expectIdent(this: *Parser) Result([]const u8) { + const start_location = this.currentSourceLocation(); + const tok = switch (this.next()) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + if (tok.* == .ident) return .{ .result = tok.ident }; + return .{ .err = start_location.newUnexpectedTokenError(tok.*) }; + } + + /// Parse either a or a , and return the unescaped value. 
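+    // Example (sketch): both `sans-serif` and `"sans-serif"` satisfy this and return the
+    // unescaped text, which makes it the right helper for grammars spelled as
+    // `<custom-ident> | <string>` (font family names, for instance):
+    //
+    //     const name = switch (parser.expectIdentOrString()) {
+    //         .result => |v| v,
+    //         .err => |e| return .{ .err = e },
+    //     };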
+ pub fn expectIdentOrString(this: *Parser) Result([]const u8) { + const start_location = this.currentSourceLocation(); + const tok = switch (this.next()) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + switch (tok.*) { + .ident => |i| return .{ .result = i }, + .quoted_string => |s| return .{ .result = s }, + else => {}, + } + return .{ .err = start_location.newUnexpectedTokenError(tok.*) }; + } + + pub fn expectIdentMatching(this: *Parser, name: []const u8) Result(void) { + const start_location = this.currentSourceLocation(); + const tok = switch (this.next()) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + switch (tok.*) { + .ident => |i| if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, i)) return .{ .result = {} }, + else => {}, + } + return .{ .err = start_location.newUnexpectedTokenError(tok.*) }; + } + + pub fn expectFunction(this: *Parser) Result([]const u8) { + const start_location = this.currentSourceLocation(); + const tok = switch (this.next()) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + switch (tok.*) { + .function => |fn_name| return .{ .result = fn_name }, + else => {}, + } + return .{ .err = start_location.newUnexpectedTokenError(tok.*) }; + } + + pub fn expectFunctionMatching(this: *Parser, name: []const u8) Result(void) { + const start_location = this.currentSourceLocation(); + const tok = switch (this.next()) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + switch (tok.*) { + .function => |fn_name| if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, fn_name)) return .{ .result = {} }, + else => {}, + } + return .{ .err = start_location.newUnexpectedTokenError(tok.*) }; + } + + pub fn expectCurlyBracketBlock(this: *Parser) Result(void) { + const start_location = this.currentSourceLocation(); + const tok = switch (this.next()) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + switch (tok.*) { + .open_curly => return .{ .result = {} }, + else => return .{ .err = start_location.newUnexpectedTokenError(tok.*) }, + } + } + + /// Parse a and return the unescaped value. + pub fn expectUrl(this: *Parser) Result([]const u8) { + const start_location = this.currentSourceLocation(); + const tok = switch (this.next()) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + switch (tok.*) { + .unquoted_url => |value| return .{ .result = value }, + .function => |name| { + if (bun.strings.eqlCaseInsensitiveASCIIICheckLength("url", name)) { + const result = this.parseNestedBlock([]const u8, {}, struct { + fn parse(_: void, parser: *Parser) Result([]const u8) { + return switch (parser.expectString()) { + .result => |v| .{ .result = v }, + .err => |e| .{ .err = e }, + }; + } + }.parse); + return switch (result) { + .result => |v| .{ .result = v }, + .err => |e| .{ .err = e }, + }; + } + }, + else => {}, + } + return .{ .err = start_location.newUnexpectedTokenError(tok.*) }; + } + + /// Parse either a or a , and return the unescaped value. 
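+    // Example (sketch): `url(foo.png)`, `url("foo.png")`, and the bare string `"foo.png"`
+    // all succeed here and return `foo.png`; `expectUrl` above accepts only the first
+    // two forms, since a lone `<string>` is not a `<url>`.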
+ pub fn expectUrlOrString(this: *Parser) Result([]const u8) { + const start_location = this.currentSourceLocation(); + const tok = switch (this.next()) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + switch (tok.*) { + .unquoted_url => |value| return .{ .result = value }, + .quoted_string => |value| return .{ .result = value }, + .function => |name| { + if (bun.strings.eqlCaseInsensitiveASCIIICheckLength("url", name)) { + const result = this.parseNestedBlock([]const u8, {}, struct { + fn parse(_: void, parser: *Parser) Result([]const u8) { + return switch (parser.expectString()) { + .result => |v| .{ .result = v }, + .err => |e| .{ .err = e }, + }; + } + }.parse); + return switch (result) { + .result => |v| .{ .result = v }, + .err => |e| .{ .err = e }, + }; + } + }, + else => {}, + } + return .{ .err = start_location.newUnexpectedTokenError(tok.*) }; + } + + pub fn position(this: *Parser) usize { + bun.debugAssert(bun.strings.isOnCharBoundary(this.input.tokenizer.src, this.input.tokenizer.position)); + return this.input.tokenizer.position; + } + + fn parseEmpty(_: *Parser) Result(void) { + return .{ .result = {} }; + } + + /// Like `parse_until_before`, but also consume the delimiter token. + /// + /// This can be useful when you don’t need to know which delimiter it was + /// (e.g. if these is only one in the given set) + /// or if it was there at all (as opposed to reaching the end of the input). + pub fn parseUntilAfter( + this: *Parser, + delimiters: Delimiters, + comptime T: type, + closure: anytype, + comptime parse_fn: *const fn (@TypeOf(closure), *Parser) Result(T), + ) Result(T) { + return parse_until_after( + this, + delimiters, + ParseUntilErrorBehavior.consume, + T, + closure, + parse_fn, + ); + } + + pub fn parseUntilBefore(this: *Parser, delimiters: Delimiters, comptime T: type, closure: anytype, comptime parse_fn: *const fn (@TypeOf(closure), *Parser) Result(T)) Result(T) { + return parse_until_before(this, delimiters, .consume, T, closure, parse_fn); + } + + pub fn parseEntirely(this: *Parser, comptime T: type, closure: anytype, comptime parsefn: *const fn (@TypeOf(closure), *Parser) Result(T)) Result(T) { + const result = switch (parsefn(closure, this)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + if (this.expectExhausted().asErr()) |e| return .{ .err = e }; + return .{ .result = result }; + } + + /// Check whether the input is exhausted. That is, if `.next()` would return a token. + /// Return a `Result` so that the `?` operator can be used: `input.expect_exhausted()?` + /// + /// This ignores whitespace and comments. 
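+    // Example (sketch): `parseEntirely` above relies on this check, so parsing the value
+    // `red blue` as a single identifier fails: `expectIdent` consumes `red`, and the
+    // leftover `blue` makes `expectExhausted` report an unexpected token.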
+ pub fn expectExhausted(this: *Parser) Result(void) { + const start = this.state(); + const result: Result(void) = switch (this.next()) { + .result => |t| .{ .err = start.sourceLocation().newUnexpectedTokenError(t.*) }, + .err => |e| brk: { + if (e.kind == .basic and e.kind.basic == .end_of_input) break :brk .{ .result = {} }; + bun.unreachablePanic("Unexpected error encountered: {}", .{e.kind}); + }, + }; + this.reset(&start); + return result; + } + + pub fn @"skip cdc and cdo"(this: *@This()) void { + if (this.at_start_of) |block_type| { + this.at_start_of = null; + consume_until_end_of_block(block_type, &this.input.tokenizer); + } + + this.input.tokenizer.@"skip cdc and cdo"(); + } + + pub fn skipWhitespace(this: *@This()) void { + if (this.at_start_of) |block_type| { + this.at_start_of = null; + consume_until_end_of_block(block_type, &this.input.tokenizer); + } + + this.input.tokenizer.skipWhitespace(); + } + + pub fn next(this: *@This()) Result(*Token) { + this.skipWhitespace(); + return this.nextIncludingWhitespaceAndComments(); + } + + /// Same as `Parser::next`, but does not skip whitespace tokens. + pub fn nextIncludingWhitespace(this: *@This()) Result(*Token) { + while (true) { + switch (this.nextIncludingWhitespaceAndComments()) { + .result => |tok| if (tok.* == .comment) {} else break, + .err => |e| return .{ .err = e }, + } + } + return .{ .result = &this.input.cached_token.?.token }; + } + + pub fn nextByte(this: *@This()) ?u8 { + const byte = this.input.tokenizer.nextByte(); + if (this.stop_before.contains(Delimiters.fromByte(byte))) { + return null; + } + return byte; + } + + pub fn reset(this: *Parser, state_: *const ParserState) void { + this.input.tokenizer.reset(state_); + this.at_start_of = state_.at_start_of; + if (this.import_records) |import_records| import_records.len = state_.import_record_count; + } + + pub fn state(this: *Parser) ParserState { + return ParserState{ + .position = this.input.tokenizer.getPosition(), + .current_line_start_position = this.input.tokenizer.current_line_start_position, + .current_line_number = @intCast(this.input.tokenizer.current_line_number), + .at_start_of = this.at_start_of, + .import_record_count = if (this.import_records) |import_records| import_records.len else 0, + }; + } + + /// Same as `Parser::next`, but does not skip whitespace or comment tokens. + /// + /// **Note**: This should only be used in contexts like a CSS pre-processor + /// where comments are preserved. + /// When parsing higher-level values, per the CSS Syntax specification, + /// comments should always be ignored between tokens. 
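+    // Note (sketch): the cached-token path below exists because `tryParse`/`reset`
+    // frequently rewind to a position whose token was already scanned; reusing
+    // `cached_token` in that case avoids tokenizing the same bytes twice.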
+ pub fn nextIncludingWhitespaceAndComments(this: *Parser) Result(*Token) { + if (this.at_start_of) |block_type| { + this.at_start_of = null; + consume_until_end_of_block(block_type, &this.input.tokenizer); + } + + const byte = this.input.tokenizer.nextByte(); + if (this.stop_before.contains(Delimiters.fromByte(byte))) { + return .{ .err = this.newError(BasicParseErrorKind.end_of_input) }; + } + + const token_start_position = this.input.tokenizer.getPosition(); + const using_cached_token = this.input.cached_token != null and this.input.cached_token.?.start_position == token_start_position; + + const token = if (using_cached_token) token: { + const cached_token = &this.input.cached_token.?; + this.input.tokenizer.reset(&cached_token.end_state); + if (cached_token.token == .function) { + this.input.tokenizer.seeFunction(cached_token.token.function); + } + break :token &cached_token.token; + } else token: { + const new_token = switch (this.input.tokenizer.next()) { + .result => |v| v, + .err => return .{ .err = this.newError(BasicParseErrorKind.end_of_input) }, + }; + this.input.cached_token = CachedToken{ + .token = new_token, + .start_position = token_start_position, + .end_state = this.input.tokenizer.state(), + }; + break :token &this.input.cached_token.?.token; + }; + + if (BlockType.opening(token)) |block_type| { + this.at_start_of = block_type; + } + + return .{ .result = token }; + } + + /// Create a new unexpected token or EOF ParseError at the current location + pub fn newErrorForNextToken(this: *Parser) ParseError(ParserError) { + const token = switch (this.next()) { + .result => |t| t.*, + .err => |e| return e, + }; + return this.newError(BasicParseErrorKind{ .unexpected_token = token }); + } +}; + +/// A set of characters, to be used with the `Parser::parse_until*` methods. +/// +/// The union of two sets can be obtained with the `|` operator. 
Example: +/// +/// ```{rust,ignore} +/// input.parse_until_before(Delimiter::CurlyBracketBlock | Delimiter::Semicolon) +/// ``` +pub const Delimiters = packed struct(u8) { + /// The delimiter set with only the `{` opening curly bracket + curly_bracket: bool = false, + /// The delimiter set with only the `;` semicolon + semicolon: bool = false, + /// The delimiter set with only the `!` exclamation point + bang: bool = false, + /// The delimiter set with only the `,` comma + comma: bool = false, + close_curly_bracket: bool = false, + close_square_bracket: bool = false, + close_parenthesis: bool = false, + __unused: u1 = 0, + + pub usingnamespace Bitflags(Delimiters); + + const NONE: Delimiters = .{}; + + pub fn getDelimiter(comptime tag: @TypeOf(.EnumLiteral)) Delimiters { + var empty = Delimiters{}; + @field(empty, @tagName(tag)) = true; + return empty; + } + + const TABLE: [256]Delimiters = brk: { + var table: [256]Delimiters = [_]Delimiters{.{}} ** 256; + table[';'] = getDelimiter(.semicolon); + table['!'] = getDelimiter(.bang); + table[','] = getDelimiter(.comma); + table['{'] = getDelimiter(.curly_bracket); + table['}'] = getDelimiter(.close_curly_bracket); + table[']'] = getDelimiter(.close_square_bracket); + table[')'] = getDelimiter(.close_parenthesis); + break :brk table; + }; + + // pub fn bitwiseOr(lhs: Delimiters, rhs: Delimiters) Delimiters { + // return @bitCast(@as(u8, @bitCast(lhs)) | @as(u8, @bitCast(rhs))); + // } + + // pub fn contains(lhs: Delimiters, rhs: Delimiters) bool { + // return @as(u8, @bitCast(lhs)) & @as(u8, @bitCast(rhs)) != 0; + // } + + pub fn fromByte(byte: ?u8) Delimiters { + if (byte) |b| return TABLE[b]; + return .{}; + } +}; + +pub const ParserInput = struct { + tokenizer: Tokenizer, + cached_token: ?CachedToken = null, + + pub fn new(allocator: Allocator, code: []const u8) ParserInput { + return ParserInput{ + .tokenizer = Tokenizer.init(allocator, code), + }; + } +}; + +/// A capture of the internal state of a `Parser` (including the position within the input), +/// obtained from the `Parser::position` method. +/// +/// Can be used with the `Parser::reset` method to restore that state. +/// Should only be used with the `Parser` instance it came from. +pub const ParserState = struct { + position: usize, + current_line_start_position: usize, + current_line_number: u32, + import_record_count: u32, + at_start_of: ?BlockType, + + pub fn sourceLocation(this: *const ParserState) SourceLocation { + return .{ + .line = this.current_line_number, + .column = @intCast(this.position - this.current_line_start_position + 1), + }; + } +}; + +const BlockType = enum { + parenthesis, + square_bracket, + curly_bracket, + + fn opening(token: *const Token) ?BlockType { + return switch (token.*) { + .function, .open_paren => .parenthesis, + .open_square => .square_bracket, + .open_curly => .curly_bracket, + else => null, + }; + } + + fn closing(token: *const Token) ?BlockType { + return switch (token.*) { + .close_paren => .parenthesis, + .close_square => .square_bracket, + .close_curly => .curly_bracket, + else => null, + }; + } +}; + +pub const nth = struct { + const NthResult = struct { i32, i32 }; + /// Parse the *An+B* notation, as found in the `:nth-child()` selector. + /// The input is typically the arguments of a function, + /// in which case the caller needs to check if the arguments’ parser is exhausted. + /// Return `Ok((A, B))`, or `Err(())` for a syntax error. 
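+    // Examples (sketch) of the `.{ A, B }` pairs produced below:
+    //   "even"  -> .{ 2, 0 }      "odd"   -> .{ 2, 1 }
+    //   "5"     -> .{ 0, 5 }      "n"     -> .{ 1, 0 }
+    //   "2n+1"  -> .{ 2, 1 }      "-n+3"  -> .{ -1, 3 }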
+ pub fn parse_nth(input: *Parser) Result(NthResult) { + const tok = switch (input.next()) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + switch (tok.*) { + .number => { + if (tok.number.int_value) |b| return .{ .result = .{ 0, b } }; + }, + .dimension => { + if (tok.dimension.num.int_value) |a| { + // @compileError(todo_stuff.match_ignore_ascii_case); + const unit = tok.dimension.unit; + if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(unit, "n")) { + return parse_b(input, a); + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(unit, "n-")) { + return parse_signless_b(input, a, -1); + } else { + if (parse_n_dash_digits(input.allocator(), unit).asValue()) |b| { + return .{ .result = .{ a, b } }; + } else { + return .{ .err = input.newUnexpectedTokenError(.{ .ident = unit }) }; + } + } + } + }, + .ident => { + const value = tok.ident; + // @compileError(todo_stuff.match_ignore_ascii_case); + if (bun.strings.eqlCaseInsensitiveASCIIIgnoreLength(value, "even")) { + return .{ .result = .{ 2, 0 } }; + } else if (bun.strings.eqlCaseInsensitiveASCIIIgnoreLength(value, "odd")) { + return .{ .result = .{ 2, 1 } }; + } else if (bun.strings.eqlCaseInsensitiveASCIIIgnoreLength(value, "n")) { + return parse_b(input, 1); + } else if (bun.strings.eqlCaseInsensitiveASCIIIgnoreLength(value, "-n")) { + return parse_b(input, -1); + } else if (bun.strings.eqlCaseInsensitiveASCIIIgnoreLength(value, "n-")) { + return parse_signless_b(input, 1, -1); + } else if (bun.strings.eqlCaseInsensitiveASCIIIgnoreLength(value, "-n-")) { + return parse_signless_b(input, -1, -1); + } else { + const slice, const a: i32 = if (bun.strings.startsWithChar(value, '-')) .{ value[1..], -1 } else .{ value, 1 }; + if (parse_n_dash_digits(input.allocator(), slice).asValue()) |b| return .{ .result = .{ a, b } }; + return .{ .err = input.newUnexpectedTokenError(.{ .ident = value }) }; + } + }, + .delim => { + const next_tok = switch (input.nextIncludingWhitespace()) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + if (next_tok.* == .ident) { + const value = next_tok.ident; + if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(value, "n")) { + return parse_b(input, 1); + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(value, "-n")) { + return parse_signless_b(input, 1, -1); + } else { + if (parse_n_dash_digits(input.allocator(), value).asValue()) |b| { + return .{ .result = .{ 1, b } }; + } else { + return .{ .err = input.newUnexpectedTokenError(.{ .ident = value }) }; + } + } + } else { + return .{ .err = input.newUnexpectedTokenError(next_tok.*) }; + } + }, + else => {}, + } + return .{ .err = input.newUnexpectedTokenError(tok.*) }; + } + + fn parse_b(input: *Parser, a: i32) Result(NthResult) { + const start = input.state(); + const tok = switch (input.next()) { + .result => |v| v, + .err => { + input.reset(&start); + return .{ .result = .{ a, 0 } }; + }, + }; + + if (tok.* == .delim and tok.delim == '+') return parse_signless_b(input, a, 1); + if (tok.* == .delim and tok.delim == '-') return parse_signless_b(input, a, -1); + if (tok.* == .number and tok.number.has_sign and tok.number.int_value != null) return .{ .result = NthResult{ a, tok.number.int_value.? 
} }; + input.reset(&start); + return .{ .result = .{ a, 0 } }; + } + + fn parse_signless_b(input: *Parser, a: i32, b_sign: i32) Result(NthResult) { + const tok = switch (input.next()) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + if (tok.* == .number and !tok.number.has_sign and tok.number.int_value != null) { + const b = tok.number.int_value.?; + return .{ .result = .{ a, b_sign * b } }; + } + return .{ .err = input.newUnexpectedTokenError(tok.*) }; + } + + fn parse_n_dash_digits(allocator: Allocator, str: []const u8) Maybe(i32, void) { + const bytes = str; + if (bytes.len >= 3 and + bun.strings.eqlCaseInsensitiveASCIIICheckLength(bytes[0..2], "n-") and + brk: { + for (bytes[2..]) |b| { + if (b < '0' or b > '9') break :brk false; + } + break :brk true; + }) { + return parse_number_saturate(allocator, str[1..]); // Include the minus sign + } else { + return .{ .err = {} }; + } + } + + fn parse_number_saturate(allocator: Allocator, string: []const u8) Maybe(i32, void) { + var input = ParserInput.new(allocator, string); + var parser = Parser.new(&input, null); + const tok = switch (parser.nextIncludingWhitespaceAndComments()) { + .result => |v| v, + .err => { + return .{ .err = {} }; + }, + }; + const int = if (tok.* == .number and tok.number.int_value != null) tok.number.int_value.? else { + return .{ .err = {} }; + }; + if (!parser.isExhausted()) { + return .{ .err = {} }; + } + return .{ .result = int }; + } +}; + +const CachedToken = struct { + token: Token, + start_position: usize, + end_state: ParserState, +}; + +const Tokenizer = struct { + src: []const u8, + position: usize = 0, + source_map_url: ?[]const u8 = null, + current_line_start_position: usize = 0, + current_line_number: u32 = 0, + allocator: Allocator, + var_or_env_functions: SeenStatus = .dont_care, + current: Token = undefined, + previous: Token = undefined, + + const SeenStatus = enum { + dont_care, + looking_for_them, + seen_at_least_one, + }; + + const FORM_FEED_BYTE = 0x0C; + const REPLACEMENT_CHAR = 0xFFFD; + const REPLACEMENT_CHAR_UNICODE: [3]u8 = [3]u8{ 0xEF, 0xBF, 0xBD }; + const MAX_ONE_B: u32 = 0x80; + const MAX_TWO_B: u32 = 0x800; + const MAX_THREE_B: u32 = 0x10000; + + pub fn init(allocator: Allocator, src: []const u8) Tokenizer { + var lexer = Tokenizer{ + .src = src, + .allocator = allocator, + .position = 0, + }; + + // make current point to the first token + _ = lexer.next(); + lexer.position = 0; + + return lexer; + } + + pub fn currentSourceMapUrl(this: *const Tokenizer) ?[]const u8 { + return this.source_map_url; + } + + pub fn getPosition(this: *const Tokenizer) usize { + bun.debugAssert(bun.strings.isOnCharBoundary(this.src, this.position)); + return this.position; + } + + pub fn state(this: *const Tokenizer) ParserState { + return ParserState{ + .position = this.position, + .current_line_start_position = this.current_line_start_position, + .current_line_number = this.current_line_number, + .at_start_of = null, + .import_record_count = 0, + }; + } + + pub fn skipWhitespace(this: *Tokenizer) void { + while (!this.isEof()) { + // todo_stuff.match_byte + switch (this.nextByteUnchecked()) { + ' ', '\t' => this.advance(1), + '\n', 0x0C, '\r' => this.consumeNewline(), + '/' => { + if (this.startsWith("/*")) { + _ = this.consumeComment(); + } else return; + }, + else => return, + } + } + } + + pub fn currentSourceLocation(this: *const Tokenizer) SourceLocation { + return SourceLocation{ + .line = this.current_line_number, + .column = @intCast((this.position - 
this.current_line_start_position) + 1), + }; + } + + pub fn prev(this: *Tokenizer) Token { + bun.assert(this.position > 0); + return this.previous; + } + + pub inline fn isEof(this: *Tokenizer) bool { + return this.position >= this.src.len; + } + + pub fn seeFunction(this: *Tokenizer, name: []const u8) void { + if (this.var_or_env_functions == .looking_for_them) { + if (std.ascii.eqlIgnoreCase(name, "var") and std.ascii.eqlIgnoreCase(name, "env")) { + this.var_or_env_functions = .seen_at_least_one; + } + } + } + + /// TODO: fix this, remove the additional shit I added + /// return error if it is eof + pub inline fn next(this: *Tokenizer) Maybe(Token, void) { + return this.nextImpl(); + } + + pub fn nextImpl(this: *Tokenizer) Maybe(Token, void) { + if (this.isEof()) return .{ .err = {} }; + + // todo_stuff.match_byte; + const b = this.byteAt(0); + const token: Token = switch (b) { + ' ', '\t' => this.consumeWhitespace(false), + '\n', FORM_FEED_BYTE, '\r' => this.consumeWhitespace(true), + '"' => this.consumeString(false), + '#' => brk: { + this.advance(1); + if (this.isIdentStart()) break :brk .{ .idhash = this.consumeName() }; + if (!this.isEof() and switch (this.nextByteUnchecked()) { + // Any other valid case here already resulted in IDHash. + '0'...'9', '-' => true, + else => false, + }) break :brk .{ .hash = this.consumeName() }; + break :brk .{ .delim = '#' }; + }, + '$' => brk: { + if (this.startsWith("$=")) { + this.advance(2); + break :brk .suffix_match; + } + this.advance(1); + break :brk .{ .delim = '$' }; + }, + '\'' => this.consumeString(true), + '(' => brk: { + this.advance(1); + break :brk .open_paren; + }, + ')' => brk: { + this.advance(1); + break :brk .close_paren; + }, + '*' => brk: { + if (this.startsWith("*=")) { + this.advance(2); + break :brk .substring_match; + } + this.advance(1); + break :brk .{ .delim = '*' }; + }, + '+' => brk: { + if ((this.hasAtLeast(1) and switch (this.byteAt(1)) { + '0'...'9' => true, + else => false, + }) or (this.hasAtLeast(2) and + this.byteAt(1) == '.' and switch (this.byteAt(2)) { + '0'...'9' => true, + else => false, + })) { + break :brk this.consumeNumeric(); + } + + this.advance(1); + break :brk .{ .delim = '+' }; + }, + ',' => brk: { + this.advance(1); + break :brk .comma; + }, + '-' => brk: { + if ((this.hasAtLeast(1) and switch (this.byteAt(1)) { + '0'...'9' => true, + else => false, + }) or (this.hasAtLeast(2) and this.byteAt(1) == '.' and switch (this.byteAt(2)) { + '0'...'9' => true, + else => false, + })) break :brk this.consumeNumeric(); + + if (this.startsWith("-->")) { + this.advance(3); + break :brk .cdc; + } + + if (this.isIdentStart()) break :brk this.consumeIdentLike(); + + this.advance(1); + break :brk .{ .delim = '-' }; + }, + '.' => brk: { + if (this.hasAtLeast(1) and switch (this.byteAt(1)) { + '0'...'9' => true, + else => false, + }) { + break :brk this.consumeNumeric(); + } + this.advance(1); + break :brk .{ .delim = '.' }; + }, + '/' => brk: { + if (this.startsWith("/*")) break :brk .{ .comment = this.consumeComment() }; + this.advance(1); + break :brk .{ .delim = '/' }; + }, + '0'...'9' => this.consumeNumeric(), + ':' => brk: { + this.advance(1); + break :brk .colon; + }, + ';' => brk: { + this.advance(1); + break :brk .semicolon; + }, + '<' => brk: { + if (this.startsWith("")) this.advance(3) else return, + else => return, + } + } + } + + pub fn consumeNumeric(this: *Tokenizer) Token { + // Parse [+-]?\d*(\.\d+)?([eE][+-]?\d+)? + // But this is always called so that there is at least one digit in \d*(\.\d+)? 
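+        // For example, "-1.5e2" is consumed as sign = -1, integral_part = 1,
+        // fractional_part = 0.5, exponent = 2, giving value = -150.0 with
+        // is_integer = false (so int_value = null); "42" gives value = 42.0
+        // with int_value = 42.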
+ + // Do all the math in f64 so that large numbers overflow to +/-inf + // and i32::{MIN, MAX} are within range. + const has_sign: bool, const sign: f64 = brk: { + switch (this.nextByteUnchecked()) { + '-' => break :brk .{ true, -1.0 }, + '+' => break :brk .{ true, 1.0 }, + else => break :brk .{ false, 1.0 }, + } + }; + + if (has_sign) this.advance(1); + + var integral_part: f64 = 0.0; + while (byteToDecimalDigit(this.nextByteUnchecked())) |digit| { + integral_part = integral_part * 10.0 + @as(f64, @floatFromInt(digit)); + this.advance(1); + if (this.isEof()) break; + } + + var is_integer = true; + + var fractional_part: f64 = 0.0; + if (this.hasAtLeast(1) and this.nextByteUnchecked() == '.' and switch (this.byteAt(1)) { + '0'...'9' => true, + else => false, + }) { + is_integer = false; + this.advance(1); // Consume '.' + var factor: f64 = 0.1; + while (byteToDecimalDigit(this.nextByteUnchecked())) |digit| { + fractional_part += @as(f64, @floatFromInt(digit)) * factor; + factor *= 0.1; + this.advance(1); + if (this.isEof()) break; + } + } + + var value: f64 = sign * (integral_part + fractional_part); + + if (this.hasAtLeast(1) and switch (this.nextByteUnchecked()) { + 'e', 'E' => true, + else => false, + }) { + if (switch (this.byteAt(1)) { + '0'...'9' => true, + else => false, + } or (this.hasAtLeast(2) and switch (this.byteAt(1)) { + '+', '-' => true, + else => false, + } and switch (this.byteAt(2)) { + '0'...'9' => true, + else => false, + })) { + is_integer = false; + this.advance(1); + const has_sign2: bool, const sign2: f64 = brk: { + switch (this.nextByteUnchecked()) { + '-' => break :brk .{ true, -1.0 }, + '+' => break :brk .{ true, 1.0 }, + else => break :brk .{ false, 1.0 }, + } + }; + + if (has_sign2) this.advance(1); + + var exponent: f64 = 0.0; + while (byteToDecimalDigit(this.nextByteUnchecked())) |digit| { + exponent = exponent * 10.0 + @as(f64, @floatFromInt(digit)); + this.advance(1); + if (this.isEof()) break; + } + value *= bun.pow(10, sign2 * exponent); + } + } + + const int_value: ?i32 = brk: { + const i32_max = comptime std.math.maxInt(i32); + const i32_min = comptime std.math.minInt(i32); + if (is_integer) { + if (value >= @as(f64, @floatFromInt(i32_max))) { + break :brk i32_max; + } else if (value <= @as(f64, @floatFromInt(i32_min))) { + break :brk i32_min; + } else { + break :brk @intFromFloat(value); + } + } + + break :brk null; + }; + + if (!this.isEof() and this.nextByteUnchecked() == '%') { + this.advance(1); + return .{ .percentage = .{ .unit_value = @floatCast(value / 100), .int_value = int_value, .has_sign = has_sign } }; + } + + if (this.isIdentStart()) { + const unit = this.consumeName(); + return .{ + .dimension = .{ + .num = .{ .value = @floatCast(value), .int_value = int_value, .has_sign = has_sign }, + .unit = unit, + }, + }; + } + + return .{ + .number = .{ .value = @floatCast(value), .int_value = int_value, .has_sign = has_sign }, + }; + } + + pub fn consumeWhitespace(this: *Tokenizer, comptime newline: bool) Token { + const start_position = this.position; + if (newline) { + this.consumeNewline(); + } else { + this.advance(1); + } + + while (!this.isEof()) { + // todo_stuff.match_byte + const b = this.nextByteUnchecked(); + switch (b) { + ' ', '\t' => this.advance(1), + '\n', FORM_FEED_BYTE, '\r' => this.consumeNewline(), + else => break, + } + } + + return .{ .whitespace = this.sliceFrom(start_position) }; + } + + pub fn consumeString(this: *Tokenizer, comptime single_quote: bool) Token { + const quoted_string = 
this.consumeQuotedString(single_quote); + if (quoted_string.bad) return .{ .bad_string = quoted_string.str }; + return .{ .quoted_string = quoted_string.str }; + } + + pub fn consumeIdentLike(this: *Tokenizer) Token { + const value = this.consumeName(); + if (!this.isEof() and this.nextByteUnchecked() == '(') { + this.advance(1); + if (std.ascii.eqlIgnoreCase(value, "url")) return if (this.consumeUnquotedUrl()) |tok| return tok else .{ .function = value }; + this.seeFunction(value); + return .{ .function = value }; + } + return .{ .ident = value }; + } + + pub fn consumeName(this: *Tokenizer) []const u8 { + const start_pos = this.position; + var value_bytes: CopyOnWriteStr = undefined; + + while (true) { + if (this.isEof()) return this.sliceFrom(start_pos); + + // todo_stuff.match_byte + switch (this.nextByteUnchecked()) { + 'a'...'z', 'A'...'Z', '0'...'9', '_', '-' => this.advance(1), + '\\', 0 => { + // * The tokenizer’s input is UTF-8 since it’s `&str`. + // * start_pos is at a code point boundary + // * so is the current position (which is before '\\' or '\0' + // + // So `value_bytes` is well-formed UTF-8. + value_bytes = .{ .borrowed = this.sliceFrom(start_pos) }; + break; + }, + 0x80...0xBF => this.consumeContinuationByte(), + // This is the range of the leading byte of a 2-3 byte character + // encoding + 0xC0...0xEF => this.advance(1), + 0xF0...0xFF => this.consume4byteIntro(), + else => return this.sliceFrom(start_pos), + } + } + + while (!this.isEof()) { + const b = this.nextByteUnchecked(); + // todo_stuff.match_byte + switch (b) { + 'a'...'z', 'A'...'Z', '0'...'9', '_', '-' => { + this.advance(1); + value_bytes.append(this.allocator, &[_]u8{b}); + }, + '\\' => { + if (this.hasNewlineAt(1)) break; + this.advance(1); + this.consumeEscapeAndWrite(&value_bytes); + }, + 0 => { + this.advance(1); + value_bytes.append(this.allocator, REPLACEMENT_CHAR_UNICODE[0..]); + }, + 0x80...0xBF => { + // This byte *is* part of a multi-byte code point, + // we’ll end up copying the whole code point before this loop does something else. + this.consumeContinuationByte(); + value_bytes.append(this.allocator, &[_]u8{b}); + }, + 0xC0...0xEF => { + // This byte *is* part of a multi-byte code point, + // we’ll end up copying the whole code point before this loop does something else. + this.advance(1); + value_bytes.append(this.allocator, &[_]u8{b}); + }, + 0xF0...0xFF => { + this.consume4byteIntro(); + value_bytes.append(this.allocator, &[_]u8{b}); + }, + else => { + // ASCII + break; + }, + } + } + + return value_bytes.toSlice(); + } + + pub fn consumeQuotedString(this: *Tokenizer, comptime single_quote: bool) struct { str: []const u8, bad: bool = false } { + this.advance(1); // Skip the initial quote + const start_pos = this.position; + var string_bytes: CopyOnWriteStr = undefined; + + while (true) { + if (this.isEof()) return .{ .str = this.sliceFrom(start_pos) }; + + // todo_stuff.match_byte + switch (this.nextByteUnchecked()) { + '"' => { + if (!single_quote) { + const value = this.sliceFrom(start_pos); + this.advance(1); + return .{ .str = value }; + } + this.advance(1); + }, + '\'' => { + if (single_quote) { + const value = this.sliceFrom(start_pos); + this.advance(1); + return .{ .str = value }; + } + this.advance(1); + }, + // The CSS spec says NULL bytes ('\0') should be turned into replacement characters: 0xFFFD + '\\', 0 => { + // * The tokenizer’s input is UTF-8 since it’s `&str`. 
+ // * start_pos is at a code point boundary + // * so is the current position (which is before '\\' or '\0' + // + // So `string_bytes` is well-formed UTF-8. + string_bytes = .{ .borrowed = this.sliceFrom(start_pos) }; + break; + }, + '\n', '\r', FORM_FEED_BYTE => return .{ .str = this.sliceFrom(start_pos), .bad = true }, + 0x80...0xBF => this.consumeContinuationByte(), + 0xF0...0xFF => this.consume4byteIntro(), + else => { + this.advance(1); + }, + } + } + + while (!this.isEof()) { + const b = this.nextByteUnchecked(); + // todo_stuff.match_byte + switch (b) { + // string_bytes is well-formed UTF-8, see other comments + '\n', '\r', FORM_FEED_BYTE => return .{ .str = string_bytes.toSlice(), .bad = true }, + '"' => { + this.advance(1); + if (!single_quote) break; + }, + '\'' => { + this.advance(1); + if (single_quote) break; + }, + '\\' => { + this.advance(1); + if (!this.isEof()) { + switch (this.nextByteUnchecked()) { + // Escaped newline + '\n', FORM_FEED_BYTE, '\r' => this.consumeNewline(), + else => this.consumeEscapeAndWrite(&string_bytes), + } + } + // else: escaped EOF, do nothing. + // continue; + }, + 0 => { + this.advance(1); + string_bytes.append(this.allocator, REPLACEMENT_CHAR_UNICODE[0..]); + continue; + }, + 0x80...0xBF => this.consumeContinuationByte(), + 0xF0...0xFF => this.consume4byteIntro(), + else => { + this.advance(1); + }, + } + + string_bytes.append(this.allocator, &[_]u8{b}); + } + + return .{ .str = string_bytes.toSlice() }; + } + + pub fn consumeUnquotedUrl(this: *Tokenizer) ?Token { + // This is only called after "url(", so the current position is a code point boundary. + const start_position = this.position; + const from_start = this.src[this.position..]; + var newlines: u32 = 0; + var last_newline: usize = 0; + var found_printable_char = false; + + var offset: usize = 0; + var b: u8 = undefined; + while (true) { + defer offset += 1; + + if (offset < from_start.len) { + b = from_start[offset]; + } else { + this.position = this.src.len; + break; + } + + // todo_stuff.match_byte + switch (b) { + ' ', '\t' => {}, + '\n', FORM_FEED_BYTE => { + newlines += 1; + last_newline = offset; + }, + '\r' => { + if (offset + 1 < from_start.len and from_start[offset + 1] != '\n') { + newlines += 1; + last_newline = offset; + } + }, + '"', '\'' => return null, // Do not advance + ')' => { + // Don't use advance, because we may be skipping + // newlines here, and we want to avoid the assert. + this.position += offset + 1; + break; + }, + else => { + // Don't use advance, because we may be skipping + // newlines here, and we want to avoid the assert. + this.position += offset; + found_printable_char = true; + break; + }, + } + } + + if (newlines > 0) { + this.current_line_number += newlines; + // No need for wrapping_add here, because there's no possible + // way to wrap. + this.current_line_start_position = start_position + last_newline + 1; + } + + if (found_printable_char) { + // This function only consumed ASCII (whitespace) bytes, + // so the current position is a code point boundary. 
+ return this.consumeUnquotedUrlInternal(); + } + return .{ .unquoted_url = "" }; + } + + pub fn consumeUnquotedUrlInternal(this: *Tokenizer) Token { + // This function is only called with start_pos at a code point boundary.; + const start_pos = this.position; + var string_bytes: CopyOnWriteStr = undefined; + + while (true) { + if (this.isEof()) return .{ .unquoted_url = this.sliceFrom(start_pos) }; + + // todo_stuff.match_byte + switch (this.nextByteUnchecked()) { + ' ', '\t', '\n', '\r', FORM_FEED_BYTE => { + var value = .{ .borrowed = this.sliceFrom(start_pos) }; + return this.consumeUrlEnd(start_pos, &value); + }, + ')' => { + const value = this.sliceFrom(start_pos); + this.advance(1); + return .{ .unquoted_url = value }; + }, + // non-printable + 0x01...0x08, + 0x0B, + 0x0E...0x1F, + 0x7F, + + // not valid in this context + '"', + '\'', + '(', + => { + this.advance(1); + return this.consumeBadUrl(start_pos); + }, + '\\', 0 => { + // * The tokenizer’s input is UTF-8 since it’s `&str`. + // * start_pos is at a code point boundary + // * so is the current position (which is before '\\' or '\0' + // + // So `string_bytes` is well-formed UTF-8. + string_bytes = .{ .borrowed = this.sliceFrom(start_pos) }; + break; + }, + 0x80...0xBF => this.consumeContinuationByte(), + 0xF0...0xFF => this.consume4byteIntro(), + else => { + // ASCII or other leading byte. + this.advance(1); + }, + } + } + + while (!this.isEof()) { + const b = this.nextByteUnchecked(); + // todo_stuff.match_byte + switch (b) { + ' ', '\t', '\n', '\r', FORM_FEED_BYTE => { + // string_bytes is well-formed UTF-8, see other comments. + // const string = string_bytes.toSlice(); + // return this.consumeUrlEnd(start_pos, &string); + return this.consumeUrlEnd(start_pos, &string_bytes); + }, + ')' => { + this.advance(1); + break; + }, + // non-printable + 0x01...0x08, + 0x0B, + 0x0E...0x1F, + 0x7F, + + // invalid in this context + '"', + '\'', + '(', + => { + this.advance(1); + return this.consumeBadUrl(start_pos); + }, + '\\' => { + this.advance(1); + if (this.hasNewlineAt(0)) return this.consumeBadUrl(start_pos); + + // This pushes one well-formed code point to string_bytes + this.consumeEscapeAndWrite(&string_bytes); + }, + 0 => { + this.advance(1); + string_bytes.append(this.allocator, REPLACEMENT_CHAR_UNICODE[0..]); + }, + 0x80...0xBF => { + // We’ll end up copying the whole code point + // before this loop does something else. + this.consumeContinuationByte(); + string_bytes.append(this.allocator, &[_]u8{b}); + }, + 0xF0...0xFF => { + // We’ll end up copying the whole code point + // before this loop does something else. + this.consume4byteIntro(); + string_bytes.append(this.allocator, &[_]u8{b}); + }, + // If this byte is part of a multi-byte code point, + // we’ll end up copying the whole code point before this loop does something else. + else => { + // ASCII or other leading byte. + this.advance(1); + string_bytes.append(this.allocator, &[_]u8{b}); + }, + } + } + + // string_bytes is well-formed UTF-8, see other comments. 
+ return .{ .unquoted_url = string_bytes.toSlice() }; + } + + pub fn consumeUrlEnd(this: *Tokenizer, start_pos: usize, string: *CopyOnWriteStr) Token { + while (!this.isEof()) { + // todo_stuff.match_byte + switch (this.nextByteUnchecked()) { + ')' => { + this.advance(1); + break; + }, + ' ', '\t' => this.advance(1), + '\n', FORM_FEED_BYTE, '\r' => this.consumeNewline(), + else => |b| { + this.consumeKnownByte(b); + return this.consumeBadUrl(start_pos); + }, + } + } + + return .{ .unquoted_url = string.toSlice() }; + } + + pub fn consumeBadUrl(this: *Tokenizer, start_pos: usize) Token { + // Consume up to the closing ) + while (!this.isEof()) { + // todo_stuff.match_byte + switch (this.nextByteUnchecked()) { + ')' => { + const contents = this.sliceFrom(start_pos); + this.advance(1); + return .{ .bad_url = contents }; + }, + '\\' => { + this.advance(1); + if (this.nextByte()) |b| { + if (b == ')' or b == '\\') this.advance(1); // Skip an escaped ')' or '\' + } + }, + '\n', FORM_FEED_BYTE, '\r' => this.consumeNewline(), + else => |b| this.consumeKnownByte(b), + } + } + return .{ .bad_url = this.sliceFrom(start_pos) }; + } + + pub fn consumeEscapeAndWrite(this: *Tokenizer, bytes: *CopyOnWriteStr) void { + const val = this.consumeEscape(); + var utf8bytes: [4]u8 = undefined; + const len = std.unicode.utf8Encode(@truncate(val), utf8bytes[0..]) catch @panic("Invalid"); + bytes.append(this.allocator, utf8bytes[0..len]); + } + + pub fn consumeEscape(this: *Tokenizer) u32 { + if (this.isEof()) return 0xFFFD; // Unicode replacement character + + // todo_stuff.match_byte + switch (this.nextByteUnchecked()) { + '0'...'9', 'A'...'F', 'a'...'f' => { + const c = this.consumeHexDigits().value; + if (!this.isEof()) { + // todo_stuff.match_byte + switch (this.nextByteUnchecked()) { + ' ', '\t' => this.advance(1), + '\n', FORM_FEED_BYTE, '\r' => this.consumeNewline(), + else => {}, + } + } + + if (c != 0 and std.unicode.utf8ValidCodepoint(@truncate(c))) return c; + return REPLACEMENT_CHAR; + }, + 0 => { + this.advance(1); + return REPLACEMENT_CHAR; + }, + else => return this.consumeChar(), + } + } + + pub fn consumeHexDigits(this: *Tokenizer) struct { value: u32, num_digits: u32 } { + var value: u32 = 0; + var digits: u32 = 0; + while (digits < 6 and !this.isEof()) { + if (byteToHexDigit(this.nextByteUnchecked())) |digit| { + value = value * 16 + digit; + digits += 1; + this.advance(1); + } else break; + } + + return .{ .value = value, .num_digits = digits }; + } + + pub fn consumeChar(this: *Tokenizer) u32 { + const c = this.nextChar(); + const len_utf8 = lenUtf8(c); + this.position += len_utf8; + // Note that due to the special case for the 4-byte sequence + // intro, we must use wrapping add here. 
+ this.current_line_start_position +%= len_utf8 - lenUtf16(c); + return c; + } + + fn lenUtf8(code: u32) usize { + if (code < MAX_ONE_B) { + return 1; + } else if (code < MAX_TWO_B) { + return 2; + } else if (code < MAX_THREE_B) { + return 3; + } else { + return 4; + } + } + + fn lenUtf16(ch: u32) usize { + if ((ch & 0xFFFF) == ch) { + return 1; + } else { + return 2; + } + } + + fn byteToHexDigit(b: u8) ?u32 { + + // todo_stuff.match_byte + return switch (b) { + '0'...'9' => b - '0', + 'a'...'f' => b - 'a' + 10, + 'A'...'F' => b - 'A' + 10, + else => null, + }; + } + + fn byteToDecimalDigit(b: u8) ?u32 { + if (b >= '0' and b <= '9') { + return b - '0'; + } + return null; + } + + pub fn consumeComment(this: *Tokenizer) []const u8 { + this.advance(2); + const start_position = this.position; + while (!this.isEof()) { + const b = this.nextByteUnchecked(); + // todo_stuff.match_byte + switch (b) { + '*' => { + const end_position = this.position; + this.advance(1); + if (this.nextByte() == '/') { + this.advance(1); + const contents = this.src[start_position..end_position]; + this.checkForSourceMap(contents); + return contents; + } + }, + '\n', FORM_FEED_BYTE, '\r' => { + this.consumeNewline(); + }, + 0x80...0xBF => this.consumeContinuationByte(), + 0xF0...0xFF => this.consume4byteIntro(), + else => { + // ASCII or other leading byte + this.advance(1); + }, + } + } + const contents = this.sliceFrom(start_position); + this.checkForSourceMap(contents); + return contents; + } + + pub fn checkForSourceMap(this: *Tokenizer, contents: []const u8) void { + { + const directive = "# sourceMappingURL="; + const directive_old = "@ sourceMappingURL="; + if (std.mem.startsWith(u8, contents, directive) or std.mem.startsWith(u8, contents, directive_old)) { + this.source_map_url = splitSourceMap(contents[directive.len..]); + } + } + + { + const directive = "# sourceURL="; + const directive_old = "@ sourceURL="; + if (std.mem.startsWith(u8, contents, directive) or std.mem.startsWith(u8, contents, directive_old)) { + this.source_map_url = splitSourceMap(contents[directive.len..]); + } + } + } + + pub fn splitSourceMap(contents: []const u8) ?[]const u8 { + // FIXME: Use bun CodepointIterator + var iter = std.unicode.Utf8Iterator{ .bytes = contents, .i = 0 }; + while (iter.nextCodepoint()) |c| { + switch (c) { + ' ', '\t', FORM_FEED_BYTE, '\r', '\n' => { + const start = 0; + const end = iter.i; + return contents[start..end]; + }, + else => {}, + } + } + return null; + } + + pub fn consumeNewline(this: *Tokenizer) void { + const byte = this.nextByteUnchecked(); + if (bun.Environment.allow_assert) { + std.debug.assert(byte == '\r' or byte == '\n' or byte == FORM_FEED_BYTE); + } + this.position += 1; + if (byte == '\r' and this.nextByte() == '\n') { + this.position += 1; + } + this.current_line_start_position = this.position; + this.current_line_number += 1; + } + + /// Advance over a single byte; the byte must be a UTF-8 + /// continuation byte. 
+ /// + /// Binary Hex Comments + /// 0xxxxxxx 0x00..0x7F Only byte of a 1-byte character encoding + /// 110xxxxx 0xC0..0xDF First byte of a 2-byte character encoding + /// 1110xxxx 0xE0..0xEF First byte of a 3-byte character encoding + /// 11110xxx 0xF0..0xF7 First byte of a 4-byte character encoding + /// 10xxxxxx 0x80..0xBF Continuation byte: one of 1-3 bytes following the first <-- + pub fn consumeContinuationByte(this: *Tokenizer) void { + if (bun.Environment.allow_assert) std.debug.assert(this.nextByteUnchecked() & 0xC0 == 0x80); + // Continuation bytes contribute to column overcount. Note + // that due to the special case for the 4-byte sequence intro, + // we must use wrapping add here. + this.current_line_start_position +%= 1; + this.position += 1; + } + + /// Advance over a single byte; the byte must be a UTF-8 sequence + /// leader for a 4-byte sequence. + /// + /// Binary Hex Comments + /// 0xxxxxxx 0x00..0x7F Only byte of a 1-byte character encoding + /// 110xxxxx 0xC0..0xDF First byte of a 2-byte character encoding + /// 1110xxxx 0xE0..0xEF First byte of a 3-byte character encoding + /// 11110xxx 0xF0..0xF7 First byte of a 4-byte character encoding <-- + /// 10xxxxxx 0x80..0xBF Continuation byte: one of 1-3 bytes following the first + pub fn consume4byteIntro(this: *Tokenizer) void { + if (bun.Environment.allow_assert) std.debug.assert(this.nextByteUnchecked() & 0xF0 == 0xF0); + // This takes two UTF-16 characters to represent, so we + // actually have an undercount. + // this.current_line_start_position = self.current_line_start_position.wrapping_sub(1); + this.current_line_start_position -%= 1; + this.position += 1; + } + + pub fn isIdentStart(this: *Tokenizer) bool { + + // todo_stuff.match_byte + return !this.isEof() and switch (this.nextByteUnchecked()) { + 'a'...'z', 'A'...'Z', '_', 0 => true, + + // todo_stuff.match_byte + '-' => this.hasAtLeast(1) and switch (this.byteAt(1)) { + 'a'...'z', 'A'...'Z', '-', '_', 0 => true, + '\\' => !this.hasNewlineAt(1), + else => |b| !std.ascii.isASCII(b), + }, + '\\' => !this.hasNewlineAt(1), + else => |b| !std.ascii.isASCII(b), + }; + } + + /// If true, the input has at least `n` bytes left *after* the current one. + /// That is, `tokenizer.char_at(n)` will not panic. + fn hasAtLeast(this: *Tokenizer, n: usize) bool { + return this.position + n < this.src.len; + } + + fn hasNewlineAt(this: *Tokenizer, offset: usize) bool { + return this.position + offset < this.src.len and switch (this.byteAt(offset)) { + '\n', '\r', FORM_FEED_BYTE => true, + else => false, + }; + } + + pub fn startsWith(this: *Tokenizer, comptime needle: []const u8) bool { + return std.mem.eql(u8, this.src[this.position .. this.position + needle.len], needle); + } + + /// Advance over N bytes in the input. This function can advance + /// over ASCII bytes (excluding newlines), or UTF-8 sequence + /// leaders (excluding leaders for 4-byte sequences). + pub fn advance(this: *Tokenizer, n: usize) void { + if (bun.Environment.allow_assert) { + // Each byte must either be an ASCII byte or a sequence + // leader, but not a 4-byte leader; also newlines are + // rejected. + for (0..n) |i| { + const b = this.byteAt(i); + std.debug.assert(std.ascii.isASCII(b) or (b & 0xF0 != 0xF0 and b & 0xC0 != 0x80)); + std.debug.assert(b != '\r' and b != '\n' and b != '\x0C'); + } + } + this.position += n; + } + + /// Advance over any kind of byte, excluding newlines. 
+ pub fn consumeKnownByte(this: *Tokenizer, byte: u8) void { + if (bun.Environment.allow_assert) std.debug.assert(byte != '\r' and byte != '\n' and byte != FORM_FEED_BYTE); + this.position += 1; + // Continuation bytes contribute to column overcount. + if (byte & 0xF0 == 0xF0) { + // This takes two UTF-16 characters to represent, so we + // actually have an undercount. + this.current_line_start_position -%= 1; + } else if (byte & 0xC0 == 0x80) { + // Note that due to the special case for the 4-byte + // sequence intro, we must use wrapping add here. + this.current_line_start_position +%= 1; + } + } + + pub inline fn byteAt(this: *Tokenizer, n: usize) u8 { + return this.src[this.position + n]; + } + + pub inline fn nextByte(this: *Tokenizer) ?u8 { + if (this.isEof()) return null; + return this.src[this.position]; + } + + pub inline fn nextChar(this: *Tokenizer) u32 { + const len = bun.strings.utf8ByteSequenceLength(this.src[this.position]); + return bun.strings.decodeWTF8RuneT(this.src[this.position..].ptr[0..4], len, u32, bun.strings.unicode_replacement); + } + + pub inline fn nextByteUnchecked(this: *Tokenizer) u8 { + return this.src[this.position]; + } + + pub inline fn sliceFrom(this: *Tokenizer, start: usize) []const u8 { + return this.src[start..this.position]; + } +}; + +const TokenKind = enum { + /// An [](https://drafts.csswg.org/css-syntax/#typedef-ident-token) + ident, + + /// Value is the ident + function, + + /// Value is the ident + at_keyword, + + /// A [``](https://drafts.csswg.org/css-syntax/#hash-token-diagram) with the type flag set to "unrestricted" + /// + /// The value does not include the `#` marker. + hash, + + /// A [``](https://drafts.csswg.org/css-syntax/#hash-token-diagram) with the type flag set to "id" + /// + /// The value does not include the `#` marker. 
+ idhash, + + quoted_string, + + bad_string, + + /// `url()` is represented by a `.function` token + unquoted_url, + + bad_url, + + /// Value of a single codepoint + delim, + + /// A can be fractional or an integer, and can contain an optional + or - sign + number, + + percentage, + + dimension, + + whitespace, + + /// `` + cdc, + + /// `~=` (https://www.w3.org/TR/selectors-4/#attribute-representation) + include_match, + + /// `|=` (https://www.w3.org/TR/selectors-4/#attribute-representation) + dash_match, + + /// `^=` (https://www.w3.org/TR/selectors-4/#attribute-substrings) + prefix_match, + + /// `$=`(https://www.w3.org/TR/selectors-4/#attribute-substrings) + suffix_match, + + /// `*=` (https://www.w3.org/TR/selectors-4/#attribute-substrings) + substring_match, + + colon, + semicolon, + comma, + open_square, + close_square, + open_paren, + close_paren, + open_curly, + close_curly, + + /// Not an actual token in the spec, but we keep it anyway + comment, + + pub fn toString(this: TokenKind) []const u8 { + return switch (this) { + .at_keyword => "@-keyword", + .bad_string => "bad string token", + .bad_url => "bad URL token", + .cdc => "\"-->\"", + .cdo => "\"` + cdc, + + /// `~=` (https://www.w3.org/TR/selectors-4/#attribute-representation) + include_match, + + /// `|=` (https://www.w3.org/TR/selectors-4/#attribute-representation) + dash_match, + + /// `^=` (https://www.w3.org/TR/selectors-4/#attribute-substrings) + prefix_match, + + /// `$=`(https://www.w3.org/TR/selectors-4/#attribute-substrings) + suffix_match, + + /// `*=` (https://www.w3.org/TR/selectors-4/#attribute-substrings) + substring_match, + + colon, + semicolon, + comma, + open_square, + close_square, + open_paren, + close_paren, + open_curly, + close_curly, + + /// Not an actual token in the spec, but we keep it anyway + comment: []const u8, + + pub fn eql(lhs: *const Token, rhs: *const Token) bool { + return implementEql(Token, lhs, rhs); + } + + pub fn hash(this: *const @This(), hasher: *std.hash.Wyhash) void { + return implementHash(@This(), this, hasher); + } + + /// Return whether this token represents a parse error. + /// + /// `BadUrl` and `BadString` are tokenizer-level parse errors. + /// + /// `CloseParenthesis`, `CloseSquareBracket`, and `CloseCurlyBracket` are *unmatched* + /// and therefore parse errors when returned by one of the `Parser::next*` methods. 
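+ ///
+ /// (`CloseParenthesis`, `CloseSquareBracket`, and `CloseCurlyBracket` map to
+ /// the `.close_paren`, `.close_square`, and `.close_curly` tags below.)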
+ pub fn isParseError(this: *const Token) bool { + return switch (this.*) { + .bad_url, .bad_string, .close_paren, .close_square, .close_curly => true, + else => false, + }; + } + + pub fn format( + this: *const Token, + comptime fmt: []const u8, + opts: std.fmt.FormatOptions, + writer: anytype, + ) !void { + _ = fmt; // autofix + _ = opts; // autofix + return switch (this.*) { + inline .ident, + .function, + .at_keyword, + .hash, + .idhash, + .quoted_string, + .bad_string, + .unquoted_url, + .bad_url, + .whitespace, + .comment, + => |str| { + try writer.print("{s} = {s}", .{ @tagName(this.*), str }); + }, + .delim => |d| { + try writer.print("'{c}'", .{@as(u8, @truncate(d))}); + }, + else => { + try writer.print("{s}", .{@tagName(this.*)}); + }, + }; + } + + pub fn raw(this: Token) []const u8 { + return switch (this) { + .ident => this.ident, + // .function => + }; + } + + pub inline fn kind(this: Token) TokenKind { + return @as(TokenKind, this); + } + + pub inline fn kindString(this: Token) []const u8 { + return this.kind.toString(); + } + + // ~toCssImpl + const This = @This(); + + pub fn toCssGeneric(this: *const This, writer: anytype) !void { + return switch (this.*) { + .ident => { + try serializer.serializeIdentifier(this.ident, writer); + }, + .at_keyword => { + try writer.writeAll("@"); + try serializer.serializeIdentifier(this.at_keyword, writer); + }, + .hash => { + try writer.writeAll("#"); + try serializer.serializeName(this.hash, writer); + }, + .idhash => { + try writer.writeAll("#"); + try serializer.serializeName(this.idhash, writer); + }, + .quoted_string => |x| { + try serializer.serializeName(x, writer); + }, + .unquoted_url => |x| { + try writer.writeAll("url("); + try serializer.serializeUnquotedUrl(x, writer); + try writer.writeAll(")"); + }, + .delim => |x| { + bun.assert(x <= 0x7F); + try writer.writeByte(@intCast(x)); + }, + .number => |n| { + try serializer.writeNumeric(n.value, n.int_value, n.has_sign, writer); + }, + .percentage => |p| { + try serializer.writeNumeric(p.unit_value * 100.0, p.int_value, p.has_sign, writer); + }, + .dimension => |d| { + try serializer.writeNumeric(d.num.value, d.num.int_value, d.num.has_sign, writer); + // Disambiguate with scientific notation. + const unit = d.unit; + // TODO(emilio): This doesn't handle e.g. 100E1m, which gets us + // an unit of "E1m"... 
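+ // A bare "e"/"E" unit, or one starting with "e-"/"E-", could be re-read as
+ // the exponent of the number just written, so its first letter is emitted
+ // as the hex escape `\65 ` and the rest is serialized as a name.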
+ if ((unit.len == 1 and unit[0] == 'e') or + (unit.len == 1 and unit[0] == 'E') or + bun.strings.startsWith(unit, "e-") or + bun.strings.startsWith(unit, "E-")) + { + try writer.writeAll("\\65 "); + try serializer.serializeName(unit[1..], writer); + } else { + try serializer.serializeIdentifier(unit, writer); + } + }, + .whitespace => |content| { + try writer.writeAll(content); + }, + .comment => |content| { + try writer.writeAll("/*"); + try writer.writeAll(content); + try writer.writeAll("*/"); + }, + .colon => try writer.writeAll(":"), + .semicolon => try writer.writeAll(";"), + .comma => try writer.writeAll(","), + .include_match => try writer.writeAll("~="), + .dash_match => try writer.writeAll("|="), + .prefix_match => try writer.writeAll("^="), + .suffix_match => try writer.writeAll("$="), + .substring_match => try writer.writeAll("*="), + .cdo => try writer.writeAll(""), + + .function => |name| { + try serializer.serializeIdentifier(name, writer); + try writer.writeAll("("); + }, + .open_paren => try writer.writeAll("("), + .open_square => try writer.writeAll("["), + .open_curly => try writer.writeAll("{"), + + .bad_url => |contents| { + try writer.writeAll("url("); + try writer.writeAll(contents); + try writer.writeByte(')'); + }, + .bad_string => |value| { + // During tokenization, an unescaped newline after a quote causes + // the token to be a BadString instead of a QuotedString. + // The BadString token ends just before the newline + // (which is in a separate WhiteSpace token), + // and therefore does not have a closing quote. + try writer.writeByte('"'); + var string_writer = serializer.CssStringWriter(@TypeOf(writer)).new(writer); + try string_writer.writeStr(value); + }, + .close_paren => try writer.writeAll(")"), + .close_square => try writer.writeAll("]"), + .close_curly => try writer.writeAll("}"), + }; + } + + pub fn toCss(this: *const This, comptime W: type, dest: *Printer(W)) PrintErr!void { + // zack is here: verify this is correct + return switch (this.*) { + .ident => |value| serializer.serializeIdentifier(value, dest) catch return dest.addFmtError(), + .at_keyword => |value| { + try dest.writeStr("@"); + return serializer.serializeIdentifier(value, dest) catch return dest.addFmtError(); + }, + .hash => |value| { + try dest.writeStr("#"); + return serializer.serializeName(value, dest) catch return dest.addFmtError(); + }, + .idhash => |value| { + try dest.writeStr("#"); + return serializer.serializeIdentifier(value, dest) catch return dest.addFmtError(); + }, + .quoted_string => |value| serializer.serializeString(value, dest) catch return dest.addFmtError(), + .unquoted_url => |value| { + try dest.writeStr("url("); + serializer.serializeUnquotedUrl(value, dest) catch return dest.addFmtError(); + return dest.writeStr(")"); + }, + .delim => |value| { + // See comment for this variant in declaration of Token + // The value of delim is only ever ascii + bun.debugAssert(value <= 0x7F); + return dest.writeChar(@truncate(value)); + }, + .number => |num| serializer.writeNumeric(num.value, num.int_value, num.has_sign, dest) catch return dest.addFmtError(), + .percentage => |num| { + serializer.writeNumeric(num.unit_value * 100, num.int_value, num.has_sign, dest) catch return dest.addFmtError(); + return dest.writeStr("%"); + }, + .dimension => |dim| { + serializer.writeNumeric(dim.num.value, dim.num.int_value, dim.num.has_sign, dest) catch return dest.addFmtError(); + // Disambiguate with scientific notation. 
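+ // Same escaping rule as in `toCssGeneric` above: a unit of "e"/"E", or one
+ // starting with "e-"/"E-", gets its first letter hex-escaped so it cannot
+ // merge into scientific notation.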
+ const unit = dim.unit; + if (std.mem.eql(u8, unit, "e") or std.mem.eql(u8, unit, "E") or + std.mem.startsWith(u8, unit, "e-") or std.mem.startsWith(u8, unit, "E-")) + { + try dest.writeStr("\\65 "); + serializer.serializeName(unit[1..], dest) catch return dest.addFmtError(); + } else { + serializer.serializeIdentifier(unit, dest) catch return dest.addFmtError(); + } + return; + }, + .whitespace => |content| dest.writeStr(content), + .comment => |content| { + try dest.writeStr("/*"); + try dest.writeStr(content); + return dest.writeStr("*/"); + }, + .colon => dest.writeStr(":"), + .semicolon => dest.writeStr(";"), + .comma => dest.writeStr(","), + .include_match => dest.writeStr("~="), + .dash_match => dest.writeStr("|="), + .prefix_match => dest.writeStr("^="), + .suffix_match => dest.writeStr("$="), + .substring_match => dest.writeStr("*="), + .cdo => dest.writeStr(""), + .function => |name| { + serializer.serializeIdentifier(name, dest) catch return dest.addFmtError(); + return dest.writeStr("("); + }, + .open_paren => dest.writeStr("("), + .open_square => dest.writeStr("["), + .open_curly => dest.writeStr("{"), + .bad_url => |contents| { + try dest.writeStr("url("); + try dest.writeStr(contents); + return dest.writeChar(')'); + }, + .bad_string => |value| { + try dest.writeChar('"'); + var writer = serializer.CssStringWriter(*Printer(W)).new(dest); + return writer.writeStr(value) catch return dest.addFmtError(); + }, + .close_paren => dest.writeStr(")"), + .close_square => dest.writeStr("]"), + .close_curly => dest.writeStr("}"), + }; + } +}; + +const Num = struct { + has_sign: bool, + value: f32, + int_value: ?i32, + + pub fn eql(lhs: *const Num, rhs: *const Num) bool { + return implementEql(Num, lhs, rhs); + } + + pub fn hash(this: *const @This(), hasher: *std.hash.Wyhash) void { + return implementHash(@This(), this, hasher); + } +}; + +const Dimension = struct { + num: Num, + /// e.g. "px" + unit: []const u8, + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return implementEql(@This(), lhs, rhs); + } + + pub fn hash(this: *const @This(), hasher: *std.hash.Wyhash) void { + return implementHash(@This(), this, hasher); + } +}; + +const CopyOnWriteStr = union(enum) { + borrowed: []const u8, + owned: std.ArrayList(u8), + + pub fn append(this: *@This(), allocator: Allocator, slice: []const u8) void { + switch (this.*) { + .borrowed => { + var list = std.ArrayList(u8).initCapacity(allocator, this.borrowed.len + slice.len) catch bun.outOfMemory(); + list.appendSliceAssumeCapacity(this.borrowed); + list.appendSliceAssumeCapacity(slice); + this.* = .{ .owned = list }; + }, + .owned => { + this.owned.appendSlice(slice) catch bun.outOfMemory(); + }, + } + } + + pub fn toSlice(this: *@This()) []const u8 { + return switch (this.*) { + .borrowed => this.borrowed, + .owned => this.owned.items[0..], + }; + } +}; + +pub const color = struct { + /// The opaque alpha value of 1.0. + pub const OPAQUE: f32 = 1.0; + + const ColorError = error{ + parse, + }; + + /// Either an angle or a number. + pub const AngleOrNumber = union(enum) { + /// ``. + number: struct { + /// The numeric value parsed, as a float. + value: f32, + }, + /// `` + angle: struct { + /// The value as a number of degrees. 
+ degrees: f32, + }, + }; + + const RGB = struct { u8, u8, u8 }; + pub const named_colors = bun.ComptimeStringMap(RGB, .{ + .{ "aliceblue", .{ 240, 248, 255 } }, + .{ "antiquewhite", .{ 250, 235, 215 } }, + .{ "aqua", .{ 0, 255, 255 } }, + .{ "aquamarine", .{ 127, 255, 212 } }, + .{ "azure", .{ 240, 255, 255 } }, + .{ "beige", .{ 245, 245, 220 } }, + .{ "bisque", .{ 255, 228, 196 } }, + .{ "black", .{ 0, 0, 0 } }, + .{ "blanchedalmond", .{ 255, 235, 205 } }, + .{ "blue", .{ 0, 0, 255 } }, + .{ "blueviolet", .{ 138, 43, 226 } }, + .{ "brown", .{ 165, 42, 42 } }, + .{ "burlywood", .{ 222, 184, 135 } }, + .{ "cadetblue", .{ 95, 158, 160 } }, + .{ "chartreuse", .{ 127, 255, 0 } }, + .{ "chocolate", .{ 210, 105, 30 } }, + .{ "coral", .{ 255, 127, 80 } }, + .{ "cornflowerblue", .{ 100, 149, 237 } }, + .{ "cornsilk", .{ 255, 248, 220 } }, + .{ "crimson", .{ 220, 20, 60 } }, + .{ "cyan", .{ 0, 255, 255 } }, + .{ "darkblue", .{ 0, 0, 139 } }, + .{ "darkcyan", .{ 0, 139, 139 } }, + .{ "darkgoldenrod", .{ 184, 134, 11 } }, + .{ "darkgray", .{ 169, 169, 169 } }, + .{ "darkgreen", .{ 0, 100, 0 } }, + .{ "darkgrey", .{ 169, 169, 169 } }, + .{ "darkkhaki", .{ 189, 183, 107 } }, + .{ "darkmagenta", .{ 139, 0, 139 } }, + .{ "darkolivegreen", .{ 85, 107, 47 } }, + .{ "darkorange", .{ 255, 140, 0 } }, + .{ "darkorchid", .{ 153, 50, 204 } }, + .{ "darkred", .{ 139, 0, 0 } }, + .{ "darksalmon", .{ 233, 150, 122 } }, + .{ "darkseagreen", .{ 143, 188, 143 } }, + .{ "darkslateblue", .{ 72, 61, 139 } }, + .{ "darkslategray", .{ 47, 79, 79 } }, + .{ "darkslategrey", .{ 47, 79, 79 } }, + .{ "darkturquoise", .{ 0, 206, 209 } }, + .{ "darkviolet", .{ 148, 0, 211 } }, + .{ "deeppink", .{ 255, 20, 147 } }, + .{ "deepskyblue", .{ 0, 191, 255 } }, + .{ "dimgray", .{ 105, 105, 105 } }, + .{ "dimgrey", .{ 105, 105, 105 } }, + .{ "dodgerblue", .{ 30, 144, 255 } }, + .{ "firebrick", .{ 178, 34, 34 } }, + .{ "floralwhite", .{ 255, 250, 240 } }, + .{ "forestgreen", .{ 34, 139, 34 } }, + .{ "fuchsia", .{ 255, 0, 255 } }, + .{ "gainsboro", .{ 220, 220, 220 } }, + .{ "ghostwhite", .{ 248, 248, 255 } }, + .{ "gold", .{ 255, 215, 0 } }, + .{ "goldenrod", .{ 218, 165, 32 } }, + .{ "gray", .{ 128, 128, 128 } }, + .{ "green", .{ 0, 128, 0 } }, + .{ "greenyellow", .{ 173, 255, 47 } }, + .{ "grey", .{ 128, 128, 128 } }, + .{ "honeydew", .{ 240, 255, 240 } }, + .{ "hotpink", .{ 255, 105, 180 } }, + .{ "indianred", .{ 205, 92, 92 } }, + .{ "indigo", .{ 75, 0, 130 } }, + .{ "ivory", .{ 255, 255, 240 } }, + .{ "khaki", .{ 240, 230, 140 } }, + .{ "lavender", .{ 230, 230, 250 } }, + .{ "lavenderblush", .{ 255, 240, 245 } }, + .{ "lawngreen", .{ 124, 252, 0 } }, + .{ "lemonchiffon", .{ 255, 250, 205 } }, + .{ "lightblue", .{ 173, 216, 230 } }, + .{ "lightcoral", .{ 240, 128, 128 } }, + .{ "lightcyan", .{ 224, 255, 255 } }, + .{ "lightgoldenrodyellow", .{ 250, 250, 210 } }, + .{ "lightgray", .{ 211, 211, 211 } }, + .{ "lightgreen", .{ 144, 238, 144 } }, + .{ "lightgrey", .{ 211, 211, 211 } }, + .{ "lightpink", .{ 255, 182, 193 } }, + .{ "lightsalmon", .{ 255, 160, 122 } }, + .{ "lightseagreen", .{ 32, 178, 170 } }, + .{ "lightskyblue", .{ 135, 206, 250 } }, + .{ "lightslategray", .{ 119, 136, 153 } }, + .{ "lightslategrey", .{ 119, 136, 153 } }, + .{ "lightsteelblue", .{ 176, 196, 222 } }, + .{ "lightyellow", .{ 255, 255, 224 } }, + .{ "lime", .{ 0, 255, 0 } }, + .{ "limegreen", .{ 50, 205, 50 } }, + .{ "linen", .{ 250, 240, 230 } }, + .{ "magenta", .{ 255, 0, 255 } }, + .{ "maroon", .{ 128, 0, 0 } }, + .{ "mediumaquamarine", .{ 102, 205, 170 } 
}, + .{ "mediumblue", .{ 0, 0, 205 } }, + .{ "mediumorchid", .{ 186, 85, 211 } }, + .{ "mediumpurple", .{ 147, 112, 219 } }, + .{ "mediumseagreen", .{ 60, 179, 113 } }, + .{ "mediumslateblue", .{ 123, 104, 238 } }, + .{ "mediumspringgreen", .{ 0, 250, 154 } }, + .{ "mediumturquoise", .{ 72, 209, 204 } }, + .{ "mediumvioletred", .{ 199, 21, 133 } }, + .{ "midnightblue", .{ 25, 25, 112 } }, + .{ "mintcream", .{ 245, 255, 250 } }, + .{ "mistyrose", .{ 255, 228, 225 } }, + .{ "moccasin", .{ 255, 228, 181 } }, + .{ "navajowhite", .{ 255, 222, 173 } }, + .{ "navy", .{ 0, 0, 128 } }, + .{ "oldlace", .{ 253, 245, 230 } }, + .{ "olive", .{ 128, 128, 0 } }, + .{ "olivedrab", .{ 107, 142, 35 } }, + .{ "orange", .{ 255, 165, 0 } }, + .{ "orangered", .{ 255, 69, 0 } }, + .{ "orchid", .{ 218, 112, 214 } }, + .{ "palegoldenrod", .{ 238, 232, 170 } }, + .{ "palegreen", .{ 152, 251, 152 } }, + .{ "paleturquoise", .{ 175, 238, 238 } }, + .{ "palevioletred", .{ 219, 112, 147 } }, + .{ "papayawhip", .{ 255, 239, 213 } }, + .{ "peachpuff", .{ 255, 218, 185 } }, + .{ "peru", .{ 205, 133, 63 } }, + .{ "pink", .{ 255, 192, 203 } }, + .{ "plum", .{ 221, 160, 221 } }, + .{ "powderblue", .{ 176, 224, 230 } }, + .{ "purple", .{ 128, 0, 128 } }, + .{ "rebeccapurple", .{ 102, 51, 153 } }, + .{ "red", .{ 255, 0, 0 } }, + .{ "rosybrown", .{ 188, 143, 143 } }, + .{ "royalblue", .{ 65, 105, 225 } }, + .{ "saddlebrown", .{ 139, 69, 19 } }, + .{ "salmon", .{ 250, 128, 114 } }, + .{ "sandybrown", .{ 244, 164, 96 } }, + .{ "seagreen", .{ 46, 139, 87 } }, + .{ "seashell", .{ 255, 245, 238 } }, + .{ "sienna", .{ 160, 82, 45 } }, + .{ "silver", .{ 192, 192, 192 } }, + .{ "skyblue", .{ 135, 206, 235 } }, + .{ "slateblue", .{ 106, 90, 205 } }, + .{ "slategray", .{ 112, 128, 144 } }, + .{ "slategrey", .{ 112, 128, 144 } }, + .{ "snow", .{ 255, 250, 250 } }, + .{ "springgreen", .{ 0, 255, 127 } }, + .{ "steelblue", .{ 70, 130, 180 } }, + .{ "tan", .{ 210, 180, 140 } }, + .{ "teal", .{ 0, 128, 128 } }, + .{ "thistle", .{ 216, 191, 216 } }, + .{ "tomato", .{ 255, 99, 71 } }, + .{ "turquoise", .{ 64, 224, 208 } }, + .{ "violet", .{ 238, 130, 238 } }, + .{ "wheat", .{ 245, 222, 179 } }, + .{ "white", .{ 255, 255, 255 } }, + .{ "whitesmoke", .{ 245, 245, 245 } }, + .{ "yellow", .{ 255, 255, 0 } }, + .{ "yellowgreen", .{ 154, 205, 50 } }, + }); + + /// Returns the named color with the given name. + /// + pub fn parseNamedColor(ident: []const u8) ?struct { u8, u8, u8 } { + return named_colors.get(ident); + } + + /// Parse a color hash, without the leading '#' character. 
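+ /// Accepts 3, 4, 6, or 8 hex digits and returns RGB plus an alpha in [0, 1];
+ /// e.g. "fff" expands to (255, 255, 255, 1.0) and "00000080" gives
+ /// (0, 0, 0) with alpha 128/255. Returns null for any other length or a
+ /// non-hex digit.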
+ pub fn parseHashColor(value: []const u8) ?struct { u8, u8, u8, f32 } { + return parseHashColorImpl(value) catch return null; + } + + pub fn parseHashColorImpl(value: []const u8) ColorError!struct { u8, u8, u8, f32 } { + return switch (value.len) { + 8 => .{ + (try fromHex(value[0])) * 16 + (try fromHex(value[1])), + (try fromHex(value[2])) * 16 + (try fromHex(value[3])), + (try fromHex(value[4])) * 16 + (try fromHex(value[5])), + @as(f32, @floatFromInt((try fromHex(value[6])) * 16 + (try fromHex(value[7])))) / 255.0, + }, + 6 => { + const r = (try fromHex(value[0])) * 16 + (try fromHex(value[1])); + const g = (try fromHex(value[2])) * 16 + (try fromHex(value[3])); + const b = (try fromHex(value[4])) * 16 + (try fromHex(value[5])); + return .{ + r, g, b, + + OPAQUE, + }; + }, + 4 => .{ + (try fromHex(value[0])) * 17, + (try fromHex(value[1])) * 17, + (try fromHex(value[2])) * 17, + @as(f32, @floatFromInt((try fromHex(value[3])) * 17)) / 255.0, + }, + 3 => .{ + (try fromHex(value[0])) * 17, + (try fromHex(value[1])) * 17, + (try fromHex(value[2])) * 17, + OPAQUE, + }, + else => ColorError.parse, + }; + } + + pub fn fromHex(c: u8) ColorError!u8 { + return switch (c) { + '0'...'9' => c - '0', + 'a'...'f' => c - 'a' + 10, + 'A'...'F' => c - 'A' + 10, + else => ColorError.parse, + }; + } + + /// + /// except with h pre-multiplied by 3, to avoid some rounding errors. + pub fn hslToRgb(hue: f32, saturation: f32, lightness: f32) struct { f32, f32, f32 } { + bun.debugAssert(saturation >= 0.0 and saturation <= 1.0); + const Helpers = struct { + pub fn hueToRgb(m1: f32, m2: f32, _h3: f32) f32 { + var h3 = _h3; + if (h3 < 0.0) { + h3 += 3.0; + } + if (h3 > 3.0) { + h3 -= 3.0; + } + if (h3 * 2.0 < 1.0) { + return m1 + (m2 - m1) * h3 * 2.0; + } else if (h3 * 2.0 < 3.0) { + return m2; + } else if (h3 < 2.0) { + return m1 + (m2 - m1) * (2.0 - h3) * 2.0; + } else { + return m1; + } + } + }; + const m2 = if (lightness <= 0.5) + lightness * (saturation + 1.0) + else + lightness + saturation - lightness * saturation; + const m1 = lightness * 2.0 - m2; + const hue_times_3 = hue * 3.0; + const red = Helpers.hueToRgb(m1, m2, hue_times_3 + 1.0); + const green = Helpers.hueToRgb(m1, m2, hue_times_3); + const blue = Helpers.hueToRgb(m1, m2, hue_times_3 - 1.0); + return .{ red, green, blue }; + } +}; + +// pub const Bitflags + +pub const serializer = struct { + /// Write a CSS name, like a custom property name. + /// + /// You should only use this when you know what you're doing, when in doubt, + /// consider using `serialize_identifier`. + pub fn serializeName(value: []const u8, writer: anytype) !void { + var chunk_start: usize = 0; + for (value, 0..) |b, i| { + const escaped: ?[]const u8 = switch (b) { + '0'...'9', 'A'...'Z', 'a'...'z', '_', '-' => continue, + // the unicode replacement character + 0 => bun.strings.encodeUTF8Comptime(0xFFD), + else => if (!std.ascii.isASCII(b)) continue else null, + }; + + try writer.writeAll(value[chunk_start..i]); + if (escaped) |esc| { + try writer.writeAll(esc); + } else if ((b >= 0x01 and b <= 0x1F) or b == 0x7F) { + try hexEscape(b, writer); + } else { + try charEscape(b, writer); + } + chunk_start = i + 1; + } + return writer.writeAll(value[chunk_start..]); + } + + /// Write a double-quoted CSS string token, escaping content as necessary. 
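+ /// Double quotes and backslashes are backslash-escaped, NUL is written as the
+ /// replacement character, other control characters become hex escapes, and
+ /// everything else passes through unchanged.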
+ pub fn serializeString(value: []const u8, writer: anytype) !void { + try writer.writeAll("\""); + var string_writer = CssStringWriter(@TypeOf(writer)).new(writer); + try string_writer.writeStr(value); + return writer.writeAll("\""); + } + + pub fn serializeDimension(value: f32, unit: []const u8, comptime W: type, dest: *Printer(W)) PrintErr!void { + const int_value: ?i32 = if (fract(value) == 0.0) @intFromFloat(value) else null; + const token = Token{ .dimension = .{ + .num = .{ + .has_sign = value < 0.0, + .value = value, + .int_value = int_value, + }, + .unit = unit, + } }; + if (value != 0.0 and @abs(value) < 1.0) { + // TODO: calculate the actual number of chars here + var buf: [64]u8 = undefined; + var fbs = std.io.fixedBufferStream(&buf); + token.toCssGeneric(fbs.writer()) catch return dest.addFmtError(); + const s = fbs.getWritten(); + if (value < 0.0) { + try dest.writeStr("-"); + return dest.writeStr(bun.strings.trimLeadingPattern2(s, '-', '0')); + } else { + return dest.writeStr(bun.strings.trimLeadingChar(s, '0')); + } + } else { + return token.toCssGeneric(dest) catch return dest.addFmtError(); + } + } + + /// Write a CSS identifier, escaping characters as necessary. + pub fn serializeIdentifier(value: []const u8, writer: anytype) !void { + if (value.len == 0) { + return; + } + + if (bun.strings.startsWith(value, "--")) { + try writer.writeAll("--"); + return serializeName(value[2..], writer); + } else if (bun.strings.eql(value, "-")) { + return writer.writeAll("\\-"); + } else { + var slice = value; + if (slice[0] == '-') { + try writer.writeAll("-"); + slice = slice[1..]; + } + if (slice.len > 0 and slice[0] >= '0' and slice[0] <= '9') { + try hexEscape(slice[0], writer); + slice = slice[1..]; + } + return serializeName(slice, writer); + } + } + + pub fn serializeUnquotedUrl(value: []const u8, writer: anytype) !void { + var chunk_start: usize = 0; + for (value, 0..) |b, i| { + const hex = switch (b) { + 0...' ', 0x7F => true, + '(', ')', '"', '\'', '\\' => false, + else => continue, + }; + try writer.writeAll(value[chunk_start..i]); + if (hex) { + try hexEscape(b, writer); + } else { + try charEscape(b, writer); + } + chunk_start = i + 1; + } + return writer.writeAll(value[chunk_start..]); + } + + // pub fn writeNumeric(value: f32, int_value: ?i32, has_sign: bool, writer: anytype) !void { + // // `value >= 0` is true for negative 0. + // if (has_sign and !std.math.signbit(value)) { + // try writer.writeAll("+"); + // } + + // if (value == 0.0 and signfns.isSignNegative(value)) { + // // Negative zero. Work around #20596. + // try writer.writeAll("-0"); + // if (int_value == null and @mod(value, 1) == 0) { + // try writer.writeAll(".0"); + // } + // } else { + // var buf: [124]u8 = undefined; + // const bytes = bun.fmt.FormatDouble.dtoa(&buf, @floatCast(value)); + // try writer.writeAll(bytes); + // } + // } + + pub fn writeNumeric(value: f32, int_value: ?i32, has_sign: bool, writer: anytype) !void { + // `value >= 0` is true for negative 0. + if (has_sign and !std.math.signbit(value)) { + try writer.writeAll("+"); + } + + const notation: Notation = if (value == 0.0 and std.math.signbit(value)) notation: { + // Negative zero. Work around #20596. 
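+ // "-0" is written by hand and reported as having neither a decimal point
+ // nor an exponent; the check below may still append ".0".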
+ try writer.writeAll("-0"); + break :notation Notation{ + .decimal_point = false, + .scientific = false, + }; + } else notation: { + var buf: [129]u8 = undefined; + const str, const notation = dtoa_short(&buf, value, 6); + try writer.writeAll(str); + break :notation notation; + }; + + if (int_value == null and fract(value) == 0) { + if (!notation.decimal_point and !notation.scientific) { + try writer.writeAll(".0"); + } + } + + return; + } + + pub fn hexEscape(ascii_byte: u8, writer: anytype) !void { + const HEX_DIGITS = "0123456789abcdef"; + var bytes: [4]u8 = undefined; + const slice: []const u8 = if (ascii_byte > 0x0F) slice: { + const high: usize = @intCast(ascii_byte >> 4); + const low: usize = @intCast(ascii_byte & 0x0F); + bytes[0] = '\\'; + bytes[1] = HEX_DIGITS[high]; + bytes[2] = HEX_DIGITS[low]; + bytes[3] = ' '; + break :slice bytes[0..4]; + } else slice: { + bytes[0] = '\\'; + bytes[1] = HEX_DIGITS[ascii_byte]; + bytes[2] = ' '; + break :slice bytes[0..3]; + }; + return writer.writeAll(slice); + } + + pub fn charEscape(ascii_byte: u8, writer: anytype) !void { + const bytes = [_]u8{ '\\', ascii_byte }; + return writer.writeAll(&bytes); + } + + pub fn CssStringWriter(comptime W: type) type { + return struct { + inner: W, + + /// Wrap a text writer to create a `CssStringWriter`. + pub fn new(inner: W) @This() { + return .{ .inner = inner }; + } + + pub fn writeStr(this: *@This(), str: []const u8) !void { + var chunk_start: usize = 0; + for (str, 0..) |b, i| { + const escaped = switch (b) { + '"' => "\\\"", + '\\' => "\\\\", + // replacement character + 0 => bun.strings.encodeUTF8Comptime(0xFFD), + 0x01...0x1F, 0x7F => null, + else => continue, + }; + try this.inner.writeAll(str[chunk_start..i]); + if (escaped) |e| { + try this.inner.writeAll(e); + } else { + try serializer.hexEscape(b, this.inner); + } + chunk_start = i + 1; + } + return this.inner.writeAll(str[chunk_start..]); + } + }; + } +}; + +pub inline fn implementDeepClone(comptime T: type, this: *const T, allocator: Allocator) T { + const tyinfo = @typeInfo(T); + + if (comptime bun.meta.isSimpleCopyType(T)) { + return this.*; + } + + if (comptime bun.meta.looksLikeListContainerType(T)) |result| { + return switch (result) { + .array_list => deepClone(result.child, allocator, this), + .baby_list => @panic("Not implemented."), + .small_list => this.deepClone(allocator), + }; + } + + if (comptime T == []const u8) { + return this.*; + } + + if (comptime @typeInfo(T) == .Pointer) { + const TT = std.meta.Child(T); + return implementEql(TT, this.*); + } + + return switch (tyinfo) { + .Struct => { + var strct: T = undefined; + inline for (tyinfo.Struct.fields) |field| { + if (comptime generic.canTransitivelyImplementDeepClone(field.type) and @hasDecl(field.type, "__generateDeepClone")) { + @field(strct, field.name) = implementDeepClone(field.type, &field(this, field.name, allocator)); + } else { + @field(strct, field.name) = generic.deepClone(field.type, &@field(this, field.name), allocator); + } + } + return strct; + }, + .Union => { + inline for (bun.meta.EnumFields(T), tyinfo.Union.fields) |enum_field, union_field| { + if (@intFromEnum(this.*) == enum_field.value) { + if (comptime generic.canTransitivelyImplementDeepClone(union_field.type) and @hasDecl(union_field.type, "__generateDeepClone")) { + return @unionInit(T, enum_field.name, implementDeepClone(union_field.type, &@field(this, enum_field.name), allocator)); + } + return @unionInit(T, enum_field.name, generic.deepClone(union_field.type, &@field(this, enum_field.name), 
allocator)); + } + } + unreachable; + }, + else => @compileError("Unhandled type " ++ @typeName(T)), + }; +} + +/// A function to implement `lhs.eql(&rhs)` for the many types in the CSS parser that needs this. +/// +/// This is the equivalent of doing `#[derive(PartialEq])` in Rust. +/// +/// This function only works on simple types like: +/// - Simple equality types (e.g. integers, floats, strings, enums, etc.) +/// - Types which implement a `.eql(lhs: *const @This(), rhs: *const @This()) bool` function +/// +/// Or compound types composed of simple types such as: +/// - Pointers to simple types +/// - Optional simple types +/// - Structs, Arrays, and Unions +pub fn implementEql(comptime T: type, this: *const T, other: *const T) bool { + const tyinfo = @typeInfo(T); + if (comptime bun.meta.isSimpleEqlType(T)) { + return this.* == other.*; + } + if (comptime T == []const u8) { + return bun.strings.eql(this.*, other.*); + } + if (comptime @typeInfo(T) == .Pointer) { + const TT = std.meta.Child(T); + return implementEql(TT, this.*, other.*); + } + if (comptime @typeInfo(T) == .Optional) { + const TT = std.meta.Child(T); + if (this.* != null and other.* != null) return implementEql(TT, &this.*.?, &other.*.?); + return false; + } + return switch (tyinfo) { + .Optional => @compileError("Handled above, this means Zack wrote a bug."), + .Pointer => @compileError("Handled above, this means Zack wrote a bug."), + .Array => { + const Child = std.meta.Child(T); + if (comptime bun.meta.isSimpleEqlType(Child)) { + return std.mem.eql(Child, &this.*, &other.*); + } + if (this.len != other.len) return false; + if (comptime generic.canTransitivelyImplementEql(Child) and @hasDecl(Child, "__generateEql")) { + for (this.*, other.*) |*a, *b| { + if (!implementEql(Child, &a, &b)) return false; + } + } else { + for (this.*, other.*) |*a, *b| { + if (!generic.eql(Child, a, b)) return false; + } + } + return true; + }, + .Struct => { + inline for (tyinfo.Struct.fields) |field| { + if (!generic.eql(field.type, &@field(this, field.name), &@field(other, field.name))) return false; + } + return true; + }, + .Union => { + if (tyinfo.Union.tag_type == null) @compileError("Unions must have a tag type"); + if (@intFromEnum(this.*) != @intFromEnum(other.*)) return false; + const enum_fields = bun.meta.EnumFields(T); + inline for (enum_fields, std.meta.fields(T)) |enum_field, union_field| { + if (enum_field.value == @intFromEnum(this.*)) { + if (union_field.type != void) { + if (comptime generic.canTransitivelyImplementEql(union_field.type) and @hasDecl(union_field.type, "__generateEql")) { + return implementEql(union_field.type, &@field(this, enum_field.name), &@field(other, enum_field.name)); + } + return generic.eql(union_field.type, &@field(this, enum_field.name), &@field(other, enum_field.name)); + } else { + return true; + } + } + } + unreachable; + }, + else => @compileError("Unsupported type: " ++ @typeName(T)), + }; +} + +pub fn implementHash(comptime T: type, this: *const T, hasher: *std.hash.Wyhash) void { + const tyinfo = @typeInfo(T); + if (comptime T == void) return; + if (comptime bun.meta.isSimpleEqlType(T)) { + return hasher.update(std.mem.asBytes(&this)); + } + if (comptime T == []const u8) { + return hasher.update(this.*); + } + if (comptime @typeInfo(T) == .Pointer) { + @compileError("Invalid type for implementHash(): " ++ @typeName(T)); + } + if (comptime @typeInfo(T) == .Optional) { + @compileError("Invalid type for implementHash(): " ++ @typeName(T)); + } + return switch (tyinfo) { + .Optional => 
unreachable, + .Pointer => unreachable, + .Array => { + if (comptime @typeInfo(T) == .Optional) { + @compileError("Invalid type for implementHash(): " ++ @typeName(T)); + } + }, + .Struct => { + inline for (tyinfo.Struct.fields) |field| { + if (comptime generic.hasHash(field.type)) { + generic.hash(field.type, &@field(this, field.name), hasher); + } else if (@hasDecl(field.type, "__generateHash") and @typeInfo(field.type) == .Struct) { + implementHash(field.type, &@field(this, field.name), hasher); + } else { + @compileError("Can't hash these fields: " ++ @typeName(field.type) ++ ". On " ++ @typeName(T)); + } + } + return; + }, + .Union => { + if (tyinfo.Union.tag_type == null) @compileError("Unions must have a tag type"); + const enum_fields = bun.meta.EnumFields(T); + inline for (enum_fields, std.meta.fields(T)) |enum_field, union_field| { + if (enum_field.value == @intFromEnum(this.*)) { + const field = union_field; + if (comptime generic.hasHash(field.type)) { + generic.hash(field.type, &@field(this, field.name), hasher); + } else if (@hasDecl(field.type, "__generateHash") and @typeInfo(field.type) == .Struct) { + implementHash(field.type, &@field(this, field.name), hasher); + } else { + @compileError("Can't hash these fields: " ++ @typeName(field.type) ++ ". On " ++ @typeName(T)); + } + } + } + return; + }, + else => @compileError("Unsupported type: " ++ @typeName(T)), + }; +} + +pub const parse_utility = struct { + /// Parse a value from a string. + /// + /// (This is a convenience wrapper for `parse` and probably should not be overridden.) + /// + /// NOTE: `input` should live as long as the returned value. Otherwise, strings in the + /// returned parsed value will point to undefined memory. + pub fn parseString( + allocator: Allocator, + comptime T: type, + input: []const u8, + comptime parse_one: *const fn (*Parser) Result(T), + ) Result(T) { + // I hope this is okay + var import_records = bun.BabyList(bun.ImportRecord){}; + defer import_records.deinitWithAllocator(allocator); + var i = ParserInput.new(allocator, input); + var parser = Parser.new(&i, &import_records); + const result = switch (parse_one(&parser)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + if (parser.expectExhausted().asErr()) |e| return .{ .err = e }; + return .{ .result = result }; + } +}; + +pub const to_css = struct { + /// Serialize `self` in CSS syntax and return a string. + /// + /// (This is a convenience wrapper for `to_css` and probably should not be overridden.) + pub fn string( + allocator: Allocator, + comptime T: type, + this: *const T, + options: PrinterOptions, + import_records: ?*const bun.BabyList(ImportRecord), + ) PrintErr![]const u8 { + var s = ArrayList(u8){}; + errdefer s.deinit(allocator); + const writer = s.writer(allocator); + const W = @TypeOf(writer); + // PERF: think about how cheap this is to create + var printer = Printer(W).new(allocator, std.ArrayList(u8).init(allocator), writer, options, import_records); + defer printer.deinit(); + switch (T) { + CSSString => try CSSStringFns.toCss(this, W, &printer), + else => try this.toCss(W, &printer), + } + return s.items; + } + + pub fn fromList(comptime T: type, this: *const ArrayList(T), comptime W: type, dest: *Printer(W)) PrintErr!void { + const len = this.items.len; + for (this.items, 0..) 
|*val, idx| { + try val.toCss(W, dest); + if (idx < len - 1) { + try dest.delim(',', false); + } + } + return; + } + + pub fn fromBabyList(comptime T: type, this: *const bun.BabyList(T), comptime W: type, dest: *Printer(W)) PrintErr!void { + const len = this.len; + for (this.sliceConst(), 0..) |*val, idx| { + try val.toCss(W, dest); + if (idx < len - 1) { + try dest.delim(',', false); + } + } + return; + } + + pub fn integer(comptime T: type, this: T, comptime W: type, dest: *Printer(W)) PrintErr!void { + const MAX_LEN = comptime maxDigits(T); + var buf: [MAX_LEN]u8 = undefined; + const str = std.fmt.bufPrint(buf[0..], "{d}", .{this}) catch unreachable; + return dest.writeStr(str); + } + + pub fn float32(this: f32, writer: anytype) !void { + var scratch: [64]u8 = undefined; + // PERF/TODO: Compare this to Rust dtoa-short crate + const floats = std.fmt.formatFloat(scratch[0..], this, .{ + .mode = .decimal, + }) catch unreachable; + return writer.writeAll(floats); + } + + fn maxDigits(comptime T: type) usize { + const max_val = std.math.maxInt(T); + return std.fmt.count("{d}", .{max_val}); + } +}; + +/// Parse `!important`. +/// +/// Typical usage is `input.try_parse(parse_important).is_ok()` +/// at the end of a `DeclarationParser::parse_value` implementation. +pub fn parseImportant(input: *Parser) Result(void) { + if (input.expectDelim('!').asErr()) |e| return .{ .err = e }; + return switch (input.expectIdentMatching("important")) { + .result => |v| .{ .result = v }, + .err => |e| .{ .err = e }, + }; +} + +pub const signfns = struct { + pub inline fn isSignPositive(x: f32) bool { + return !isSignNegative(x); + } + pub inline fn isSignNegative(x: f32) bool { + // IEEE754 says: isSignMinus(x) is true if and only if x has negative sign. isSignMinus + // applies to zeros and NaNs as well. + // SAFETY: This is just transmuting to get the sign bit, it's fine. + return @as(u32, @bitCast(x)) & 0x8000_0000 != 0; + } + /// Returns a number that represents the sign of `self`. 
+ /// + /// - `1.0` if the number is positive, `+0.0` or `INFINITY` + /// - `-1.0` if the number is negative, `-0.0` or `NEG_INFINITY` + /// - NaN if the number is NaN + pub fn signum(x: f32) f32 { + if (std.math.isNan(x)) return std.math.nan(f32); + return copysign(1, x); + } + + pub inline fn signF32(x: f32) f32 { + if (x == 0.0) return if (isSignNegative(x)) 0.0 else -0.0; + return signum(x); + } +}; + +/// TODO(zack) is this correct +/// Copies the sign of `sign` to `self`, returning a new f32 value +pub inline fn copysign(self: f32, sign: f32) f32 { + // Convert both floats to their bit representations + const self_bits = @as(u32, @bitCast(self)); + const sign_bits = @as(u32, @bitCast(sign)); + + // Clear the sign bit of self and combine with the sign bit of sign + const result_bits = (self_bits & 0x7FFFFFFF) | (sign_bits & 0x80000000); + + // Convert the result back to f32 + return @as(f32, @bitCast(result_bits)); +} + +pub fn deepClone(comptime V: type, allocator: Allocator, list: *const ArrayList(V)) ArrayList(V) { + var newlist = ArrayList(V).initCapacity(allocator, list.items.len) catch bun.outOfMemory(); + + for (list.items) |*item| { + newlist.appendAssumeCapacity(generic.deepClone(V, item, allocator)); + } + + return newlist; +} + +pub fn deepDeinit(comptime V: type, allocator: Allocator, list: *ArrayList(V)) void { + if (comptime !@hasDecl(V, "deinit")) return; + for (list.items) |*item| { + item.deinit(allocator); + } + + list.deinit(allocator); +} + +const Notation = struct { + decimal_point: bool, + scientific: bool, + + pub fn integer() Notation { + return .{ + .decimal_point = false, + .scientific = false, + }; + } +}; + +pub fn dtoa_short(buf: *[129]u8, value: f32, comptime precision: u8) struct { []u8, Notation } { + buf[0] = '0'; + const buf_len = bun.fmt.FormatDouble.dtoa(@ptrCast(buf[1..].ptr), @floatCast(value)).len; + return restrict_prec(buf[0 .. buf_len + 1], precision); +} + +fn restrict_prec(buf: []u8, comptime prec: u8) struct { []u8, Notation } { + const len: u8 = @intCast(buf.len); + + // Put a leading zero to capture any carry. + // Caller must prepare an empty byte for us; + bun.debugAssert(buf[0] == '0'); + buf[0] = '0'; + // Remove the sign for now. We will put it back at the end. + const sign = switch (buf[1]) { + '+', '-' => brk: { + const s = buf[1]; + buf[1] = '0'; + break :brk s; + }, + else => null, + }; + + // Locate dot, exponent, and the first significant digit. + var _pos_dot: ?u8 = null; + var pos_exp: ?u8 = null; + var _prec_start: ?u8 = null; + for (1..len) |i| { + if (buf[i] == '.') { + bun.debugAssert(_pos_dot == null); + _pos_dot = @intCast(i); + } else if (buf[i] == 'e') { + pos_exp = @intCast(i); + // We don't change exponent part, so stop here. + break; + } else if (_prec_start == null and buf[i] != '0') { + bun.debugAssert(buf[i] >= '1' and buf[i] <= '9'); + _prec_start = @intCast(i); + } + } + + const prec_start = if (_prec_start) |i| + i + else + // If there is no non-zero digit at all, it is just zero. + return .{ + buf[0..1], + Notation.integer(), + }; + + // Coefficient part ends at 'e' or the length. + const coeff_end = pos_exp orelse len; + // Decimal dot is effectively at the end of coefficient part if no + // dot presents before that. + const had_pos_dot = _pos_dot != null; + const pos_dot = _pos_dot orelse coeff_end; + // Find the end position of the number within the given precision. 
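+ // The decimal dot does not count towards the precision, so when it falls
+ // inside the kept digits the end index is pushed out by one.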
+ const prec_end: u8 = brk: { + const end = prec_start + prec; + break :brk if (pos_dot > prec_start and pos_dot <= end) end + 1 else end; + }; + var new_coeff_end = coeff_end; + if (prec_end < coeff_end) { + // Round to the given precision. + const next_char = buf[prec_end]; + new_coeff_end = prec_end; + if (next_char >= '5') { + var i = prec_end; + while (i != 0) { + i -= 1; + if (buf[i] == '.') { + continue; + } + if (buf[i] != '9') { + buf[i] += 1; + new_coeff_end = i + 1; + break; + } + buf[i] = '0'; + } + } + } + if (new_coeff_end < pos_dot) { + // If the precision isn't enough to reach the dot, set all digits + // in-between to zero and keep the number until the dot. + for (new_coeff_end..pos_dot) |i| { + buf[i] = '0'; + } + new_coeff_end = pos_dot; + } else if (had_pos_dot) { + // Strip any trailing zeros. + var i = new_coeff_end; + while (i != 0) { + i -= 1; + if (buf[i] != '0') { + if (buf[i] == '.') { + new_coeff_end = i; + } + break; + } + new_coeff_end = i; + } + } + // Move exponent part if necessary. + const real_end = if (pos_exp) |posexp| brk: { + const exp_len = len - posexp; + if (new_coeff_end != posexp) { + for (0..exp_len) |i| { + buf[new_coeff_end + i] = buf[posexp + i]; + } + } + break :brk new_coeff_end + exp_len; + } else new_coeff_end; + // Add back the sign and strip the leading zero. + const result = if (sign) |sgn| brk: { + if (buf[1] == '0' and buf[2] != '.') { + buf[1] = sgn; + break :brk buf[1..real_end]; + } + bun.debugAssert(buf[0] == '0'); + buf[0] = sgn; + break :brk buf[0..real_end]; + } else brk: { + if (buf[0] == '0' and buf[1] != '.') { + break :brk buf[1..real_end]; + } + break :brk buf[0..real_end]; + }; + // Generate the notation info. + const notation = Notation{ + .decimal_point = pos_dot < new_coeff_end, + .scientific = pos_exp != null, + }; + return .{ result, notation }; +} + +pub inline fn fract(val: f32) f32 { + return val - @trunc(val); +} diff --git a/src/css/declaration.zig b/src/css/declaration.zig new file mode 100644 index 0000000000..f84c4f9c11 --- /dev/null +++ b/src/css/declaration.zig @@ -0,0 +1,379 @@ +const std = @import("std"); +const Allocator = std.mem.Allocator; +const bun = @import("root").bun; +const logger = bun.logger; +const Log = logger.Log; + +pub const css = @import("./css_parser.zig"); +pub const Error = css.Error; +const Printer = css.Printer; +const PrintErr = css.PrintErr; +const PrintResult = css.PrintResult; +const Result = css.Result; + +const ArrayList = std.ArrayListUnmanaged; +pub const DeclarationList = ArrayList(css.Property); + +const BackgroundHandler = css.css_properties.background.BackgroundHandler; +const FallbackHandler = css.css_properties.prefix_handler.FallbackHandler; +const MarginHandler = css.css_properties.margin_padding.MarginHandler; +const PaddingHandler = css.css_properties.margin_padding.PaddingHandler; +const ScrollMarginHandler = css.css_properties.margin_padding.ScrollMarginHandler; +const InsetHandler = css.css_properties.margin_padding.InsetHandler; +const SizeHandler = css.css_properties.size.SizeHandler; + +/// A CSS declaration block. +/// +/// Properties are separated into a list of `!important` declararations, +/// and a list of normal declarations. This reduces memory usage compared +/// with storing a boolean along with each property. +/// +/// TODO: multiarraylist will probably be faster here, as it makes one allocation +/// instead of two. +pub const DeclarationBlock = struct { + /// A list of `!important` declarations in the block. 
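+ /// For `color: red !important; width: 10px`, `color` is stored here while
+ /// `width` goes into `declarations` below.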
+ important_declarations: ArrayList(css.Property) = .{}, + /// A list of normal declarations in the block. + declarations: ArrayList(css.Property) = .{}, + + const This = @This(); + + const DebugFmt = struct { + self: *const DeclarationBlock, + + pub fn format(this: @This(), comptime fmt: []const u8, options: std.fmt.FormatOptions, writer: anytype) !void { + _ = fmt; // autofix + _ = options; // autofix + var arraylist = ArrayList(u8){}; + const w = arraylist.writer(bun.default_allocator); + defer arraylist.deinit(bun.default_allocator); + var printer = css.Printer(@TypeOf(w)).new(bun.default_allocator, std.ArrayList(u8).init(bun.default_allocator), w, .{}, null); + defer printer.deinit(); + this.self.toCss(@TypeOf(w), &printer) catch |e| return try writer.print("\n", .{@errorName(e)}); + try writer.writeAll(arraylist.items); + } + }; + + pub fn debug(this: *const @This()) DebugFmt { + return DebugFmt{ .self = this }; + } + + pub fn isEmpty(this: *const This) bool { + return this.declarations.items.len == 0 and this.important_declarations.items.len == 0; + } + + pub fn parse(input: *css.Parser, options: *const css.ParserOptions) Result(DeclarationBlock) { + var important_declarations = DeclarationList{}; + var declarations = DeclarationList{}; + var decl_parser = PropertyDeclarationParser{ + .important_declarations = &important_declarations, + .declarations = &declarations, + .options = options, + }; + errdefer decl_parser.deinit(); + + var parser = css.RuleBodyParser(PropertyDeclarationParser).new(input, &decl_parser); + + while (parser.next()) |res| { + if (res.asErr()) |e| { + if (options.error_recovery) { + options.warn(e); + continue; + } + return .{ .err = e }; + } + } + + return .{ .result = DeclarationBlock{ + .important_declarations = important_declarations, + .declarations = declarations, + } }; + } + + pub fn len(this: *const DeclarationBlock) usize { + return this.declarations.items.len + this.important_declarations.items.len; + } + + pub fn toCss(this: *const This, comptime W: type, dest: *Printer(W)) PrintErr!void { + const length = this.len(); + var i: usize = 0; + + const DECLS: []const []const u8 = &[_][]const u8{ "declarations", "important_declarations" }; + + inline for (DECLS) |decl_field_name| { + const decls = &@field(this, decl_field_name); + const is_important = comptime std.mem.eql(u8, decl_field_name, "important_declarations"); + + for (decls.items) |*decl| { + try decl.toCss(W, dest, is_important); + if (i != length - 1) { + try dest.writeChar(';'); + try dest.whitespace(); + } + i += 1; + } + } + + return; + } + + /// Writes the declarations to a CSS block, including starting and ending braces. 
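+ /// Normal declarations are printed before the `!important` ones, one per
+ /// line, each terminated by `;` (the trailing `;` is dropped when minifying).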
+ pub fn toCssBlock(this: *const This, comptime W: type, dest: *Printer(W)) PrintErr!void { + try dest.whitespace(); + try dest.writeChar('{'); + dest.indent(); + + var i: usize = 0; + const length = this.len(); + + const DECLS: []const []const u8 = &[_][]const u8{ "declarations", "important_declarations" }; + + inline for (DECLS) |decl_field_name| { + const decls = &@field(this, decl_field_name); + const is_important = comptime std.mem.eql(u8, decl_field_name, "important_declarations"); + for (decls.items) |*decl| { + try dest.newline(); + try decl.toCss(W, dest, is_important); + if (i != length - 1 or !dest.minify) { + try dest.writeChar(';'); + } + i += 1; + } + } + + dest.dedent(); + try dest.newline(); + return dest.writeChar('}'); + } + + pub fn minify( + this: *This, + handler: *DeclarationHandler, + important_handler: *DeclarationHandler, + context: *css.PropertyHandlerContext, + ) void { + const handle = struct { + inline fn handle( + self: *This, + ctx: *css.PropertyHandlerContext, + hndlr: *DeclarationHandler, + comptime decl_field: []const u8, + comptime important: bool, + ) void { + for (@field(self, decl_field).items) |*prop| { + ctx.is_important = important; + + const handled = hndlr.handleProperty(prop, ctx); + + if (!handled) { + hndlr.decls.append(ctx.allocator, prop.*) catch bun.outOfMemory(); + // replacing with a property which does not require allocation + // to "delete" + prop.* = css.Property{ .all = .@"revert-layer" }; + } + } + } + }.handle; + + handle(this, context, important_handler, "important_declarations", true); + handle(this, context, handler, "declarations", false); + + handler.finalize(context); + important_handler.finalize(context); + var old_import = this.important_declarations; + var old_declarations = this.declarations; + this.important_declarations = .{}; + this.declarations = .{}; + defer { + old_import.deinit(context.allocator); + old_declarations.deinit(context.allocator); + } + this.important_declarations = important_handler.decls; + this.declarations = handler.decls; + important_handler.decls = .{}; + handler.decls = .{}; + } + + pub fn hashPropertyIds(this: *const @This(), hasher: *std.hash.Wyhash) void { + for (this.declarations.items) |*decl| { + decl.propertyId().hash(hasher); + } + + for (this.important_declarations.items) |*decl| { + decl.propertyId().hash(hasher); + } + } + + pub fn eql(this: *const This, other: *const This) bool { + return css.implementEql(@This(), this, other); + } + + pub fn deepClone(this: *const This, allocator: std.mem.Allocator) This { + return css.implementDeepClone(@This(), this, allocator); + } +}; + +pub const PropertyDeclarationParser = struct { + important_declarations: *ArrayList(css.Property), + declarations: *ArrayList(css.Property), + options: *const css.ParserOptions, + + const This = @This(); + + pub const AtRuleParser = struct { + pub const Prelude = void; + pub const AtRule = void; + + pub fn parsePrelude(_: *This, name: []const u8, input: *css.Parser) Result(Prelude) { + return .{ + .err = input.newError(css.BasicParseErrorKind{ .at_rule_invalid = name }), + }; + } + + pub fn parseBlock(_: *This, _: Prelude, _: *const css.ParserState, input: *css.Parser) Result(AtRule) { + return .{ .err = input.newError(css.BasicParseErrorKind.at_rule_body_invalid) }; + } + + pub fn ruleWithoutBlock(_: *This, _: Prelude, _: *const css.ParserState) css.Maybe(AtRule, void) { + return .{ .err = {} }; + } + }; + + pub const QualifiedRuleParser = struct { + pub const Prelude = void; + pub const QualifiedRule = void; + + 
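+ /// Qualified rules are not valid inside a declaration block, so any
+ /// prelude is rejected as `qualified_rule_invalid`.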
pub fn parsePrelude(this: *This, input: *css.Parser) Result(Prelude) { + _ = this; // autofix + return .{ .err = input.newError(css.BasicParseErrorKind.qualified_rule_invalid) }; + } + + pub fn parseBlock(this: *This, prelude: Prelude, start: *const css.ParserState, input: *css.Parser) Result(QualifiedRule) { + _ = this; // autofix + _ = prelude; // autofix + _ = start; // autofix + return .{ .err = input.newError(css.BasicParseErrorKind.qualified_rule_invalid) }; + } + }; + + pub const DeclarationParser = struct { + pub const Declaration = void; + + pub fn parseValue(this: *This, name: []const u8, input: *css.Parser) Result(Declaration) { + return parse_declaration( + name, + input, + this.declarations, + this.important_declarations, + this.options, + ); + } + }; + + pub const RuleBodyItemParser = struct { + pub fn parseQualified(this: *This) bool { + _ = this; // autofix + return false; + } + + pub fn parseDeclarations(this: *This) bool { + _ = this; // autofix + return true; + } + }; +}; + +pub fn parse_declaration( + name: []const u8, + input: *css.Parser, + declarations: *DeclarationList, + important_declarations: *DeclarationList, + options: *const css.ParserOptions, +) Result(void) { + const property_id = css.PropertyId.fromStr(name); + var delimiters = css.Delimiters{ .bang = true }; + if (property_id != .custom or property_id.custom != .custom) { + delimiters.curly_bracket = true; + } + const Closure = struct { + property_id: css.PropertyId, + options: *const css.ParserOptions, + }; + var closure = Closure{ + .property_id = property_id, + .options = options, + }; + const property = switch (input.parseUntilBefore(delimiters, css.Property, &closure, struct { + pub fn parseFn(this: *Closure, input2: *css.Parser) Result(css.Property) { + return css.Property.parse(this.property_id, input2, this.options); + } + }.parseFn)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + const important = input.tryParse(struct { + pub fn parsefn(i: *css.Parser) Result(void) { + if (i.expectDelim('!').asErr()) |e| return .{ .err = e }; + return i.expectIdentMatching("important"); + } + }.parsefn, .{}).isOk(); + if (input.expectExhausted().asErr()) |e| return .{ .err = e }; + if (important) { + important_declarations.append(input.allocator(), property) catch bun.outOfMemory(); + } else { + declarations.append(input.allocator(), property) catch bun.outOfMemory(); + } + + return .{ .result = {} }; +} + +pub const DeclarationHandler = struct { + background: BackgroundHandler = .{}, + size: SizeHandler = .{}, + margin: MarginHandler = .{}, + padding: PaddingHandler = .{}, + scroll_margin: ScrollMarginHandler = .{}, + inset: InsetHandler = .{}, + fallback: FallbackHandler = .{}, + direction: ?css.css_properties.text.Direction, + decls: DeclarationList, + + pub fn finalize(this: *DeclarationHandler, context: *css.PropertyHandlerContext) void { + const allocator = context.allocator; + _ = allocator; // autofix + if (this.direction) |direction| { + this.direction = null; + this.decls.append(context.allocator, css.Property{ .direction = direction }) catch bun.outOfMemory(); + } + // if (this.unicode_bidi) |unicode_bidi| { + // this.unicode_bidi = null; + // this.decls.append(context.allocator, css.Property{ .unicode_bidi = unicode_bidi }) catch bun.outOfMemory(); + // } + + this.background.finalize(&this.decls, context); + this.size.finalize(&this.decls, context); + this.margin.finalize(&this.decls, context); + this.padding.finalize(&this.decls, context); + this.scroll_margin.finalize(&this.decls, 
context); + this.inset.finalize(&this.decls, context); + this.fallback.finalize(&this.decls, context); + } + + pub fn handleProperty(this: *DeclarationHandler, property: *const css.Property, context: *css.PropertyHandlerContext) bool { + // return this.background.handleProperty(property, &this.decls, context); + return this.background.handleProperty(property, &this.decls, context) or + this.size.handleProperty(property, &this.decls, context) or + this.margin.handleProperty(property, &this.decls, context) or + this.padding.handleProperty(property, &this.decls, context) or + this.scroll_margin.handleProperty(property, &this.decls, context) or + this.inset.handleProperty(property, &this.decls, context) or + this.fallback.handleProperty(property, &this.decls, context); + } + + pub fn default() DeclarationHandler { + return .{ + .decls = .{}, + .direction = null, + }; + } +}; diff --git a/src/css/dependencies.zig b/src/css/dependencies.zig new file mode 100644 index 0000000000..7d922244dd --- /dev/null +++ b/src/css/dependencies.zig @@ -0,0 +1,155 @@ +const std = @import("std"); +const Allocator = std.mem.Allocator; +const bun = @import("root").bun; +const logger = bun.logger; +const Log = logger.Log; + +pub const css = @import("./css_parser.zig"); +pub const css_values = @import("./values/values.zig"); +const DashedIdent = css_values.ident.DashedIdent; +const Url = css_values.url.Url; +const Ident = css_values.ident.Ident; +pub const Error = css.Error; +// const Location = css.Location; + +const ArrayList = std.ArrayListUnmanaged; + +/// Options for `analyze_dependencies` in `PrinterOptions`. +pub const DependencyOptions = struct { + /// Whether to remove `@import` rules. + remove_imports: bool, +}; + +/// A dependency. +pub const Dependency = union(enum) { + /// An `@import` dependency. + import: ImportDependency, + /// A `url()` dependency. + url: UrlDependency, +}; + +/// A line and column position within a source file. +pub const Location = struct { + /// The line number, starting from 1. + line: u32, + /// The column number, starting from 1. + column: u32, + + pub fn fromSourceLocation(loc: css.SourceLocation) Location { + return Location{ + .line = loc.line + 1, + .column = loc.column, + }; + } + + pub fn hash(this: *const @This(), hasher: *std.hash.Wyhash) void { + return css.implementHash(@This(), this, hasher); + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } +}; + +/// An `@import` dependency. +pub const ImportDependency = struct { + /// The url to import. + url: []const u8, + /// The placeholder that the URL was replaced with. + placeholder: []const u8, + /// An optional `supports()` condition. + supports: ?[]const u8, + /// A media query. + media: ?[]const u8, + /// The location of the dependency in the source file. + loc: SourceRange, + + pub fn new(allocator: Allocator, rule: *const css.css_rules.import.ImportRule, filename: []const u8) ImportDependency { + const supports = if (rule.supports) |*supports| brk: { + const s = css.to_css.string( + allocator, + css.css_rules.supports.SupportsCondition, + supports, + css.PrinterOptions{}, + null, + ) catch bun.Output.panic( + "Unreachable code: failed to stringify SupportsCondition.\n\nThis is a bug in Bun's CSS printer. 
Please file a bug report at https://github.com/oven-sh/bun/issues/new/choose", + .{}, + ); + break :brk s; + } else null; + + const media = if (rule.media.media_queries.items.len > 0) media: { + const s = css.to_css.string(allocator, css.MediaList, &rule.media, css.PrinterOptions{}, null) catch bun.Output.panic( + "Unreachable code: failed to stringify MediaList.\n\nThis is a bug in Bun's CSS printer. Please file a bug report at https://github.com/oven-sh/bun/issues/new/choose", + .{}, + ); + break :media s; + } else null; + + const placeholder = css.css_modules.hash(allocator, "{s}_{s}", .{ filename, rule.url }, false); + + return ImportDependency{ + // TODO(zack): should we clone this? lightningcss does that + .url = rule.url, + .placeholder = placeholder, + .supports = supports, + .media = media, + .loc = SourceRange.new( + filename, + css.dependencies.Location{ .line = rule.loc.line + 1, .column = rule.loc.column }, + 8, + rule.url.len + 2, + ), // TODO: what about @import url(...)? + }; + } +}; + +/// A `url()` dependency. +pub const UrlDependency = struct { + /// The url of the dependency. + url: []const u8, + /// The placeholder that the URL was replaced with. + placeholder: []const u8, + /// The location of the dependency in the source file. + loc: SourceRange, + + pub fn new(allocator: Allocator, url: *const Url, filename: []const u8, import_records: *const bun.BabyList(bun.ImportRecord)) UrlDependency { + const theurl = import_records.at(url.import_record_idx).path.pretty; + const placeholder = css.css_modules.hash( + allocator, + "{s}_{s}", + .{ filename, theurl }, + false, + ); + return UrlDependency{ + .url = theurl, + .placeholder = placeholder, + .loc = SourceRange.new(filename, url.loc, 4, theurl.len), + }; + } +}; + +/// Represents the range of source code where a dependency was found. +pub const SourceRange = struct { + /// The filename in which the dependency was found. + file_path: []const u8, + /// The starting line and column position of the dependency. + start: Location, + /// The ending line and column position of the dependency. + end: Location, + + pub fn new(filename: []const u8, loc: Location, offset: u32, len: usize) SourceRange { + return SourceRange{ + .file_path = filename, + .start = Location{ + .line = loc.line, + .column = loc.column + offset, + }, + .end = Location{ + .line = loc.line, + .column = loc.column + offset + @as(u32, @intCast(len)) - 1, + }, + }; + } +}; diff --git a/src/css/error.zig b/src/css/error.zig new file mode 100644 index 0000000000..3132aa21a2 --- /dev/null +++ b/src/css/error.zig @@ -0,0 +1,355 @@ +const std = @import("std"); +const Allocator = std.mem.Allocator; +const bun = @import("root").bun; +const logger = bun.logger; +const Log = logger.Log; + +pub const css = @import("./css_parser.zig"); +pub const css_values = @import("./values/values.zig"); +const DashedIdent = css_values.ident.DashedIdent; +const Ident = css_values.ident.Ident; +pub const Error = css.Error; +const Location = css.Location; + +const ArrayList = std.ArrayListUnmanaged; + +/// A printer error. +pub const PrinterError = Err(PrinterErrorKind); + +pub fn fmtPrinterError() PrinterError { + return .{ + .kind = .fmt_error, + .loc = null, + }; +} + +/// An error with a source location. +pub fn Err(comptime T: type) type { + return struct { + /// The type of error that occurred. + kind: T, + /// The location where the error occurred. 
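+ /// Null when no source position is available (e.g. `fmtPrinterError`).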
+ loc: ?ErrorLocation, + + pub fn fromParseError(err: ParseError(ParserError), filename: []const u8) Err(ParserError) { + if (T != ParserError) { + @compileError("Called .fromParseError() when T is not ParserError"); + } + + const kind = switch (err.kind) { + .basic => |b| switch (b) { + .unexpected_token => |t| ParserError{ .unexpected_token = t }, + .end_of_input => ParserError.end_of_input, + .at_rule_invalid => |a| ParserError{ .at_rule_invalid = a }, + .at_rule_body_invalid => ParserError.at_rule_body_invalid, + .qualified_rule_invalid => ParserError.qualified_rule_invalid, + }, + .custom => |c| c, + }; + + return .{ + .kind = kind, + .loc = ErrorLocation{ + .filename = filename, + .line = err.location.line, + .column = err.location.column, + }, + }; + } + }; +} + +/// Extensible parse errors that can be encountered by client parsing implementations. +pub fn ParseError(comptime T: type) type { + return struct { + /// Details of this error + kind: ParserErrorKind(T), + /// Location where this error occurred + location: css.SourceLocation, + + pub fn basic(this: @This()) BasicParseError { + return switch (this.kind) { + .basic => |kind| BasicParseError{ + .kind = kind, + .location = this.location, + }, + .custom => @panic("Not a basic parse error. This is a bug in Bun's css parser."), + }; + } + }; +} + +pub fn ParserErrorKind(comptime T: type) type { + return union(enum) { + /// A fundamental parse error from a built-in parsing routine. + basic: BasicParseErrorKind, + /// A parse error reported by downstream consumer code. + custom: T, + + pub fn format(this: @This(), comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { + return switch (this) { + .basic => |basic| writer.print("basic: {}", .{basic}), + .custom => |custom| writer.print("custom: {}", .{custom}), + }; + } + }; +} + +/// Details about a `BasicParseError` +pub const BasicParseErrorKind = union(enum) { + /// An unexpected token was encountered. + unexpected_token: css.Token, + /// The end of the input was encountered unexpectedly. + end_of_input, + /// An `@` rule was encountered that was invalid. + at_rule_invalid: []const u8, + /// The body of an '@' rule was invalid. + at_rule_body_invalid, + /// A qualified rule was encountered that was invalid. + qualified_rule_invalid, + + pub fn format(this: *const BasicParseErrorKind, comptime fmt: []const u8, opts: std.fmt.FormatOptions, writer: anytype) !void { + _ = fmt; // autofix + _ = opts; // autofix + return switch (this.*) { + .unexpected_token => |token| { + try writer.print("unexpected token: {}", .{token}); + }, + .end_of_input => { + try writer.print("unexpected end of input", .{}); + }, + .at_rule_invalid => |rule| { + try writer.print("invalid @ rule encountered: '@{s}'", .{rule}); + }, + .at_rule_body_invalid => { + // try writer.print("invalid @ body rule encountered: '@{s}'", .{}); + try writer.print("invalid @ body rule encountered", .{}); + }, + .qualified_rule_invalid => { + try writer.print("invalid qualified rule encountered", .{}); + }, + }; + } +}; + +/// A line and column location within a source file. +pub const ErrorLocation = struct { + /// The filename in which the error occurred. + filename: []const u8, + /// The line number, starting from 0. + line: u32, + /// The column number, starting from 1. 
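+    /// Note the asymmetry with `line`: lines are 0-based while columns are 1-based,
+    /// matching `css.SourceLocation`; a hypothetical error at the very first character
+    /// of a file would therefore be reported as line 0, column 1.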
+ column: u32, + + pub fn withFilename(this: ErrorLocation, filename: []const u8) ErrorLocation { + return ErrorLocation{ + .filename = filename, + .line = this.line, + .column = this.column, + }; + } + + pub fn format(this: *const @This(), comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { + try writer.print("{s}:{d}:{d}", .{ this.filename, this.line, this.column }); + } +}; + +/// A printer error type. +pub const PrinterErrorKind = union(enum) { + /// An ambiguous relative `url()` was encountered in a custom property declaration. + ambiguous_url_in_custom_property: struct { + /// The ambiguous URL. + url: []const u8, + }, + /// A [std::fmt::Error](std::fmt::Error) was encountered in the underlying destination. + fmt_error, + /// The CSS modules `composes` property cannot be used within nested rules. + invalid_composes_nesting, + /// The CSS modules `composes` property cannot be used with a simple class selector. + invalid_composes_selector, + /// The CSS modules pattern must end with `[local]` for use in CSS grid. + invalid_css_modules_pattern_in_grid, + no_import_records, +}; + +/// A parser error. +pub const ParserError = union(enum) { + /// An at rule body was invalid. + at_rule_body_invalid, + /// An at rule prelude was invalid. + at_rule_prelude_invalid, + /// An unknown or unsupported at rule was encountered. + at_rule_invalid: []const u8, + /// Unexpectedly encountered the end of input data. + end_of_input, + /// A declaration was invalid. + invalid_declaration, + /// A media query was invalid. + invalid_media_query, + /// Invalid CSS nesting. + invalid_nesting, + /// The @nest rule is deprecated. + deprecated_nest_rule, + /// An invalid selector in an `@page` rule. + invalid_page_selector, + /// An invalid value was encountered. + invalid_value, + /// Invalid qualified rule. + qualified_rule_invalid, + /// A selector was invalid. + selector_error: SelectorError, + /// An `@import` rule was encountered after any rule besides `@charset` or `@layer`. + unexpected_import_rule, + /// A `@namespace` rule was encountered after any rules besides `@charset`, `@import`, or `@layer`. + unexpected_namespace_rule, + /// An unexpected token was encountered. + unexpected_token: css.Token, + /// Maximum nesting depth was reached. + maximum_nesting_depth, + + pub fn format(this: @This(), comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { + return switch (this) { + .at_rule_invalid => |name| writer.print("at_rule_invalid: {s}", .{name}), + .unexpected_token => |token| writer.print("unexpected_token: {}", .{token}), + .selector_error => |err| writer.print("selector_error: {}", .{err}), + else => writer.print("{s}", .{@tagName(this)}), + }; + } +}; + +/// The fundamental parsing errors that can be triggered by built-in parsing routines. +pub const BasicParseError = struct { + /// Details of this error + kind: BasicParseErrorKind, + /// Location where this error occurred + location: css.SourceLocation, + + pub fn intoParseError( + this: @This(), + comptime T: type, + ) ParseError(T) { + return ParseError(T){ + .kind = .{ .basic = this.kind }, + .location = this.location, + }; + } + + pub inline fn intoDefaultParseError( + this: @This(), + ) ParseError(ParserError) { + return ParseError(ParserError){ + .kind = .{ .basic = this.kind }, + .location = this.location, + }; + } +}; + +/// A selector parsing error. +pub const SelectorError = union(enum) { + /// An unexpected token was found in an attribute selector. 
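+    /// Illustrative (hypothetical input): `[href=3]`, where the attribute value is
+    /// neither an identifier nor a quoted string, would surface the offending token here.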
+ bad_value_in_attr: css.Token, + /// An unexpected token was found in a class selector. + class_needs_ident: css.Token, + /// A dangling combinator was found. + dangling_combinator, + /// An empty selector. + empty_selector, + /// A `|` was expected in an attribute selector. + expected_bar_in_attr: css.Token, + /// A namespace was expected. + expected_namespace: []const u8, + /// An unexpected token was encountered in a namespace. + explicit_namespace_unexpected_token: css.Token, + /// An invalid pseudo class was encountered after a pseudo element. + invalid_pseudo_class_after_pseudo_element, + /// An invalid pseudo class was encountered after a `-webkit-scrollbar` pseudo element. + invalid_pseudo_class_after_webkit_scrollbar, + /// A `-webkit-scrollbar` state was encountered before a `-webkit-scrollbar` pseudo element. + invalid_pseudo_class_before_webkit_scrollbar, + /// Invalid qualified name in attribute selector. + invalid_qual_name_in_attr: css.Token, + /// The current token is not allowed in this state. + invalid_state, + /// The selector is required to have the `&` nesting selector at the start. + missing_nesting_prefix, + /// The selector is missing a `&` nesting selector. + missing_nesting_selector, + /// No qualified name in attribute selector. + no_qualified_name_in_attribute_selector: css.Token, + /// An invalid token was encountered in a pseudo element. + pseudo_element_expected_ident: css.Token, + /// An unexpected identifier was encountered. + unexpected_ident: []const u8, + /// An unexpected token was encountered inside an attribute selector. + unexpected_token_in_attribute_selector: css.Token, + /// An unsupported pseudo class or pseudo element was encountered. + unsupported_pseudo_class_or_element: []const u8, + unexpected_selector_after_pseudo_element: css.Token, + + pub fn format(this: @This(), comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { + return switch (this) { + .dangling_combinator, .empty_selector, .invalid_state, .missing_nesting_prefix, .missing_nesting_selector => { + try writer.print("{s}", .{@tagName(this)}); + }, + inline .expected_namespace, .unexpected_ident, .unsupported_pseudo_class_or_element => |str| { + try writer.print("{s}: {s}", .{ @tagName(this), str }); + }, + inline .bad_value_in_attr, + .class_needs_ident, + .expected_bar_in_attr, + .explicit_namespace_unexpected_token, + .invalid_qual_name_in_attr, + .no_qualified_name_in_attribute_selector, + .pseudo_element_expected_ident, + .unexpected_token_in_attribute_selector, + => |tok| { + try writer.print("{s}: {s}", .{ @tagName(this), @tagName(tok) }); + }, + else => try writer.print("{s}", .{@tagName(this)}), + }; + } +}; + +pub fn ErrorWithLocation(comptime T: type) type { + return struct { + kind: T, + loc: css.Location, + }; +} + +pub const MinifyErr = error{ + minify_err, +}; +pub const MinifyError = ErrorWithLocation(MinifyErrorKind); +/// A transformation error. +pub const MinifyErrorKind = union(enum) { + /// A circular `@custom-media` rule was detected. + circular_custom_media: struct { + /// The name of the `@custom-media` rule that was referenced circularly. + name: []const u8, + }, + /// Attempted to reference a custom media rule that doesn't exist. + custom_media_not_defined: struct { + /// The name of the `@custom-media` rule that was not defined. + name: []const u8, + }, + /// Boolean logic with media types in @custom-media rules is not supported. 
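+    /// Illustrative case, not an exhaustive rule: something like
+    /// `@custom-media --desktop screen and (min-width: 1024px);` ties a concrete media
+    /// type to its conditions, and folding that into further boolean logic at the use
+    /// site cannot be expressed, so it is reported with this error.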
+ unsupported_custom_media_boolean_logic: struct { + /// The source location of the `@custom-media` rule with unsupported boolean logic. + custom_media_loc: Location, + }, + + pub fn format(this: *const @This(), comptime _: []const u8, _: anytype, writer: anytype) !void { + return switch (this.*) { + .circular_custom_media => |name| try writer.print("Circular @custom-media rule: \"{s}\"", .{name.name}), + .custom_media_not_defined => |name| try writer.print("Custom media rule \"{s}\" not defined", .{name.name}), + .unsupported_custom_media_boolean_logic => |custom_media_loc| try writer.print( + "Unsupported boolean logic in custom media rule at line {d}, column {d}", + .{ + custom_media_loc.custom_media_loc.line, + custom_media_loc.custom_media_loc.column, + }, + ), + }; + } +}; diff --git a/src/css/generics.zig b/src/css/generics.zig new file mode 100644 index 0000000000..7dc3a08878 --- /dev/null +++ b/src/css/generics.zig @@ -0,0 +1,425 @@ +const std = @import("std"); +const Allocator = std.mem.Allocator; +const bun = @import("root").bun; +const logger = bun.logger; +const Log = logger.Log; + +const ArrayList = std.ArrayListUnmanaged; + +const css = @import("./css_parser.zig"); +const css_values = css.css_values; + +const Parser = css.Parser; +const ParserOptions = css.ParserOptions; +const Result = css.Result; +const Printer = css.Printer; +const PrintErr = css.PrintErr; +const CSSNumber = css.CSSNumber; +const CSSNumberFns = css.CSSNumberFns; +const CSSInteger = css.CSSInteger; +const CSSIntegerFns = css.CSSIntegerFns; +const CustomIdent = css.CustomIdent; +const CustomIdentFns = css.CustomIdentFns; +const DashedIdent = css.DashedIdent; +const DashedIdentFns = css.DashedIdentFns; +const Ident = css.Ident; +const IdentFns = css.IdentFns; + +pub inline fn parseWithOptions(comptime T: type, input: *Parser, options: *const ParserOptions) Result(T) { + if (T != f32 and T != i32 and @hasDecl(T, "parseWithOptions")) return T.parseWithOptions(input, options); + if (comptime bun.meta.looksLikeListContainerType(T)) |result| { + switch (result.list) { + .array_list => return input.parseCommaSeparated(result.child, parseFor(result.child)), + .baby_list => {}, + .small_list => {}, + } + } + return switch (T) { + f32 => CSSNumberFns.parse(input), + CSSInteger => CSSIntegerFns.parse(input), + CustomIdent => CustomIdentFns.parse(input), + DashedIdent => DashedIdentFns.parse(input), + Ident => IdentFns.parse(input), + else => T.parse(input), + }; +} + +pub inline fn parse(comptime T: type, input: *Parser) Result(T) { + if (comptime @typeInfo(T) == .Pointer) { + const TT = std.meta.Child(T); + return switch (parse(TT, input)) { + .result => |v| .{ .result = bun.create(input.allocator(), TT, v) }, + .err => |e| .{ .err = e }, + }; + } + if (comptime @typeInfo(T) == .Optional) { + const TT = std.meta.Child(T); + return .{ .result = parse(TT, input).asValue() }; + } + if (comptime bun.meta.looksLikeListContainerType(T)) |result| { + switch (result.list) { + .array_list => return input.parseCommaSeparated(result.child, parseFor(result.child)), + .baby_list => {}, + .small_list => {}, + } + } + return switch (T) { + f32 => CSSNumberFns.parse(input), + CSSInteger => CSSIntegerFns.parse(input), + CustomIdent => CustomIdentFns.parse(input), + DashedIdent => DashedIdentFns.parse(input), + Ident => IdentFns.parse(input), + else => T.parse(input), + }; +} + +pub inline fn parseFor(comptime T: type) @TypeOf(struct { + fn parsefn(input: *Parser) Result(T) { + return parse(T, input); + } +}.parsefn) { + return struct 
{ + fn parsefn(input: *Parser) Result(T) { + return parse(T, input); + } + }.parsefn; +} + +pub fn hasToCss(comptime T: type) bool { + const tyinfo = @typeInfo(T); + if (comptime T == []const u8) return false; + if (tyinfo == .Pointer) { + const TT = std.meta.Child(T); + return hasToCss(TT); + } + if (tyinfo == .Optional) { + const TT = std.meta.Child(T); + return hasToCss(TT); + } + if (comptime bun.meta.looksLikeListContainerType(T)) |result| { + switch (result.list) { + .array_list => return true, + .baby_list => return true, + .small_list => return true, + } + } + return switch (T) { + f32 => true, + else => @hasDecl(T, "toCss"), + }; +} + +pub inline fn toCss(comptime T: type, this: *const T, comptime W: type, dest: *Printer(W)) PrintErr!void { + if (@typeInfo(T) == .Pointer) { + const TT = std.meta.Child(T); + return toCss(TT, this.*, W, dest); + } + if (@typeInfo(T) == .Optional) { + const TT = std.meta.Child(T); + + if (this.*) |*val| { + return toCss(TT, val, W, dest); + } + return; + } + if (comptime bun.meta.looksLikeListContainerType(T)) |result| { + switch (result.list) { + .array_list => { + return css.to_css.fromList(result.child, this, W, dest); + }, + .baby_list => {}, + .small_list => {}, + } + } + return switch (T) { + f32 => CSSNumberFns.toCss(this, W, dest), + CSSInteger => CSSIntegerFns.toCss(this, W, dest), + CustomIdent => CustomIdentFns.toCss(this, W, dest), + DashedIdent => DashedIdentFns.toCss(this, W, dest), + Ident => IdentFns.toCss(this, W, dest), + else => T.toCss(this, W, dest), + }; +} + +pub fn eqlList(comptime T: type, lhs: *const ArrayList(T), rhs: *const ArrayList(T)) bool { + if (lhs.items.len != rhs.items.len) return false; + for (lhs.items, 0..) |*item, i| { + if (!eql(T, item, &rhs.items[i])) return false; + } + return true; +} + +pub fn canTransitivelyImplementEql(comptime T: type) bool { + return switch (@typeInfo(T)) { + .Struct, .Union => true, + else => false, + }; +} + +pub inline fn eql(comptime T: type, lhs: *const T, rhs: *const T) bool { + const tyinfo = comptime @typeInfo(T); + if (comptime tyinfo == .Pointer) { + if (comptime T == []const u8) return bun.strings.eql(lhs.*, rhs.*); + if (comptime tyinfo.Pointer.size == .One) { + const TT = std.meta.Child(T); + return eql(TT, lhs.*, rhs.*); + } else if (comptime tyinfo.Pointer.size == .Slice) { + if (lhs.*.len != rhs.*.len) return false; + for (lhs.*[0..], rhs.*[0..]) |*a, *b| { + if (!eql(tyinfo.Pointer.child, a, b)) return false; + } + return true; + } else { + @compileError("Unsupported pointer size: " ++ @tagName(tyinfo.Pointer.size) ++ " (" ++ @typeName(T) ++ ")"); + } + } + if (comptime tyinfo == .Optional) { + const TT = std.meta.Child(T); + if (lhs.* != null and rhs.* != null) return eql(TT, &lhs.*.?, &rhs.*.?); + return false; + } + if (comptime bun.meta.isSimpleEqlType(T)) { + return lhs.* == rhs.*; + } + if (comptime bun.meta.looksLikeListContainerType(T)) |result| { + return switch (result.list) { + .array_list => eqlList(result.child, lhs, rhs), + .baby_list => return lhs.eql(rhs), + .small_list => lhs.eql(rhs), + }; + } + return switch (T) { + f32 => lhs.* == rhs.*, + CSSInteger => lhs.* == rhs.*, + CustomIdent, DashedIdent, Ident => bun.strings.eql(lhs.v, rhs.v), + []const u8 => bun.strings.eql(lhs.*, rhs.*), + css.VendorPrefix => css.VendorPrefix.eq(lhs.*, rhs.*), + else => T.eql(lhs, rhs), + }; +} + +pub fn canTransitivelyImplementDeepClone(comptime T: type) bool { + return switch (@typeInfo(T)) { + .Struct, .Union => true, + else => false, + }; +} + +pub inline fn 
deepClone(comptime T: type, this: *const T, allocator: Allocator) T { + const tyinfo = comptime @typeInfo(T); + if (comptime tyinfo == .Pointer) { + if (comptime tyinfo.Pointer.size == .One) { + const TT = std.meta.Child(T); + return bun.create(allocator, TT, deepClone(TT, this.*, allocator)); + } + if (comptime tyinfo.Pointer.size == .Slice) { + var slice = allocator.alloc(tyinfo.Pointer.child, this.len) catch bun.outOfMemory(); + if (comptime bun.meta.isSimpleCopyType(tyinfo.Pointer.child) or tyinfo.Pointer.child == []const u8) { + @memcpy(slice, this.*); + } else { + for (this.*, 0..) |*e, i| { + slice[i] = deepClone(tyinfo.Pointer.child, e, allocator); + } + } + return slice; + } + @compileError("Deep clone not supported for this kind of pointer: " ++ @tagName(tyinfo.Pointer.size) ++ " (" ++ @typeName(T) ++ ")"); + } + if (comptime tyinfo == .Optional) { + const TT = std.meta.Child(T); + if (this.* != null) return deepClone(TT, &this.*.?, allocator); + return null; + } + if (comptime bun.meta.isSimpleCopyType(T)) { + return this.*; + } + if (comptime bun.meta.looksLikeListContainerType(T)) |result| { + return switch (result.list) { + .array_list => css.deepClone(result.child, allocator, this), + .baby_list => { + var ret = bun.BabyList(result.child){ + .ptr = (allocator.alloc(result.child, this.len) catch bun.outOfMemory()).ptr, + .len = this.len, + .cap = this.len, + }; + for (this.sliceConst(), ret.ptr[0..this.len]) |*old, *new| { + new.* = bun.css.generic.deepClone(result.child, old, allocator); + } + return ret; + }, + .small_list => this.deepClone(allocator), + }; + } + // Strings in the CSS parser are always arena allocated + // So it is safe to skip const strings as they will never be mutated + if (comptime T == []const u8) { + return this.*; + } + + if (!@hasDecl(T, "deepClone")) { + @compileError(@typeName(T) ++ " does not have a deepClone() function"); + } + + return T.deepClone(this, allocator); +} + +const Angle = css_values.angle.Angle; +pub inline fn tryFromAngle(comptime T: type, angle: Angle) ?T { + return switch (T) { + CSSNumber => CSSNumberFns.tryFromAngle(angle), + Angle => return Angle.tryFromAngle(angle), + else => T.tryFromAngle(angle), + }; +} + +pub inline fn trySign(comptime T: type, val: *const T) ?f32 { + return switch (T) { + CSSNumber => CSSNumberFns.sign(val), + else => { + if (@hasDecl(T, "sign")) return T.sign(val); + return T.trySign(val); + }, + }; +} + +pub inline fn tryMap( + comptime T: type, + val: *const T, + comptime map_fn: *const fn (a: f32) f32, +) ?T { + return switch (T) { + CSSNumber => map_fn(val.*), + else => { + if (@hasDecl(T, "map")) return T.map(val, map_fn); + return T.tryMap(val, map_fn); + }, + }; +} + +pub inline fn tryOpTo( + comptime T: type, + comptime R: type, + lhs: *const T, + rhs: *const T, + ctx: anytype, + comptime op_fn: *const fn (@TypeOf(ctx), a: f32, b: f32) R, +) ?R { + return switch (T) { + CSSNumber => op_fn(ctx, lhs.*, rhs.*), + else => { + if (@hasDecl(T, "opTo")) return T.opTo(lhs, rhs, R, ctx, op_fn); + return T.tryOpTo(lhs, rhs, R, ctx, op_fn); + }, + }; +} + +pub inline fn tryOp( + comptime T: type, + lhs: *const T, + rhs: *const T, + ctx: anytype, + comptime op_fn: *const fn (@TypeOf(ctx), a: f32, b: f32) f32, +) ?T { + return switch (T) { + Angle => Angle.tryOp(lhs, rhs, ctx, op_fn), + CSSNumber => op_fn(ctx, lhs.*, rhs.*), + else => { + if (@hasDecl(T, "op")) return T.op(lhs, rhs, ctx, op_fn); + return T.tryOp(lhs, rhs, ctx, op_fn); + }, + }; +} + +pub inline fn partialCmp(comptime T: type, lhs: *const T, 
rhs: *const T) ?std.math.Order { + return switch (T) { + f32 => partialCmpF32(lhs, rhs), + CSSInteger => std.math.order(lhs.*, rhs.*), + css_values.angle.Angle => css_values.angle.Angle.partialCmp(lhs, rhs), + else => T.partialCmp(lhs, rhs), + }; +} + +pub inline fn partialCmpF32(lhs: *const f32, rhs: *const f32) ?std.math.Order { + const lte = lhs.* <= rhs.*; + const rte = lhs.* >= rhs.*; + if (!lte and !rte) return null; + if (!lte and rte) return .gt; + if (lte and !rte) return .lt; + return .eq; +} + +pub const HASH_SEED: u64 = 0; + +pub fn hashArrayList(comptime V: type, this: *const ArrayList(V), hasher: *std.hash.Wyhash) void { + for (this.items) |*item| { + hash(V, item, hasher); + } +} +pub fn hashBabyList(comptime V: type, this: *const bun.BabyList(V), hasher: *std.hash.Wyhash) void { + for (this.sliceConst()) |*item| { + hash(V, item, hasher); + } +} + +pub fn hasHash(comptime T: type) bool { + const tyinfo = @typeInfo(T); + if (comptime T == []const u8) return true; + if (comptime bun.meta.isSimpleEqlType(T)) return true; + if (tyinfo == .Pointer) { + const TT = std.meta.Child(T); + return hasHash(TT); + } + if (tyinfo == .Optional) { + const TT = std.meta.Child(T); + return hasHash(TT); + } + if (comptime bun.meta.looksLikeListContainerType(T)) |result| { + switch (result.list) { + .array_list => return true, + .baby_list => return true, + .small_list => return true, + } + } + return switch (T) { + else => @hasDecl(T, "hash"), + }; +} + +pub fn hash(comptime T: type, this: *const T, hasher: *std.hash.Wyhash) void { + if (comptime T == void) return; + const tyinfo = @typeInfo(T); + if (comptime tyinfo == .Pointer and T != []const u8) { + const TT = std.meta.Child(T); + if (tyinfo.Pointer.size == .One) { + return hash(TT, this.*, hasher); + } else if (tyinfo.Pointer.size == .Slice) { + for (this.*) |*item| { + hash(TT, item, hasher); + } + return; + } else { + @compileError("Can't hash this pointer type: " ++ @typeName(T)); + } + } + if (comptime @typeInfo(T) == .Optional) { + const TT = std.meta.Child(T); + if (this.* != null) return hash(TT, &this.*.?, hasher); + return; + } + if (comptime bun.meta.looksLikeListContainerType(T)) |result| { + switch (result.list) { + .array_list => return hashArrayList(result.child, this, hasher), + .baby_list => return hashBabyList(result.child, this, hasher), + .small_list => return this.hash(hasher), + } + } + if (comptime bun.meta.isSimpleEqlType(T)) { + const bytes = std.mem.asBytes(&this); + hasher.update(bytes); + return; + } + return switch (T) { + []const u8 => hasher.update(this.*), + else => T.hash(this, hasher), + }; +} diff --git a/src/css/logical.zig b/src/css/logical.zig new file mode 100644 index 0000000000..adcbf8a346 --- /dev/null +++ b/src/css/logical.zig @@ -0,0 +1,34 @@ +const std = @import("std"); +const Allocator = std.mem.Allocator; +const bun = @import("root").bun; +const logger = bun.logger; +const Log = logger.Log; + +pub const css = @import("./css_parser.zig"); +pub const Error = css.Error; + +const ArrayList = std.ArrayListUnmanaged; + +pub const PropertyCategory = enum { + logical, + physical, + + pub fn default() PropertyCategory { + return .physical; + } +}; + +pub const LogicalGroup = enum { + border_color, + border_style, + border_width, + border_radius, + margin, + scroll_margin, + padding, + scroll_padding, + inset, + size, + min_size, + max_size, +}; diff --git a/src/css/media_query.zig b/src/css/media_query.zig new file mode 100644 index 0000000000..1f56da703e --- /dev/null +++ b/src/css/media_query.zig @@ 
-0,0 +1,1518 @@ +const std = @import("std"); +const Allocator = std.mem.Allocator; +const bun = @import("root").bun; +const logger = bun.logger; +const Log = logger.Log; + +pub const css = @import("./css_parser.zig"); +pub const Error = css.Error; +const ArrayList = std.ArrayListUnmanaged; + +const Length = css.css_values.length.Length; +const CSSNumber = css.css_values.number.CSSNumber; +const Integer = css.css_values.number.Integer; +const CSSNumberFns = css.css_values.number.CSSNumberFns; +const CSSInteger = css.css_values.number.CSSInteger; +const CSSIntegerFns = css.css_values.number.CSSIntegerFns; +const Resolution = css.css_values.resolution.Resolution; +const Ratio = css.css_values.ratio.Ratio; +const Ident = css.css_values.ident.Ident; +const IdentFns = css.css_values.ident.IdentFns; +const EnvironmentVariable = css.css_properties.custom.EnvironmentVariable; +const DashedIdent = css.css_values.ident.DashedIdent; +const DashedIdentFns = css.css_values.ident.DashedIdentFns; + +const Printer = css.Printer; +const PrintErr = css.PrintErr; +const PrintResult = css.PrintResult; +const Result = css.Result; + +pub fn ValidQueryCondition(comptime T: type) void { + // fn parse_feature<'t>(input: &mut Parser<'i, 't>) -> Result>>; + _ = T.parseFeature; + // fn create_negation(condition: Box) -> Self; + _ = T.createNegation; + // fn create_operation(operator: Operator, conditions: Vec) -> Self; + _ = T.createOperation; + // fn parse_style_query<'t>(input: &mut Parser<'i, 't>) -> Result>> { + _ = T.parseStyleQuery; + // fn needs_parens(&self, parent_operator: Option, targets: &Targets) -> bool; + _ = T.needsParens; +} + +/// A [media query list](https://drafts.csswg.org/mediaqueries/#mq-list). +pub const MediaList = struct { + /// The list of media queries. + media_queries: ArrayList(MediaQuery) = .{}, + + /// Parse a media query list from CSS. + pub fn parse(input: *css.Parser) Result(MediaList) { + var media_queries = ArrayList(MediaQuery){}; + while (true) { + const mq = switch (input.parseUntilBefore(css.Delimiters{ .comma = true }, MediaQuery, {}, css.voidWrap(MediaQuery, MediaQuery.parse))) { + .result => |v| v, + .err => |e| { + if (e.kind == .basic and e.kind.basic == .end_of_input) break; + return .{ .err = e }; + }, + }; + media_queries.append(input.allocator(), mq) catch bun.outOfMemory(); + + if (input.next().asValue()) |tok| { + if (tok.* != .comma) { + bun.Output.panic("Unreachable code: expected a comma after parsing a MediaQuery.\n\nThis is a bug in Bun's CSS parser. Please file a bug report at https://github.com/oven-sh/bun/issues/new/choose", .{}); + } + } else break; + } + + return .{ .result = MediaList{ .media_queries = media_queries } }; + } + + pub fn toCss(this: *const MediaList, comptime W: type, dest: *css.Printer(W)) PrintErr!void { + if (this.media_queries.items.len == 0) { + return dest.writeStr("not all"); + } + + var first = true; + for (this.media_queries.items) |*query| { + if (!first) { + try dest.delim(',', false); + } + first = false; + try query.toCss(W, dest); + } + return; + } + + pub fn eql(lhs: *const MediaList, rhs: *const MediaList) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn deepClone(this: *const MediaList, allocator: std.mem.Allocator) MediaList { + return MediaList{ + .media_queries = css.deepClone(MediaQuery, allocator, &this.media_queries), + }; + } + + /// Returns whether the media query list always matches. + pub fn alwaysMatches(this: *const MediaList) bool { + // If the media list is empty, it always matches. 
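+        // Otherwise every query must itself always match: a list parsed from a bare
+        // `@media all` still matches, while one containing `screen` or
+        // `(min-width: 600px)` does not.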
+ return this.media_queries.items.len == 0 or brk: { + for (this.media_queries.items) |*query| { + if (!query.alwaysMatches()) break :brk false; + } + break :brk true; + }; + } +}; + +/// A binary `and` or `or` operator. +pub const Operator = enum { + /// The `and` operator. + @"and", + /// The `or` operator. + @"or", + + pub fn asStr(this: *const @This()) []const u8 { + return css.enum_property_util.asStr(@This(), this); + } + + pub fn parse(input: *css.Parser) Result(@This()) { + return css.enum_property_util.parse(@This(), input); + } + + pub fn toCss(this: *const @This(), comptime W: type, dest: *Printer(W)) PrintErr!void { + return css.enum_property_util.toCss(@This(), this, W, dest); + } +}; + +/// A [media query](https://drafts.csswg.org/mediaqueries/#media). +pub const MediaQuery = struct { + /// The qualifier for this query. + qualifier: ?Qualifier, + /// The media type for this query, that can be known, unknown, or "all". + media_type: MediaType, + /// The condition that this media query contains. This cannot have `or` + /// in the first level. + condition: ?MediaCondition, + // ~toCssImpl + const This = @This(); + + pub fn deepClone(this: *const MediaQuery, allocator: std.mem.Allocator) MediaQuery { + return MediaQuery{ + .qualifier = if (this.qualifier) |q| q else null, + .media_type = this.media_type, + .condition = if (this.condition) |*c| c.deepClone(allocator) else null, + }; + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + /// Returns whether the media query is guaranteed to always match. + pub fn alwaysMatches(this: *const MediaQuery) bool { + return this.qualifier == null and this.media_type == .all and this.condition == null; + } + + pub fn parse(input: *css.Parser) Result(MediaQuery) { + const Fn = struct { + pub fn tryParseFn(i: *css.Parser) Result(struct { ?Qualifier, ?MediaType }) { + const qualifier = switch (i.tryParse(Qualifier.parse, .{})) { + .result => |vv| vv, + .err => null, + }; + const media_type = switch (MediaType.parse(i)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + return .{ .result = .{ qualifier, media_type } }; + } + }; + const qualifier, const explicit_media_type = switch (input.tryParse(Fn.tryParseFn, .{})) { + .result => |v| v, + .err => .{ null, null }, + }; + + const condition = if (explicit_media_type == null) + switch (MediaCondition.parseWithFlags(input, QueryConditionFlags{ .allow_or = true })) { + .result => |v| v, + .err => |e| return .{ .err = e }, + } + else if (input.tryParse(css.Parser.expectIdentMatching, .{"and"}).isOk()) + switch (MediaCondition.parseWithFlags(input, QueryConditionFlags.empty())) { + .result => |v| v, + .err => |e| return .{ .err = e }, + } + else + null; + + const media_type = explicit_media_type orelse MediaType.all; + + return .{ + .result = MediaQuery{ + .qualifier = qualifier, + .media_type = media_type, + .condition = condition, + }, + }; + } + + pub fn toCss(this: *const This, comptime W: type, dest: *Printer(W)) PrintErr!void { + if (this.qualifier) |qual| { + try qual.toCss(W, dest); + try dest.writeChar(' '); + } + + switch (this.media_type) { + .all => { + // We need to print "all" if there's a qualifier, or there's + // just an empty list of expressions. + // + // Otherwise, we'd serialize media queries like "(min-width: + // 40px)" in "all (min-width: 40px)", which is unexpected. 
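+                // Concretely: `@media (min-width: 40px)` round-trips unchanged, while
+                // `@media not all and (min-width: 40px)` keeps its explicit `all`
+                // because the `not` qualifier is present.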
+ if (this.qualifier != null or this.condition == null) { + try dest.writeStr("all"); + } + }, + .print => { + try dest.writeStr("print"); + }, + .screen => { + try dest.writeStr("screen"); + }, + .custom => |desc| { + try dest.writeStr(desc); + }, + } + + const condition = if (this.condition) |*cond| cond else return; + + const needs_parens = if (this.media_type != .all or this.qualifier != null) needs_parens: { + try dest.writeStr(" and "); + break :needs_parens condition.* == .operation and condition.operation.operator != .@"and"; + } else false; + + return toCssWithParensIfNeeded(condition, W, dest, needs_parens); + } +}; + +/// Flags for `parse_query_condition`. +pub const QueryConditionFlags = packed struct(u8) { + /// Whether to allow top-level "or" boolean logic. + allow_or: bool = false, + /// Whether to allow style container queries. + allow_style: bool = false, + __unused: u6 = 0, + + pub usingnamespace css.Bitflags(@This()); +}; + +pub fn toCssWithParensIfNeeded( + v: anytype, + comptime W: type, + dest: *Printer(W), + needs_parens: bool, +) PrintErr!void { + if (needs_parens) { + try dest.writeChar('('); + } + try v.toCss(W, dest); + if (needs_parens) { + try dest.writeChar(')'); + } + return; +} + +/// A [media query qualifier](https://drafts.csswg.org/mediaqueries/#mq-prefix). +pub const Qualifier = enum { + /// Prevents older browsers from matching the media query. + only, + /// Negates a media query. + not, + + pub fn asStr(this: *const @This()) []const u8 { + return css.enum_property_util.asStr(@This(), this); + } + + pub fn parse(input: *css.Parser) Result(@This()) { + return css.enum_property_util.parse(@This(), input); + } + + pub fn toCss(this: *const @This(), comptime W: type, dest: *Printer(W)) PrintErr!void { + return css.enum_property_util.toCss(@This(), this, W, dest); + } +}; + +/// A [media type](https://drafts.csswg.org/mediaqueries/#media-types) within a media query. +pub const MediaType = union(enum) { + /// Matches all devices. + all, + /// Matches printers, and devices intended to reproduce a printed + /// display, such as a web browser showing a document in “Print Preview”. + print, + /// Matches all devices that aren’t matched by print. + screen, + /// An unknown media type. 
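+    /// Anything other than `all`, `print`, or `screen` ends up here; for example
+    /// `@media speech` parses as `.{ .custom = "speech" }` (see `fromStr` below).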
+ custom: []const u8, + + pub fn parse(input: *css.Parser) Result(MediaType) { + const name = switch (input.expectIdent()) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + return .{ .result = MediaType.fromStr(name) }; + } + + pub fn fromStr(name: []const u8) MediaType { + const Enumerations = enum { all, print, screen }; + const Map = comptime bun.ComptimeEnumMap(Enumerations); + if (Map.getASCIIICaseInsensitive(name)) |x| return switch (x) { + .all => .all, + .print => .print, + .screen => .screen, + }; + return .{ .custom = name }; + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } +}; + +pub fn operationToCss(comptime QueryCondition: type, operator: Operator, conditions: *const ArrayList(QueryCondition), comptime W: type, dest: *Printer(W)) PrintErr!void { + ValidQueryCondition(QueryCondition); + const first = &conditions.items[0]; + try toCssWithParensIfNeeded(first, W, dest, first.needsParens(operator, &dest.targets)); + if (conditions.items.len == 1) return; + for (conditions.items[1..]) |*item| { + try dest.writeChar(' '); + try operator.toCss(W, dest); + try dest.writeChar(' '); + try toCssWithParensIfNeeded(item, W, dest, item.needsParens(operator, &dest.targets)); + } + return; +} + +/// Represents a media condition. +/// +/// Implements QueryCondition interface. +pub const MediaCondition = union(enum) { + feature: MediaFeature, + not: *MediaCondition, + operation: struct { + operator: Operator, + conditions: ArrayList(MediaCondition), + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + }, + + const This = @This(); + + pub fn deepClone(this: *const MediaCondition, allocator: std.mem.Allocator) MediaCondition { + return switch (this.*) { + .feature => |*f| MediaCondition{ .feature = f.deepClone(allocator) }, + .not => |c| MediaCondition{ .not = bun.create(allocator, MediaCondition, c.deepClone(allocator)) }, + .operation => |op| MediaCondition{ + .operation = .{ + .operator = op.operator, + .conditions = css.deepClone(MediaCondition, allocator, &op.conditions), + }, + }, + }; + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn toCss(this: *const This, comptime W: type, dest: *Printer(W)) PrintErr!void { + switch (this.*) { + .feature => |*f| { + try f.toCss(W, dest); + }, + .not => |c| { + try dest.writeStr("not "); + try toCssWithParensIfNeeded(c, W, dest, c.needsParens(null, &dest.targets)); + }, + .operation => |operation| { + try operationToCss(MediaCondition, operation.operator, &operation.conditions, W, dest); + }, + } + + return; + } + + /// QueryCondition.parseFeature + pub fn parseFeature(input: *css.Parser) Result(MediaCondition) { + const feature = switch (MediaFeature.parse(input)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + return .{ .result = MediaCondition{ .feature = feature } }; + } + + /// QueryCondition.createNegation + pub fn createNegation(condition: *MediaCondition) MediaCondition { + return MediaCondition{ .not = condition }; + } + + /// QueryCondition.createOperation + pub fn createOperation(operator: Operator, conditions: ArrayList(MediaCondition)) MediaCondition { + return MediaCondition{ + .operation = .{ + .operator = operator, + .conditions = conditions, + }, + }; + } + + /// QueryCondition.parseStyleQuery + pub fn parseStyleQuery(input: *css.Parser) Result(MediaCondition) { + return .{ .err = 
input.newErrorForNextToken() }; + } + + /// QueryCondition.needsParens + pub fn needsParens(this: *const MediaCondition, parent_operator: ?Operator, targets: *const css.targets.Targets) bool { + return switch (this.*) { + .not => true, + .operation => |operation| operation.operator != parent_operator, + .feature => |f| f.needsParens(parent_operator, targets), + }; + } + + pub fn parseWithFlags(input: *css.Parser, flags: QueryConditionFlags) Result(MediaCondition) { + return parseQueryCondition(MediaCondition, input, flags); + } +}; + +/// Parse a single query condition. +pub fn parseQueryCondition( + comptime QueryCondition: type, + input: *css.Parser, + flags: QueryConditionFlags, +) Result(QueryCondition) { + const location = input.currentSourceLocation(); + const is_negation, const is_style = brk: { + const tok = switch (input.next()) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + switch (tok.*) { + .open_paren => break :brk .{ false, false }, + .ident => |ident| { + if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(ident, "not")) break :brk .{ true, false }; + }, + .function => |f| { + if (flags.contains(QueryConditionFlags{ .allow_style = true }) and + bun.strings.eqlCaseInsensitiveASCIIICheckLength(f, "style")) + { + break :brk .{ false, true }; + } + }, + else => {}, + } + return .{ .err = location.newUnexpectedTokenError(tok.*) }; + }; + + const first_condition: QueryCondition = first_condition: { + const val: u8 = @as(u8, @intFromBool(is_negation)) << 1 | @as(u8, @intFromBool(is_style)); + // (is_negation, is_style) + switch (val) { + // (true, false) + 0b10 => { + const inner_condition = switch (parseParensOrFunction(QueryCondition, input, flags)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + return .{ .result = QueryCondition.createNegation(bun.create(input.allocator(), QueryCondition, inner_condition)) }; + }, + // (true, true) + 0b11 => { + const inner_condition = switch (QueryCondition.parseStyleQuery(input)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + return .{ .result = QueryCondition.createNegation(bun.create(input.allocator(), QueryCondition, inner_condition)) }; + }, + 0b00 => break :first_condition switch (parseParenBlock(QueryCondition, input, flags)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }, + 0b01 => break :first_condition switch (QueryCondition.parseStyleQuery(input)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }, + else => unreachable, + } + }; + + const operator: Operator = if (input.tryParse(Operator.parse, .{}).asValue()) |op| + op + else + return .{ .result = first_condition }; + + if (!flags.contains(QueryConditionFlags{ .allow_or = true }) and operator == .@"or") { + return .{ .err = location.newUnexpectedTokenError(css.Token{ .ident = "or" }) }; + } + + var conditions = ArrayList(QueryCondition){}; + conditions.append( + input.allocator(), + first_condition, + ) catch unreachable; + conditions.append( + input.allocator(), + switch (parseParensOrFunction(QueryCondition, input, flags)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }, + ) catch unreachable; + + const delim = switch (operator) { + .@"and" => "and", + .@"or" => "or", + }; + + while (true) { + if (input.tryParse(css.Parser.expectIdentMatching, .{delim}).isErr()) { + return .{ .result = QueryCondition.createOperation(operator, conditions) }; + } + + conditions.append( + input.allocator(), + switch (parseParensOrFunction(QueryCondition, input, flags)) { + .err => |e| return .{ .err = e }, + 
.result => |v| v, + }, + ) catch unreachable; + } +} + +/// Parse a media condition in parentheses, or a style() function. +pub fn parseParensOrFunction( + comptime QueryCondition: type, + input: *css.Parser, + flags: QueryConditionFlags, +) Result(QueryCondition) { + const location = input.currentSourceLocation(); + const t = switch (input.next()) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + switch (t.*) { + .open_paren => return parseParenBlock(QueryCondition, input, flags), + .function => |f| { + if (flags.contains(QueryConditionFlags{ .allow_style = true }) and + bun.strings.eqlCaseInsensitiveASCIIICheckLength(f, "style")) + { + return QueryCondition.parseStyleQuery(input); + } + }, + else => {}, + } + return .{ .err = location.newUnexpectedTokenError(t.*) }; +} + +fn parseParenBlock( + comptime QueryCondition: type, + input: *css.Parser, + flags: QueryConditionFlags, +) Result(QueryCondition) { + const Closure = struct { + flags: QueryConditionFlags, + pub fn parseNestedBlockFn(this: *@This(), i: *css.Parser) Result(QueryCondition) { + if (i.tryParse(@This().tryParseFn, .{this}).asValue()) |inner| { + return .{ .result = inner }; + } + + return QueryCondition.parseFeature(i); + } + + pub fn tryParseFn(i: *css.Parser, this: *@This()) Result(QueryCondition) { + return parseQueryCondition(QueryCondition, i, this.flags); + } + }; + + var closure = Closure{ + .flags = flags, + }; + return input.parseNestedBlock(QueryCondition, &closure, Closure.parseNestedBlockFn); +} + +/// A [media feature](https://drafts.csswg.org/mediaqueries/#typedef-media-feature) +pub const MediaFeature = QueryFeature(MediaFeatureId); + +pub const MediaFeatureId = enum { + /// The [width](https://w3c.github.io/csswg-drafts/mediaqueries-5/#width) media feature. + width, + /// The [height](https://w3c.github.io/csswg-drafts/mediaqueries-5/#height) media feature. + height, + /// The [aspect-ratio](https://w3c.github.io/csswg-drafts/mediaqueries-5/#aspect-ratio) media feature. + @"aspect-ratio", + /// The [orientation](https://w3c.github.io/csswg-drafts/mediaqueries-5/#orientation) media feature. + orientation, + /// The [overflow-block](https://w3c.github.io/csswg-drafts/mediaqueries-5/#overflow-block) media feature. + @"overflow-block", + /// The [overflow-inline](https://w3c.github.io/csswg-drafts/mediaqueries-5/#overflow-inline) media feature. + @"overflow-inline", + /// The [horizontal-viewport-segments](https://w3c.github.io/csswg-drafts/mediaqueries-5/#horizontal-viewport-segments) media feature. + @"horizontal-viewport-segments", + /// The [vertical-viewport-segments](https://w3c.github.io/csswg-drafts/mediaqueries-5/#vertical-viewport-segments) media feature. + @"vertical-viewport-segments", + /// The [display-mode](https://w3c.github.io/csswg-drafts/mediaqueries-5/#display-mode) media feature. + @"display-mode", + /// The [resolution](https://w3c.github.io/csswg-drafts/mediaqueries-5/#resolution) media feature. + resolution, + /// The [scan](https://w3c.github.io/csswg-drafts/mediaqueries-5/#scan) media feature. + scan, + /// The [grid](https://w3c.github.io/csswg-drafts/mediaqueries-5/#grid) media feature. + grid, + /// The [update](https://w3c.github.io/csswg-drafts/mediaqueries-5/#update) media feature. + update, + /// The [environment-blending](https://w3c.github.io/csswg-drafts/mediaqueries-5/#environment-blending) media feature. + @"environment-blending", + /// The [color](https://w3c.github.io/csswg-drafts/mediaqueries-5/#color) media feature. 
+ color, + /// The [color-index](https://w3c.github.io/csswg-drafts/mediaqueries-5/#color-index) media feature. + @"color-index", + /// The [monochrome](https://w3c.github.io/csswg-drafts/mediaqueries-5/#monochrome) media feature. + monochrome, + /// The [color-gamut](https://w3c.github.io/csswg-drafts/mediaqueries-5/#color-gamut) media feature. + @"color-gamut", + /// The [dynamic-range](https://w3c.github.io/csswg-drafts/mediaqueries-5/#dynamic-range) media feature. + @"dynamic-range", + /// The [inverted-colors](https://w3c.github.io/csswg-drafts/mediaqueries-5/#inverted-colors) media feature. + @"inverted-colors", + /// The [pointer](https://w3c.github.io/csswg-drafts/mediaqueries-5/#pointer) media feature. + pointer, + /// The [hover](https://w3c.github.io/csswg-drafts/mediaqueries-5/#hover) media feature. + hover, + /// The [any-pointer](https://w3c.github.io/csswg-drafts/mediaqueries-5/#any-pointer) media feature. + @"any-pointer", + /// The [any-hover](https://w3c.github.io/csswg-drafts/mediaqueries-5/#any-hover) media feature. + @"any-hover", + /// The [nav-controls](https://w3c.github.io/csswg-drafts/mediaqueries-5/#nav-controls) media feature. + @"nav-controls", + /// The [video-color-gamut](https://w3c.github.io/csswg-drafts/mediaqueries-5/#video-color-gamut) media feature. + @"video-color-gamut", + /// The [video-dynamic-range](https://w3c.github.io/csswg-drafts/mediaqueries-5/#video-dynamic-range) media feature. + @"video-dynamic-range", + /// The [scripting](https://w3c.github.io/csswg-drafts/mediaqueries-5/#scripting) media feature. + scripting, + /// The [prefers-reduced-motion](https://w3c.github.io/csswg-drafts/mediaqueries-5/#prefers-reduced-motion) media feature. + @"prefers-reduced-motion", + /// The [prefers-reduced-transparency](https://w3c.github.io/csswg-drafts/mediaqueries-5/#prefers-reduced-transparency) media feature. + @"prefers-reduced-transparency", + /// The [prefers-contrast](https://w3c.github.io/csswg-drafts/mediaqueries-5/#prefers-contrast) media feature. + @"prefers-contrast", + /// The [forced-colors](https://w3c.github.io/csswg-drafts/mediaqueries-5/#forced-colors) media feature. + @"forced-colors", + /// The [prefers-color-scheme](https://w3c.github.io/csswg-drafts/mediaqueries-5/#prefers-color-scheme) media feature. + @"prefers-color-scheme", + /// The [prefers-reduced-data](https://w3c.github.io/csswg-drafts/mediaqueries-5/#prefers-reduced-data) media feature. + @"prefers-reduced-data", + /// The [device-width](https://w3c.github.io/csswg-drafts/mediaqueries-5/#device-width) media feature. + @"device-width", + /// The [device-height](https://w3c.github.io/csswg-drafts/mediaqueries-5/#device-height) media feature. + @"device-height", + /// The [device-aspect-ratio](https://w3c.github.io/csswg-drafts/mediaqueries-5/#device-aspect-ratio) media feature. + @"device-aspect-ratio", + + /// The non-standard -webkit-device-pixel-ratio media feature. + @"-webkit-device-pixel-ratio", + /// The non-standard -moz-device-pixel-ratio media feature. 
+ @"-moz-device-pixel-ratio", + + pub usingnamespace css.DeriveValueType(@This()); + + pub const ValueTypeMap = .{ + .width = MediaFeatureType.length, + .height = MediaFeatureType.length, + .@"aspect-ratio" = MediaFeatureType.ratio, + .orientation = MediaFeatureType.ident, + .@"overflow-block" = MediaFeatureType.ident, + .@"overflow-inline" = MediaFeatureType.ident, + .@"horizontal-viewport-segments" = MediaFeatureType.integer, + .@"vertical-viewport-segments" = MediaFeatureType.integer, + .@"display-mode" = MediaFeatureType.ident, + .resolution = MediaFeatureType.resolution, + .scan = MediaFeatureType.ident, + .grid = MediaFeatureType.boolean, + .update = MediaFeatureType.ident, + .@"environment-blending" = MediaFeatureType.ident, + .color = MediaFeatureType.integer, + .@"color-index" = MediaFeatureType.integer, + .monochrome = MediaFeatureType.integer, + .@"color-gamut" = MediaFeatureType.ident, + .@"dynamic-range" = MediaFeatureType.ident, + .@"inverted-colors" = MediaFeatureType.ident, + .pointer = MediaFeatureType.ident, + .hover = MediaFeatureType.ident, + .@"any-pointer" = MediaFeatureType.ident, + .@"any-hover" = MediaFeatureType.ident, + .@"nav-controls" = MediaFeatureType.ident, + .@"video-color-gamut" = MediaFeatureType.ident, + .@"video-dynamic-range" = MediaFeatureType.ident, + .scripting = MediaFeatureType.ident, + .@"prefers-reduced-motion" = MediaFeatureType.ident, + .@"prefers-reduced-transparency" = MediaFeatureType.ident, + .@"prefers-contrast" = MediaFeatureType.ident, + .@"forced-colors" = MediaFeatureType.ident, + .@"prefers-color-scheme" = MediaFeatureType.ident, + .@"prefers-reduced-data" = MediaFeatureType.ident, + .@"device-width" = MediaFeatureType.length, + .@"device-height" = MediaFeatureType.length, + .@"device-aspect-ratio" = MediaFeatureType.ratio, + .@"-webkit-device-pixel-ratio" = MediaFeatureType.number, + .@"-moz-device-pixel-ratio" = MediaFeatureType.number, + }; + + pub fn toCssWithPrefix( + this: *const MediaFeatureId, + prefix: []const u8, + comptime W: type, + dest: *Printer(W), + ) PrintErr!void { + switch (this.*) { + .@"-webkit-device-pixel-ratio" => { + return dest.writeFmt("-webkit-{s}device-pixel-ratio", .{prefix}); + }, + else => { + try dest.writeStr(prefix); + return this.toCss(W, dest); + }, + } + } + + pub inline fn asStr(this: *const @This()) []const u8 { + return css.enum_property_util.asStr(@This(), this); + } + + pub fn parse(input: *css.Parser) Result(@This()) { + return css.enum_property_util.parse(@This(), input); + } + + pub fn toCss(this: *const @This(), comptime W: type, dest: *Printer(W)) PrintErr!void { + return css.enum_property_util.toCss(@This(), this, W, dest); + } +}; + +pub fn QueryFeature(comptime FeatureId: type) type { + return union(enum) { + /// A plain media feature, e.g. `(min-width: 240px)`. + plain: struct { + /// The name of the feature. + name: MediaFeatureName(FeatureId), + /// The feature value. + value: MediaFeatureValue, + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + }, + + /// A boolean feature, e.g. `(hover)`. + boolean: struct { + /// The name of the feature. + name: MediaFeatureName(FeatureId), + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + }, + + /// A range, e.g. `(width > 240px)`. + range: struct { + /// The name of the feature. + name: MediaFeatureName(FeatureId), + /// A comparator. + operator: MediaFeatureComparison, + /// The feature value. 
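+            /// For the `(min-width: 240px)` example above, this holds the `240px`,
+            /// i.e. a `MediaFeatureValue.length`.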
+ value: MediaFeatureValue, + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + }, + + /// An interval, e.g. `(120px < width < 240px)`. + interval: struct { + /// The name of the feature. + name: MediaFeatureName(FeatureId), + /// A start value. + start: MediaFeatureValue, + /// A comparator for the start value. + start_operator: MediaFeatureComparison, + /// The end value. + end: MediaFeatureValue, + /// A comparator for the end value. + end_operator: MediaFeatureComparison, + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + }, + + const This = @This(); + + pub fn deepClone(this: *const This, allocator: std.mem.Allocator) This { + return switch (this.*) { + .plain => .{ + .plain = .{ + .name = this.plain.name, + .value = this.plain.value.deepClone(allocator), + }, + }, + .boolean => .{ + .boolean = .{ + .name = this.boolean.name, + }, + }, + .range => .{ + .range = .{ + .name = this.range.name, + .operator = this.range.operator, + .value = this.range.value.deepClone(allocator), + }, + }, + .interval => .{ + .interval = .{ + .name = this.interval.name, + .start = this.interval.start.deepClone(allocator), + .start_operator = this.interval.start_operator, + .end = this.interval.end.deepClone(allocator), + .end_operator = this.interval.end_operator, + }, + }, + }; + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn needsParens(this: *const This, parent_operator: ?Operator, targets: *const css.Targets) bool { + return parent_operator != .@"and" and + this.* == .interval and + targets.shouldCompileSame(.media_interval_syntax); + } + + pub fn toCss(this: *const This, comptime W: type, dest: *Printer(W)) PrintErr!void { + try dest.writeChar('('); + + switch (this.*) { + .boolean => { + try this.boolean.name.toCss(W, dest); + }, + .plain => { + try this.plain.name.toCss(W, dest); + try dest.delim(':', false); + try this.plain.value.toCss(W, dest); + }, + .range => { + // If range syntax is unsupported, use min/max prefix if possible. 
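+                // Rough sketch of the rewrite (writeMinMax handles the actual emission):
+                // `(width >= 240px)` is downleveled to the legacy `(min-width: 240px)`
+                // form for targets that lack media range syntax support.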
+ if (dest.targets.shouldCompileSame(.media_range_syntax)) { + return writeMinMax( + &this.range.operator, + FeatureId, + &this.range.name, + &this.range.value, + W, + dest, + ); + } + try this.range.name.toCss(W, dest); + try this.range.operator.toCss(W, dest); + try this.range.value.toCss(W, dest); + }, + .interval => |interval| { + if (dest.targets.shouldCompileSame(.media_interval_syntax)) { + try writeMinMax( + &interval.start_operator.opposite(), + FeatureId, + &interval.name, + &interval.start, + W, + dest, + ); + try dest.writeStr(" and ("); + return writeMinMax( + &interval.end_operator, + FeatureId, + &interval.name, + &interval.end, + W, + dest, + ); + } + + try interval.start.toCss(W, dest); + try interval.start_operator.toCss(W, dest); + try interval.name.toCss(W, dest); + try interval.end_operator.toCss(W, dest); + try interval.end.toCss(W, dest); + }, + } + + return dest.writeChar(')'); + } + + pub fn parse(input: *css.Parser) Result(This) { + switch (input.tryParse(parseNameFirst, .{})) { + .result => |res| { + return .{ .result = res }; + }, + .err => |e| { + if (e.kind == .custom and e.kind.custom == .invalid_media_query) { + return .{ .err = e }; + } + return parseValueFirst(input); + }, + } + return Result(This).success; + } + + pub fn parseNameFirst(input: *css.Parser) Result(This) { + const name, const legacy_op = switch (MediaFeatureName(FeatureId).parse(input)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + + const operator = if (input.tryParse(consumeOperationOrColon, .{true}).asValue()) |operator| operator else return .{ + .result = .{ + .boolean = .{ .name = name }, + }, + }; + + if (operator != null and legacy_op != null) { + return .{ .err = input.newCustomError(css.ParserError.invalid_media_query) }; + } + + const value = switch (MediaFeatureValue.parse(input, name.valueType())) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + if (!value.checkType(name.valueType())) { + return .{ .err = input.newCustomError(css.ParserError.invalid_media_query) }; + } + + if (operator orelse legacy_op) |op| { + if (!name.valueType().allowsRanges()) { + return .{ .err = input.newCustomError(css.ParserError.invalid_media_query) }; + } + + return .{ .result = .{ + .range = .{ + .name = name, + .operator = op, + .value = value, + }, + } }; + } else { + return .{ .result = .{ + .plain = .{ + .name = name, + .value = value, + }, + } }; + } + } + + pub fn parseValueFirst(input: *css.Parser) Result(This) { + // We need to find the feature name first so we know the type. + const start = input.state(); + const name = name: { + while (true) { + if (MediaFeatureName(FeatureId).parse(input).asValue()) |result| { + const name: MediaFeatureName(FeatureId) = result[0]; + const legacy_op: ?MediaFeatureComparison = result[1]; + if (legacy_op != null) { + return .{ .err = input.newCustomError(css.ParserError.invalid_media_query) }; + } + break :name name; + } + if (input.isExhausted()) { + return .{ .err = input.newCustomError(css.ParserError.invalid_media_query) }; + } + } + }; + + input.reset(&start); + + // Now we can parse the first value. + const value = switch (MediaFeatureValue.parse(input, name.valueType())) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + const operator = switch (consumeOperationOrColon(input, false)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + + // Skip over the feature name again. 
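+            // At this point, for value-first input like `(120px < width < 240px)`, the
+            // `120px` and `<` have already been consumed, so the parser is sitting on
+            // `width`; re-parsing the name simply advances past it (and is sanity-checked
+            // against the name found during the initial scan).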
+ { + const feature_name, const blah = switch (MediaFeatureName(FeatureId).parse(input)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + _ = blah; + bun.debugAssert(feature_name.eql(&name)); + } + + if (!name.valueType().allowsRanges() or !value.checkType(name.valueType())) { + return .{ .err = input.newCustomError(css.ParserError.invalid_media_query) }; + } + + if (input.tryParse(consumeOperationOrColon, .{false}).asValue()) |end_operator_| { + const start_operator = operator.?; + const end_operator = end_operator_.?; + // Start and end operators must be matching. + const GT: u8 = comptime @intFromEnum(MediaFeatureComparison.@"greater-than"); + const GTE: u8 = comptime @intFromEnum(MediaFeatureComparison.@"greater-than-equal"); + const LT: u8 = comptime @intFromEnum(MediaFeatureComparison.@"less-than"); + const LTE: u8 = comptime @intFromEnum(MediaFeatureComparison.@"less-than-equal"); + const check_val: u8 = @intFromEnum(start_operator) | @intFromEnum(end_operator); + switch (check_val) { + GT | GT, + GT | GTE, + GTE | GTE, + LT | LT, + LT | LTE, + LTE | LTE, + => {}, + else => return .{ .err = input.newCustomError(css.ParserError.invalid_media_query) }, + } + + const end_value = switch (MediaFeatureValue.parse(input, name.valueType())) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + if (!end_value.checkType(name.valueType())) { + return .{ .err = input.newCustomError(css.ParserError.invalid_media_query) }; + } + + return .{ .result = .{ + .interval = .{ + .name = name, + .start = value, + .start_operator = start_operator, + .end = end_value, + .end_operator = end_operator, + }, + } }; + } else { + const final_operator = operator.?.opposite(); + return .{ .result = .{ + .range = .{ + .name = name, + .operator = final_operator, + .value = value, + }, + } }; + } + } + }; +} + +/// Consumes an operation or a colon, or returns an error. 
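+/// For example, `>=` yields `.@"greater-than-equal"` and `=` yields `.equal`, while a
+/// bare `:` (the plain `(name: value)` syntax) is accepted only when `allow_colon` is
+/// true, in which case `null` is returned.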
+fn consumeOperationOrColon(input: *css.Parser, allow_colon: bool) Result(?MediaFeatureComparison) { + const location = input.currentSourceLocation(); + const first_delim = first_delim: { + const loc = input.currentSourceLocation(); + const next_token = switch (input.next()) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + switch (next_token.*) { + .colon => if (allow_colon) return .{ .result = null }, + .delim => |oper| break :first_delim oper, + else => {}, + } + return .{ .err = loc.newUnexpectedTokenError(next_token.*) }; + }; + + switch (first_delim) { + '=' => return .{ .result = .equal }, + '>' => { + if (input.tryParse(css.Parser.expectDelim, .{'='}).isOk()) { + return .{ .result = .@"greater-than-equal" }; + } + return .{ .result = .@"greater-than" }; + }, + '<' => { + if (input.tryParse(css.Parser.expectDelim, .{'='}).isOk()) { + return .{ .result = .@"less-than-equal" }; + } + return .{ .result = .@"less-than" }; + }, + else => return .{ .err = location.newUnexpectedTokenError(.{ .delim = first_delim }) }, + } +} + +pub const MediaFeatureComparison = enum(u8) { + /// `=` + equal = 1, + /// `>` + @"greater-than" = 2, + /// `>=` + @"greater-than-equal" = 4, + /// `<` + @"less-than" = 8, + /// `<=` + @"less-than-equal" = 16, + + pub fn asStr(this: *const @This()) []const u8 { + return css.enum_property_util.asStr(@This(), this); + } + + pub fn toCss(this: *const @This(), comptime W: type, dest: *Printer(W)) PrintErr!void { + switch (this.*) { + .equal => { + try dest.delim('-', true); + }, + .@"greater-than" => { + try dest.delim('>', true); + }, + .@"greater-than-equal" => { + try dest.whitespace(); + try dest.writeStr(">="); + try dest.whitespace(); + }, + .@"less-than" => { + try dest.delim('<', true); + }, + .@"less-than-equal" => { + try dest.whitespace(); + try dest.writeStr("<="); + try dest.whitespace(); + }, + } + } + + pub fn opposite(self: @This()) @This() { + return switch (self) { + .@"greater-than" => .@"less-than", + .@"greater-than-equal" => .@"less-than-equal", + .@"less-than" => .@"greater-than", + .@"less-than-equal" => .@"greater-than-equal", + .equal => .equal, + }; + } +}; + +/// [media feature value](https://drafts.csswg.org/mediaqueries/#typedef-mf-value) within a media query. +/// +/// See [MediaFeature](MediaFeature). +pub const MediaFeatureValue = union(enum) { + /// A length value. + length: Length, + /// A number value. + number: CSSNumber, + /// An integer value. + integer: CSSInteger, + /// A boolean value. + boolean: bool, + /// A resolution. + resolution: Resolution, + /// A ratio. + ratio: Ratio, + /// An identifier. + ident: Ident, + /// An environment variable reference. 
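+    /// For example `env(safe-area-inset-bottom)`; its concrete type is not known
+    /// statically, so `valueType()` reports `.unknown` for this variant.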
+ env: EnvironmentVariable, + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn deepClone(this: *const MediaFeatureValue, allocator: std.mem.Allocator) MediaFeatureValue { + return switch (this.*) { + .length => |*l| .{ .length = l.deepClone(allocator) }, + .number => |n| .{ .number = n }, + .integer => |i| .{ .integer = i }, + .boolean => |b| .{ .boolean = b }, + .resolution => |r| .{ .resolution = r }, + .ratio => |r| .{ .ratio = r }, + .ident => |i| .{ .ident = i }, + .env => |*e| .{ .env = e.deepClone(allocator) }, + }; + } + + pub fn deinit(this: *MediaFeatureValue, allocator: std.mem.Allocator) void { + return switch (this.*) { + .length => |l| l.deinit(allocator), + .number => {}, + .integer => {}, + .boolean => {}, + .resolution => {}, + .ratio => {}, + .ident => {}, + .env => |*env| env.deinit(allocator), + }; + } + + pub fn toCss( + this: *const MediaFeatureValue, + comptime W: type, + dest: *Printer(W), + ) PrintErr!void { + switch (this.*) { + .length => |len| return len.toCss(W, dest), + .number => |num| return CSSNumberFns.toCss(&num, W, dest), + .integer => |int| return CSSIntegerFns.toCss(&int, W, dest), + .boolean => |b| { + if (b) { + return dest.writeChar('1'); + } else { + return dest.writeChar('0'); + } + }, + .resolution => |res| return res.toCss(W, dest), + .ratio => |ratio| return ratio.toCss(W, dest), + .ident => |id| return IdentFns.toCss(&id, W, dest), + .env => |*env| return EnvironmentVariable.toCss(env, W, dest, false), + } + } + + pub fn checkType(this: *const @This(), expected_type: MediaFeatureType) bool { + const vt = this.valueType(); + if (expected_type == .unknown or vt == .unknown) return true; + return expected_type == vt; + } + + /// Parses a single media query feature value, with an expected type. + /// If the type is unknown, pass MediaFeatureType::Unknown instead. 
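+    /// Parsing first attempts the expected type via `parseKnown`, then falls back
+    /// to `parseUnknown`, which tries ratio, number, length, resolution, and env()
+    /// before settling on an identifier.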
+ pub fn parse(input: *css.Parser, expected_type: MediaFeatureType) Result(MediaFeatureValue) { + if (input.tryParse(parseKnown, .{expected_type}).asValue()) |value| { + return .{ .result = value }; + } + + return parseUnknown(input); + } + + pub fn parseKnown(input: *css.Parser, expected_type: MediaFeatureType) Result(MediaFeatureValue) { + return .{ + .result = switch (expected_type) { + .boolean => { + const value = switch (CSSIntegerFns.parse(input)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + if (value != 0 and value != 1) return .{ .err = input.newCustomError(css.ParserError.invalid_value) }; + return .{ .result = .{ .boolean = value == 1 } }; + }, + .number => .{ .number = switch (CSSNumberFns.parse(input)) { + .result => |v| v, + .err => |e| return .{ .err = e }, + } }, + .integer => .{ .integer = switch (CSSIntegerFns.parse(input)) { + .result => |v| v, + .err => |e| return .{ .err = e }, + } }, + .length => .{ .length = switch (Length.parse(input)) { + .result => |v| v, + .err => |e| return .{ .err = e }, + } }, + .resolution => .{ .resolution = switch (Resolution.parse(input)) { + .result => |v| v, + .err => |e| return .{ .err = e }, + } }, + .ratio => .{ .ratio = switch (Ratio.parse(input)) { + .result => |v| v, + .err => |e| return .{ .err = e }, + } }, + .ident => .{ .ident = switch (IdentFns.parse(input)) { + .result => |v| v, + .err => |e| return .{ .err = e }, + } }, + .unknown => return .{ .err = input.newCustomError(.invalid_value) }, + }, + }; + } + + pub fn parseUnknown(input: *css.Parser) Result(MediaFeatureValue) { + // Ratios are ambiguous with numbers because the second param is optional (e.g. 2/1 == 2). + // We require the / delimiter when parsing ratios so that 2/1 ends up as a ratio and 2 is + // parsed as a number. + if (input.tryParse(Ratio.parseRequired, .{}).asValue()) |ratio| return .{ .result = .{ .ratio = ratio } }; + + // Parse number next so that unitless values are not parsed as lengths. 
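+        // (A bare `0`, for example, is a valid <length> as well as a <number>.)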
+ if (input.tryParse(CSSNumberFns.parse, .{}).asValue()) |num| return .{ .result = .{ .number = num } }; + + if (input.tryParse(Length.parse, .{}).asValue()) |res| return .{ .result = .{ .length = res } }; + + if (input.tryParse(Resolution.parse, .{}).asValue()) |res| return .{ .result = .{ .resolution = res } }; + + if (input.tryParse(EnvironmentVariable.parse, .{}).asValue()) |env| return .{ .result = .{ .env = env } }; + + const ident = switch (IdentFns.parse(input)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + return .{ .result = .{ .ident = ident } }; + } + + pub fn addF32(this: MediaFeatureValue, allocator: Allocator, other: f32) MediaFeatureValue { + return switch (this) { + .length => |len| .{ .length = len.add(allocator, Length.px(other)) }, + // .length => |len| .{ + // .length = .{ + // .value = .{ .px = other }, + // }, + // }, + .number => |num| .{ .number = num + other }, + .integer => |num| .{ .integer = num + if (css.signfns.isSignPositive(other)) @as(i32, 1) else @as(i32, -1) }, + .boolean => |v| .{ .boolean = v }, + .resolution => |res| .{ .resolution = res.addF32(allocator, other) }, + .ratio => |ratio| .{ .ratio = ratio.addF32(allocator, other) }, + .ident => |id| .{ .ident = id }, + .env => |env| .{ .env = env }, // TODO: calc support + }; + } + + pub fn valueType(this: *const MediaFeatureValue) MediaFeatureType { + return switch (this.*) { + .length => .length, + .number => .number, + .integer => .integer, + .boolean => .boolean, + .resolution => .resolution, + .ratio => .ratio, + .ident => .ident, + .env => .unknown, + }; + } +}; + +/// The type of a media feature. +pub const MediaFeatureType = enum { + /// A length value. + length, + /// A number value. + number, + /// An integer value. + integer, + /// A boolean value, either 0 or 1. + boolean, + /// A resolution. + resolution, + /// A ratio. + ratio, + /// An identifier. + ident, + /// An unknown type. + unknown, + + pub fn allowsRanges(this: MediaFeatureType) bool { + return switch (this) { + .length, .number, .integer, .resolution, .ratio, .unknown => true, + .boolean, .ident => false, + }; + } +}; + +pub fn MediaFeatureName(comptime FeatureId: type) type { + return union(enum) { + /// A standard media query feature identifier. + standard: FeatureId, + + /// A custom author-defined environment variable. + custom: DashedIdent, + + /// An unknown environment variable. 
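+        /// The identifier is stored verbatim when it does not parse as a known
+        /// `FeatureId` (see `parse` below), so unrecognized features are serialized
+        /// back unchanged.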
+ unknown: Ident, + + const This = @This(); + + pub fn eql(lhs: *const This, rhs: *const This) bool { + if (@intFromEnum(lhs.*) != @intFromEnum(rhs.*)) return false; + return switch (lhs.*) { + .standard => |fid| fid == rhs.standard, + .custom => |ident| bun.strings.eql(ident.v, rhs.custom.v), + .unknown => |ident| bun.strings.eql(ident.v, rhs.unknown.v), + }; + } + + pub fn valueType(this: *const This) MediaFeatureType { + return switch (this.*) { + .standard => |standard| standard.valueType(), + else => .unknown, + }; + } + + pub fn toCss(this: *const This, comptime W: type, dest: *Printer(W)) PrintErr!void { + return switch (this.*) { + .standard => |v| v.toCss(W, dest), + .custom => |d| DashedIdentFns.toCss(&d, W, dest), + .unknown => |v| IdentFns.toCss(&v, W, dest), + }; + } + + pub fn toCssWithPrefix(this: *const This, prefix: []const u8, comptime W: type, dest: *Printer(W)) PrintErr!void { + return switch (this.*) { + .standard => |v| v.toCssWithPrefix(prefix, W, dest), + .custom => |d| { + try dest.writeStr(prefix); + return DashedIdentFns.toCss(&d, W, dest); + }, + .unknown => |v| { + try dest.writeStr(prefix); + return IdentFns.toCss(&v, W, dest); + }, + }; + } + + /// Parses a media feature name. + pub fn parse(input: *css.Parser) Result(struct { This, ?MediaFeatureComparison }) { + const ident = switch (input.expectIdent()) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + + if (bun.strings.startsWith(ident, "--")) { + return .{ .result = .{ + .{ + .custom = .{ .v = ident }, + }, + null, + } }; + } + + var name = ident; + + // Webkit places its prefixes before "min" and "max". Remove it first, and + // re-add after removing min/max. + const is_webkit = bun.strings.startsWithCaseInsensitiveAscii(name, "-webkit-"); + if (is_webkit) { + name = name[8..]; + } + + const comparator: ?MediaFeatureComparison = comparator: { + if (bun.strings.startsWithCaseInsensitiveAscii(name, "min-")) { + name = name[4..]; + break :comparator .@"greater-than-equal"; + } else if (bun.strings.startsWithCaseInsensitiveAscii(name, "max-")) { + name = name[4..]; + break :comparator .@"less-than-equal"; + } else break :comparator null; + }; + + var free_str = false; + const final_name = if (is_webkit) name: { + // PERF: stack buffer here? + free_str = true; + break :name std.fmt.allocPrint(input.allocator(), "-webkit-{s}", .{name}) catch bun.outOfMemory(); + } else name; + + defer if (is_webkit) { + // If we made an allocation let's try to free it, + // this only works if FeatureId doesn't hold any references to the input string. + // i.e. 
it is an enum + comptime { + std.debug.assert(@typeInfo(FeatureId) == .Enum); + } + input.allocator().free(final_name); + }; + + if (css.parse_utility.parseString( + input.allocator(), + FeatureId, + final_name, + FeatureId.parse, + ).asValue()) |standard| { + return .{ .result = .{ + .{ .standard = standard }, + comparator, + } }; + } + + return .{ .result = .{ + .{ + .unknown = .{ .v = ident }, + }, + null, + } }; + } + }; +} + +fn writeMinMax( + operator: *const MediaFeatureComparison, + comptime FeatureId: type, + name: *const MediaFeatureName(FeatureId), + value: *const MediaFeatureValue, + comptime W: type, + dest: *Printer(W), +) PrintErr!void { + const prefix = switch (operator.*) { + .@"greater-than", .@"greater-than-equal" => "min-", + .@"less-than", .@"less-than-equal" => "max-", + .equal => null, + }; + + if (prefix) |p| { + try name.toCssWithPrefix(p, W, dest); + } else { + try name.toCss(W, dest); + } + + try dest.delim(':', false); + + var adjusted: ?MediaFeatureValue = switch (operator.*) { + .@"greater-than" => value.deepClone(dest.allocator).addF32(dest.allocator, 0.001), + .@"less-than" => value.deepClone(dest.allocator).addF32(dest.allocator, -0.001), + else => null, + }; + + if (adjusted) |*val| { + defer val.deinit(dest.allocator); + try val.toCss(W, dest); + } else { + try value.toCss(W, dest); + } + + return dest.writeChar(')'); +} diff --git a/src/css/prefixes.zig b/src/css/prefixes.zig new file mode 100644 index 0000000000..2a9b29edaf --- /dev/null +++ b/src/css/prefixes.zig @@ -0,0 +1,2201 @@ +// This file is autogenerated by build-prefixes.js. DO NOT EDIT! + +const css = @import("./css_parser.zig"); +const VendorPrefix = css.VendorPrefix; +const Browsers = css.targets.Browsers; + +pub const Feature = enum { + align_content, + align_items, + align_self, + animation, + animation_delay, + animation_direction, + animation_duration, + animation_fill_mode, + animation_iteration_count, + animation_name, + animation_play_state, + animation_timing_function, + any_pseudo, + appearance, + at_keyframes, + at_resolution, + at_viewport, + backdrop_filter, + backface_visibility, + background_clip, + background_origin, + background_size, + border_block_end, + border_block_start, + border_bottom_left_radius, + border_bottom_right_radius, + border_image, + border_inline_end, + border_inline_start, + border_radius, + border_top_left_radius, + border_top_right_radius, + box_decoration_break, + box_shadow, + box_sizing, + break_after, + break_before, + break_inside, + calc, + clip_path, + color_adjust, + column_count, + column_fill, + column_gap, + column_rule, + column_rule_color, + column_rule_style, + column_rule_width, + column_span, + column_width, + columns, + cross_fade, + display_flex, + display_grid, + element, + fill, + fill_available, + filter, + filter_function, + fit_content, + flex, + flex_basis, + flex_direction, + flex_flow, + flex_grow, + flex_shrink, + flex_wrap, + flow_from, + flow_into, + font_feature_settings, + font_kerning, + font_language_override, + font_variant_ligatures, + grab, + grabbing, + grid_area, + grid_column, + grid_column_align, + grid_column_end, + grid_column_start, + grid_row, + grid_row_align, + grid_row_end, + grid_row_start, + grid_template, + grid_template_areas, + grid_template_columns, + grid_template_rows, + hyphens, + image_rendering, + image_set, + inline_flex, + inline_grid, + isolate, + isolate_override, + justify_content, + linear_gradient, + margin_block_end, + margin_block_start, + margin_inline_end, + margin_inline_start, + 
mask, + mask_border, + mask_border_outset, + mask_border_repeat, + mask_border_slice, + mask_border_source, + mask_border_width, + mask_clip, + mask_composite, + mask_image, + mask_origin, + mask_position, + mask_repeat, + mask_size, + max_content, + min_content, + object_fit, + object_position, + order, + overscroll_behavior, + padding_block_end, + padding_block_start, + padding_inline_end, + padding_inline_start, + perspective, + perspective_origin, + pixelated, + place_self, + plaintext, + print_color_adjust, + pseudo_class_any_link, + pseudo_class_autofill, + pseudo_class_fullscreen, + pseudo_class_placeholder_shown, + pseudo_class_read_only, + pseudo_class_read_write, + pseudo_element_backdrop, + pseudo_element_file_selector_button, + pseudo_element_placeholder, + pseudo_element_selection, + radial_gradient, + region_fragment, + repeating_linear_gradient, + repeating_radial_gradient, + scroll_snap_coordinate, + scroll_snap_destination, + scroll_snap_points_x, + scroll_snap_points_y, + scroll_snap_type, + shape_image_threshold, + shape_margin, + shape_outside, + sticky, + stretch, + tab_size, + text_align_last, + text_decoration, + text_decoration_color, + text_decoration_line, + text_decoration_skip, + text_decoration_skip_ink, + text_decoration_style, + text_emphasis, + text_emphasis_color, + text_emphasis_position, + text_emphasis_style, + text_orientation, + text_overflow, + text_size_adjust, + text_spacing, + touch_action, + transform, + transform_origin, + transform_style, + transition, + transition_delay, + transition_duration, + transition_property, + transition_timing_function, + user_select, + writing_mode, + zoom_in, + zoom_out, + + pub fn prefixesFor(this: *const Feature, browsers: Browsers) VendorPrefix { + var prefixes = VendorPrefix{ .none = true }; + switch (this.*) { + .border_radius, .border_top_left_radius, .border_top_right_radius, .border_bottom_right_radius, .border_bottom_left_radius => { + if (browsers.android) |version| { + if (version <= 131328) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.chrome) |version| { + if (version <= 262144) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.firefox) |version| { + if (version >= 131072 and version <= 198144) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .moz = true }); + } + } + if (browsers.ios_saf) |version| { + if (version <= 197120) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.safari) |version| { + if (version >= 196864 and version <= 262144) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .box_shadow => { + if (browsers.android) |version| { + if (version >= 131328 and version <= 196608) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.chrome) |version| { + if (version >= 262144 and version <= 589824) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.firefox) |version| { + if (version >= 197888 and version <= 198144) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .moz = true }); + } + } + if (browsers.ios_saf) |version| { + if (version >= 197120 and version <= 262656) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.safari) |version| { + if (version >= 196864 and version <= 327680) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .animation, .animation_name, .animation_duration, .animation_delay, 
.animation_direction, .animation_fill_mode, .animation_iteration_count, .animation_play_state, .animation_timing_function, .at_keyframes => { + if (browsers.android) |version| { + if (version >= 131328 and version <= 263171) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.chrome) |version| { + if (version >= 262144 and version <= 2752512) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.firefox) |version| { + if (version >= 327680 and version <= 983040) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .moz = true }); + } + } + if (browsers.ios_saf) |version| { + if (version >= 197120 and version <= 524544) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.opera) |version| { + if (version == 786432) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .o = true }); + } + if (version >= 983040 and version <= 1900544) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.safari) |version| { + if (version >= 262144 and version <= 524288) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .transition, .transition_property, .transition_duration, .transition_delay, .transition_timing_function => { + if (browsers.android) |version| { + if (version >= 131328 and version <= 262656) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.chrome) |version| { + if (version >= 262144 and version <= 1638400) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.firefox) |version| { + if (version >= 262144 and version <= 983040) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .moz = true }); + } + } + if (browsers.ios_saf) |version| { + if (version >= 197120 and version <= 393216) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.opera) |version| { + if (version >= 655360 and version <= 786432) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .o = true }); + } + } + if (browsers.safari) |version| { + if (version >= 196864 and version <= 393216) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .transform, .transform_origin => { + if (browsers.android) |version| { + if (version >= 131328 and version <= 263171) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.chrome) |version| { + if (version >= 262144 and version <= 2293760) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.firefox) |version| { + if (version >= 197888 and version <= 983040) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .moz = true }); + } + } + if (browsers.ie) |version| { + if (version <= 589824) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .ms = true }); + } + } + if (browsers.ios_saf) |version| { + if (version >= 197120 and version <= 524544) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.opera) |version| { + if (version >= 656640 and version <= 786432) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .o = true }); + } + if (version >= 983040 and version <= 1441792) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.safari) |version| { + if (version >= 196864 and version <= 524288) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .perspective, .perspective_origin, .transform_style => { + if (browsers.android) |version| { 
+ if (version >= 196608 and version <= 263171) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.chrome) |version| { + if (version >= 786432 and version <= 2293760) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.firefox) |version| { + if (version >= 655360 and version <= 983040) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .moz = true }); + } + } + if (browsers.ios_saf) |version| { + if (version >= 197120 and version <= 524544) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.opera) |version| { + if (version >= 983040 and version <= 1441792) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.safari) |version| { + if (version >= 262144 and version <= 524288) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .backface_visibility => { + if (browsers.android) |version| { + if (version >= 196608 and version <= 263171) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.chrome) |version| { + if (version >= 786432 and version <= 2293760) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.firefox) |version| { + if (version >= 655360 and version <= 983040) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .moz = true }); + } + } + if (browsers.ios_saf) |version| { + if (version >= 197120 and version <= 983552) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.opera) |version| { + if (version >= 983040 and version <= 1441792) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.safari) |version| { + if (version >= 262144 and version <= 983552) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .linear_gradient, .repeating_linear_gradient, .radial_gradient, .repeating_radial_gradient => { + if (browsers.android) |version| { + if (version >= 131328 and version <= 262656) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.chrome) |version| { + if (version >= 262144 and version <= 1638400) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.firefox) |version| { + if (version >= 198144 and version <= 983040) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .moz = true }); + } + } + if (browsers.ios_saf) |version| { + if (version >= 197120 and version <= 393216) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.opera) |version| { + if (version >= 721152 and version <= 786432) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .o = true }); + } + } + if (browsers.safari) |version| { + if (version >= 262144 and version <= 393216) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .box_sizing => { + if (browsers.android) |version| { + if (version >= 131328 and version <= 196608) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.chrome) |version| { + if (version >= 262144 and version <= 589824) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.firefox) |version| { + if (version >= 131072 and version <= 1835008) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .moz = true }); + } + } + if (browsers.ios_saf) |version| { + if (version >= 197120 and version <= 262656) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ 
.webkit = true }); + } + } + if (browsers.safari) |version| { + if (version >= 196864 and version <= 327680) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .filter => { + if (browsers.android) |version| { + if (version >= 263168 and version <= 263171) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.chrome) |version| { + if (version >= 1179648 and version <= 3407872) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.ios_saf) |version| { + if (version >= 393216 and version <= 589824) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.opera) |version| { + if (version >= 983040 and version <= 2555904) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.safari) |version| { + if (version >= 393216 and version <= 589824) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.samsung) |version| { + if (version >= 262144 and version <= 393728) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .filter_function => { + if (browsers.ios_saf) |version| { + if (version >= 589824 and version <= 590592) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.safari) |version| { + if (version <= 589824) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .backdrop_filter => { + if (browsers.edge) |version| { + if (version >= 1114112 and version <= 1179648) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.ios_saf) |version| { + if (version >= 589824 and version <= 1115648) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.safari) |version| { + if (version >= 589824 and version <= 1115648) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .element => { + if (browsers.firefox) |version| { + if (version >= 131072) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .moz = true }); + } + } + }, + .columns, .column_width, .column_gap, .column_rule, .column_rule_color, .column_rule_width, .column_count, .column_rule_style, .column_span, .column_fill => { + if (browsers.android) |version| { + if (version >= 131328 and version <= 263171) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.chrome) |version| { + if (version >= 262144 and version <= 3211264) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.firefox) |version| { + if (version >= 131072 and version <= 3342336) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .moz = true }); + } + } + if (browsers.ios_saf) |version| { + if (version >= 197120 and version <= 524544) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.opera) |version| { + if (version >= 983040 and version <= 2359296) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.safari) |version| { + if (version >= 196864 and version <= 524288) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.samsung) |version| { + if (version <= 262144) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .break_before, .break_after, .break_inside => { + if (browsers.android) |version| { + if (version >= 131328 and version <= 263171) { + prefixes = 
prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.chrome) |version| { + if (version >= 262144 and version <= 3211264) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.ios_saf) |version| { + if (version >= 197120 and version <= 524544) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.opera) |version| { + if (version >= 983040 and version <= 2359296) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.safari) |version| { + if (version >= 196864 and version <= 524288) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.samsung) |version| { + if (version <= 262144) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .user_select => { + if (browsers.android) |version| { + if (version >= 131328 and version <= 263171) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.chrome) |version| { + if (version >= 262144 and version <= 3473408) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.edge) |version| { + if (version >= 786432 and version <= 1179648) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .ms = true }); + } + } + if (browsers.firefox) |version| { + if (version >= 131072 and version <= 4456448) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .moz = true }); + } + } + if (browsers.ie) |version| { + if (version >= 655360) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .ms = true }); + } + } + if (browsers.ios_saf) |version| { + if (version >= 197120) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.opera) |version| { + if (version >= 983040 and version <= 2621440) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.safari) |version| { + if (version >= 196864) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.samsung) |version| { + if (version >= 262144 and version <= 327680) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .display_flex, .inline_flex, .flex, .flex_grow, .flex_shrink, .flex_basis, .flex_direction, .flex_wrap, .flex_flow, .justify_content, .order, .align_items, .align_self, .align_content => { + if (browsers.android) |version| { + if (version >= 131328 and version <= 262656) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.chrome) |version| { + if (version >= 262144 and version <= 1835008) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.firefox) |version| { + if (version >= 131072 and version <= 1376256) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .moz = true }); + } + } + if (browsers.ie) |version| { + if (version == 655360) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .ms = true }); + } + } + if (browsers.ios_saf) |version| { + if (version >= 197120 and version <= 524544) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.opera) |version| { + if (version >= 983040 and version <= 1048576) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.safari) |version| { + if (version >= 196864 and version <= 524288) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .calc => { + if (browsers.chrome) |version| { + if (version >= 1245184 and version <= 
1638400) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.firefox) |version| { + if (version >= 262144 and version <= 983040) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .moz = true }); + } + } + if (browsers.ios_saf) |version| { + if (version <= 393216) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.safari) |version| { + if (version <= 393216) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .background_origin, .background_size => { + if (browsers.android) |version| { + if (version >= 131328 and version <= 131840) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.firefox) |version| { + if (version <= 198144) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .moz = true }); + } + } + if (browsers.opera) |version| { + if (version <= 655360) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .o = true }); + } + } + }, + .background_clip => { + if (browsers.android) |version| { + if (version >= 262144 and version <= 263171) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.chrome) |version| { + if (version >= 262144 and version <= 7798784) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.edge) |version| { + if (version >= 786432 and version <= 917504) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .ms = true }); + } + if (version >= 5177344 and version <= 7798784) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.ios_saf) |version| { + if (version >= 393216 and version <= 852992) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.opera) |version| { + if (version >= 983040 and version <= 6881280) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.safari) |version| { + if (version >= 262144 and version <= 852224) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.samsung) |version| { + if (version >= 262144 and version <= 1572864) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .font_feature_settings, .font_variant_ligatures, .font_language_override => { + if (browsers.android) |version| { + if (version >= 263168 and version <= 263171) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.chrome) |version| { + if (version >= 1048576 and version <= 3080192) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.firefox) |version| { + if (version >= 262144 and version <= 2162688) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .moz = true }); + } + } + if (browsers.opera) |version| { + if (version >= 983040 and version <= 2228224) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.samsung) |version| { + if (version <= 262144) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .font_kerning => { + if (browsers.android) |version| { + if (version <= 263168) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.chrome) |version| { + if (version >= 1900544 and version <= 2097152) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.ios_saf) |version| { + if (version >= 524288 and version <= 721664) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + 
} + if (browsers.opera) |version| { + if (version >= 1048576 and version <= 1245184) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.safari) |version| { + if (version >= 458752 and version <= 589824) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .border_image => { + if (browsers.android) |version| { + if (version >= 131328 and version <= 262656) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.chrome) |version| { + if (version >= 262144 and version <= 917504) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.firefox) |version| { + if (version >= 197888 and version <= 917504) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .moz = true }); + } + } + if (browsers.ios_saf) |version| { + if (version >= 197120 and version <= 327680) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.opera) |version| { + if (version >= 720896 and version <= 786688) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .o = true }); + } + } + if (browsers.safari) |version| { + if (version >= 196864 and version <= 327936) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .pseudo_element_selection => { + if (browsers.firefox) |version| { + if (version >= 131072 and version <= 3997696) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .moz = true }); + } + } + }, + .pseudo_element_placeholder => { + if (browsers.android) |version| { + if (version >= 131328 and version <= 263171) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.chrome) |version| { + if (version >= 262144 and version <= 3670016) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.edge) |version| { + if (version >= 786432 and version <= 1179648) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .ms = true }); + } + } + if (browsers.firefox) |version| { + if (version >= 1179648 and version <= 3276800) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .moz = true }); + } + } + if (browsers.ie) |version| { + if (version >= 655360) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .ms = true }); + } + } + if (browsers.ios_saf) |version| { + if (version >= 262656 and version <= 655360) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.opera) |version| { + if (version >= 983040 and version <= 2818048) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.safari) |version| { + if (version >= 327680 and version <= 655360) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.samsung) |version| { + if (version >= 262144 and version <= 393728) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .pseudo_class_placeholder_shown => { + if (browsers.firefox) |version| { + if (version >= 262144 and version <= 3276800) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .moz = true }); + } + } + if (browsers.ie) |version| { + if (version >= 655360) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .ms = true }); + } + } + }, + .hyphens => { + if (browsers.edge) |version| { + if (version >= 786432 and version <= 1179648) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .ms = true }); + } + } + if (browsers.firefox) |version| { + if (version >= 393216 and version <= 2752512) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .moz = true }); + } + } + 
if (browsers.ie) |version| { + if (version >= 655360) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .ms = true }); + } + } + if (browsers.ios_saf) |version| { + if (version >= 262656 and version <= 1050112) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.safari) |version| { + if (version >= 327936 and version <= 1050112) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .pseudo_class_fullscreen => { + if (browsers.chrome) |version| { + if (version >= 983040 and version <= 4587520) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.firefox) |version| { + if (version >= 655360 and version <= 4128768) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .moz = true }); + } + } + if (browsers.ie) |version| { + if (version >= 720896) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .ms = true }); + } + } + if (browsers.opera) |version| { + if (version >= 983040 and version <= 4128768) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.safari) |version| { + if (version >= 327936 and version <= 1049344) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.samsung) |version| { + if (version >= 262144 and version <= 590336) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .pseudo_element_backdrop => { + if (browsers.android) |version| { + if (version >= 263168 and version <= 263171) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.chrome) |version| { + if (version >= 2097152 and version <= 2359296) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.edge) |version| { + if (version >= 786432 and version <= 1179648) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .ms = true }); + } + } + if (browsers.ie != null) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .ms = true }); + } + if (browsers.opera) |version| { + if (version >= 1245184 and version <= 1507328) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .pseudo_element_file_selector_button => { + if (browsers.android) |version| { + if (version >= 263168 and version <= 263171) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.chrome) |version| { + if (version >= 262144 and version <= 5767168) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.edge) |version| { + if (version >= 786432 and version <= 1179648) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .ms = true }); + } + if (version >= 5177344 and version <= 5767168) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.ie) |version| { + if (version >= 655360) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .ms = true }); + } + } + if (browsers.ios_saf) |version| { + if (version >= 197120 and version <= 917504) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.opera) |version| { + if (version >= 983040 and version <= 4849664) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.safari) |version| { + if (version >= 196864 and version <= 917504) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.samsung) |version| { + if (version >= 262144 and version <= 917504) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + 
.pseudo_class_autofill => { + if (browsers.android) |version| { + if (version >= 263168 and version <= 263171) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.chrome) |version| { + if (version >= 262144 and version <= 7143424) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.edge) |version| { + if (version >= 5177344 and version <= 7143424) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.ios_saf) |version| { + if (version >= 197120 and version <= 918784) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.opera) |version| { + if (version >= 983040 and version <= 6225920) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.safari) |version| { + if (version >= 196864 and version <= 917760) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.samsung) |version| { + if (version >= 262144 and version <= 1310720) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .tab_size => { + if (browsers.firefox) |version| { + if (version >= 262144 and version <= 5898240) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .moz = true }); + } + } + if (browsers.opera) |version| { + if (version >= 656896 and version <= 786688) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .o = true }); + } + } + }, + .max_content, .min_content => { + if (browsers.android) |version| { + if (version >= 263168 and version <= 263171) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.chrome) |version| { + if (version >= 1441792 and version <= 2949120) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.firefox) |version| { + if (version >= 196608 and version <= 4259840) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .moz = true }); + } + } + if (browsers.ios_saf) |version| { + if (version >= 458752 and version <= 852992) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.opera) |version| { + if (version >= 983040 and version <= 2097152) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.safari) |version| { + if (version >= 393472 and version <= 655616) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.samsung) |version| { + if (version <= 262144) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .fill, .fill_available => { + if (browsers.chrome) |version| { + if (version >= 1441792) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.android) |version| { + if (version >= 263168) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.edge) |version| { + if (version >= 5177344) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.firefox) |version| { + if (version >= 196608 and version <= 4259840) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .moz = true }); + } + } + if (browsers.ios_saf) |version| { + if (version >= 458752 and version <= 852992) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.opera) |version| { + if (version >= 983040) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.safari) |version| { + if (version >= 393472 and version <= 655616) { 
+ prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.samsung) |version| { + if (version >= 262144) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .fit_content => { + if (browsers.android) |version| { + if (version >= 263168 and version <= 263171) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.chrome) |version| { + if (version >= 1441792 and version <= 2949120) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.firefox) |version| { + if (version >= 196608 and version <= 6094848) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .moz = true }); + } + } + if (browsers.ios_saf) |version| { + if (version >= 458752 and version <= 852992) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.opera) |version| { + if (version >= 983040 and version <= 2097152) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.safari) |version| { + if (version >= 393472 and version <= 655616) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.samsung) |version| { + if (version <= 262144) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .stretch => { + if (browsers.chrome) |version| { + if (version >= 1441792) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.firefox) |version| { + if (version >= 196608) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .moz = true }); + } + } + if (browsers.android) |version| { + if (version >= 263168) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.edge) |version| { + if (version >= 5177344) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.ios_saf) |version| { + if (version >= 458752) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.opera) |version| { + if (version >= 983040) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.safari) |version| { + if (version >= 458752) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.samsung) |version| { + if (version >= 327680) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .zoom_in, .zoom_out => { + if (browsers.chrome) |version| { + if (version >= 262144 and version <= 2359296) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.firefox) |version| { + if (version >= 131072 and version <= 1507328) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .moz = true }); + } + } + if (browsers.opera) |version| { + if (version >= 983040 and version <= 1507328) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.safari) |version| { + if (version >= 196864 and version <= 524288) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .grab, .grabbing => { + if (browsers.chrome) |version| { + if (version >= 262144 and version <= 4390912) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.firefox) |version| { + if (version >= 131072 and version <= 1703936) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .moz = true }); + } + } + if (browsers.opera) |version| { + if (version >= 983040 and version <= 3538944) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ 
.webkit = true }); + } + } + if (browsers.safari) |version| { + if (version >= 196864 and version <= 655616) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .sticky => { + if (browsers.ios_saf) |version| { + if (version >= 393216 and version <= 786944) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.safari) |version| { + if (version >= 393472 and version <= 786688) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .touch_action => { + if (browsers.ie) |version| { + if (version == 655360) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .ms = true }); + } + } + }, + .text_decoration_skip, .text_decoration_skip_ink => { + if (browsers.ios_saf) |version| { + if (version >= 524288) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.safari) |version| { + if (version >= 459008 and version <= 786432) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .text_decoration => { + if (browsers.ios_saf) |version| { + if (version >= 524288) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.safari) |version| { + if (version >= 524288) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .text_decoration_color, .text_decoration_line, .text_decoration_style => { + if (browsers.firefox) |version| { + if (version >= 393216 and version <= 2293760) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .moz = true }); + } + } + if (browsers.ios_saf) |version| { + if (version >= 524288 and version <= 786432) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.safari) |version| { + if (version >= 524288 and version <= 786432) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .text_size_adjust => { + if (browsers.firefox != null) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .moz = true }); + } + if (browsers.edge) |version| { + if (version >= 786432 and version <= 1179648) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .ms = true }); + } + } + if (browsers.ie) |version| { + if (version >= 655360) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .ms = true }); + } + } + if (browsers.ios_saf) |version| { + if (version >= 327680) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .mask_clip, .mask_composite, .mask_image, .mask_origin, .mask_repeat, .mask_border_repeat, .mask_border_source, .mask, .mask_position, .mask_size, .mask_border, .mask_border_outset, .mask_border_width, .mask_border_slice => { + if (browsers.android) |version| { + if (version >= 131328 and version <= 263171) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.chrome) |version| { + if (version >= 262144 and version <= 7798784) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.edge) |version| { + if (version >= 5177344 and version <= 7798784) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.ios_saf) |version| { + if (version >= 197120 and version <= 983552) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.opera) |version| { + if (version >= 983040 and version <= 6881280) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.safari) |version| { + if (version >= 262144 and version <= 983552) { + prefixes = 
prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.samsung) |version| { + if (version >= 262144 and version <= 1572864) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .clip_path => { + if (browsers.android) |version| { + if (version >= 263168 and version <= 263171) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.chrome) |version| { + if (version >= 1572864 and version <= 3538944) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.ios_saf) |version| { + if (version >= 458752 and version <= 589824) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.opera) |version| { + if (version >= 983040 and version <= 2686976) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.safari) |version| { + if (version >= 458752 and version <= 589824) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.samsung) |version| { + if (version >= 262144 and version <= 327680) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .box_decoration_break => { + if (browsers.chrome) |version| { + if (version >= 1441792) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.android) |version| { + if (version >= 263168) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.edge) |version| { + if (version >= 5177344) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.ios_saf) |version| { + if (version >= 458752) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.opera) |version| { + if (version >= 983040) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.safari) |version| { + if (version >= 393472) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.samsung) |version| { + if (version >= 262144) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .object_fit, .object_position => { + if (browsers.opera) |version| { + if (version >= 656896 and version <= 786688) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .o = true }); + } + } + }, + .shape_margin, .shape_outside, .shape_image_threshold => { + if (browsers.ios_saf) |version| { + if (version >= 524288 and version <= 655360) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.safari) |version| { + if (version >= 459008 and version <= 655360) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .text_overflow => { + if (browsers.opera) |version| { + if (version >= 589824 and version <= 786432) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .o = true }); + } + } + }, + .at_viewport => { + if (browsers.edge) |version| { + if (version >= 786432 and version <= 1179648) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .ms = true }); + } + } + if (browsers.ie) |version| { + if (version >= 655360) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .ms = true }); + } + } + if (browsers.opera) |version| { + if (version >= 720896 and version <= 786688) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .o = true }); + } + } + }, + .at_resolution => { + if (browsers.android) |version| { + if (version >= 131840 and version <= 262656) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); 
+ } + } + if (browsers.chrome) |version| { + if (version >= 262144 and version <= 1835008) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.firefox) |version| { + if (version >= 197888 and version <= 983040) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .moz = true }); + } + } + if (browsers.ios_saf) |version| { + if (version >= 262144 and version <= 984576) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.opera) |version| { + if (version >= 591104 and version <= 786432) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .o = true }); + } + } + if (browsers.safari) |version| { + if (version >= 262144 and version <= 984576) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .text_align_last => { + if (browsers.firefox) |version| { + if (version >= 786432 and version <= 3145728) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .moz = true }); + } + } + }, + .pixelated => { + if (browsers.firefox) |version| { + if (version >= 198144 and version <= 4194304) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .moz = true }); + } + } + if (browsers.ios_saf) |version| { + if (version >= 327680 and version <= 393216) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.opera) |version| { + if (version >= 722432 and version <= 786688) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .o = true }); + } + } + if (browsers.safari) |version| { + if (version <= 393216) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .image_rendering => { + if (browsers.ie) |version| { + if (version >= 458752) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .ms = true }); + } + } + }, + .border_inline_start, .border_inline_end, .margin_inline_start, .margin_inline_end, .padding_inline_start, .padding_inline_end => { + if (browsers.android) |version| { + if (version >= 131328 and version <= 263171) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.chrome) |version| { + if (version >= 262144 and version <= 4456448) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.firefox) |version| { + if (version >= 196608 and version <= 2621440) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .moz = true }); + } + } + if (browsers.ios_saf) |version| { + if (version >= 197120 and version <= 786432) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.opera) |version| { + if (version >= 983040 and version <= 3604480) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.safari) |version| { + if (version >= 196864 and version <= 786432) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.samsung) |version| { + if (version >= 262144 and version <= 590336) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .border_block_start, .border_block_end, .margin_block_start, .margin_block_end, .padding_block_start, .padding_block_end => { + if (browsers.android) |version| { + if (version >= 131328 and version <= 263171) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.chrome) |version| { + if (version >= 262144 and version <= 4456448) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.ios_saf) |version| { + if (version >= 197120 and version <= 786432) { + prefixes = 
prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.opera) |version| { + if (version >= 983040 and version <= 3604480) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.safari) |version| { + if (version >= 196864 and version <= 786432) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.samsung) |version| { + if (version >= 262144 and version <= 590336) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .appearance => { + if (browsers.android) |version| { + if (version >= 131328 and version <= 263171) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.chrome) |version| { + if (version >= 262144 and version <= 5439488) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.edge) |version| { + if (version >= 786432 and version <= 1179648) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .ms = true }); + } + if (version >= 5177344 and version <= 5439488) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.firefox) |version| { + if (version >= 131072 and version <= 5177344) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .moz = true }); + } + } + if (browsers.ie != null) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .ms = true }); + } + if (browsers.ios_saf) |version| { + if (version >= 197120 and version <= 983552) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.opera) |version| { + if (version >= 983040 and version <= 4718592) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.safari) |version| { + if (version >= 196864 and version <= 983552) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.samsung) |version| { + if (version >= 262144 and version <= 851968) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .scroll_snap_type, .scroll_snap_coordinate, .scroll_snap_destination, .scroll_snap_points_x, .scroll_snap_points_y => { + if (browsers.edge) |version| { + if (version >= 786432 and version <= 1179648) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .ms = true }); + } + } + if (browsers.ie) |version| { + if (version >= 655360) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .ms = true }); + } + } + if (browsers.ios_saf) |version| { + if (version >= 589824 and version <= 656128) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.safari) |version| { + if (version >= 589824 and version <= 655616) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .flow_into, .flow_from, .region_fragment => { + if (browsers.chrome) |version| { + if (version >= 983040 and version <= 1179648) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.edge) |version| { + if (version >= 786432 and version <= 1179648) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .ms = true }); + } + } + if (browsers.ie) |version| { + if (version >= 655360) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .ms = true }); + } + } + if (browsers.ios_saf) |version| { + if (version >= 458752 and version <= 720896) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.safari) |version| { + if (version >= 393472 and version <= 720896) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, 
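+ // The integer bounds in these comparisons appear to pack browser versions as
+ // (major << 16) | (minor << 8) | patch, e.g. 1572864 == 24 << 16 (24.0.0) and
+ // 263171 == 0x040403 (4.4.3). A hypothetical helper for decoding the constants
+ // when reading this data:
+ //
+ //   fn decodeVersion(v: u32) struct { major: u16, minor: u8, patch: u8 } {
+ //       return .{
+ //           .major = @intCast(v >> 16),
+ //           .minor = @truncate(v >> 8),
+ //           .patch = @truncate(v),
+ //       };
+ //   }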
+ .image_set => { + if (browsers.android) |version| { + if (version >= 263168 and version <= 263171) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.chrome) |version| { + if (version >= 1376256 and version <= 7340032) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.edge) |version| { + if (version >= 5177344 and version <= 7340032) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.ios_saf) |version| { + if (version >= 393216 and version <= 590592) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.opera) |version| { + if (version >= 983040 and version <= 6422528) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.safari) |version| { + if (version >= 393216 and version <= 590080) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.samsung) |version| { + if (version >= 262144 and version <= 1441792) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .writing_mode => { + if (browsers.android) |version| { + if (version >= 196608 and version <= 263171) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.chrome) |version| { + if (version >= 524288 and version <= 3080192) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.ie) |version| { + if (version >= 328960) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .ms = true }); + } + } + if (browsers.ios_saf) |version| { + if (version >= 327680 and version <= 656128) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.opera) |version| { + if (version >= 983040 and version <= 2228224) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.safari) |version| { + if (version >= 327936 and version <= 655616) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.samsung) |version| { + if (version <= 262144) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .cross_fade => { + if (browsers.chrome) |version| { + if (version >= 1114112) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.android) |version| { + if (version >= 263168) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.edge) |version| { + if (version >= 5177344) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.ios_saf) |version| { + if (version >= 327680 and version <= 590592) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.opera) |version| { + if (version >= 983040) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.safari) |version| { + if (version >= 327936 and version <= 590080) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.samsung) |version| { + if (version >= 262144) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .pseudo_class_read_only, .pseudo_class_read_write => { + if (browsers.firefox) |version| { + if (version >= 196608 and version <= 5046272) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .moz = true }); + } + } + }, + .text_emphasis, .text_emphasis_position, .text_emphasis_style, .text_emphasis_color => { + if (browsers.android) 
|version| { + if (version >= 263168 and version <= 263171) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.chrome) |version| { + if (version >= 1638400 and version <= 6422528) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.edge) |version| { + if (version >= 5177344 and version <= 6422528) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.opera) |version| { + if (version >= 983040 and version <= 5570560) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.safari) |version| { + if (version >= 393472 and version <= 458752) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.samsung) |version| { + if (version >= 262144 and version <= 1114112) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .display_grid, .inline_grid, .grid_template_columns, .grid_template_rows, .grid_row_start, .grid_column_start, .grid_row_end, .grid_column_end, .grid_row, .grid_column, .grid_area, .grid_template, .grid_template_areas, .place_self, .grid_column_align, .grid_row_align => { + if (browsers.edge) |version| { + if (version >= 786432 and version <= 983040) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .ms = true }); + } + } + if (browsers.ie) |version| { + if (version >= 655360) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .ms = true }); + } + } + }, + .text_spacing => { + if (browsers.edge) |version| { + if (version >= 786432 and version <= 1179648) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .ms = true }); + } + } + if (browsers.ie) |version| { + if (version >= 524288) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .ms = true }); + } + } + }, + .pseudo_class_any_link => { + if (browsers.android) |version| { + if (version >= 263168 and version <= 263171) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.chrome) |version| { + if (version >= 983040 and version <= 4194304) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.firefox) |version| { + if (version >= 196608 and version <= 3211264) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .moz = true }); + } + } + if (browsers.ios_saf) |version| { + if (version >= 393216 and version <= 524544) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.opera) |version| { + if (version >= 983040 and version <= 3342336) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.safari) |version| { + if (version >= 393472 and version <= 524288) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.samsung) |version| { + if (version >= 327680 and version <= 524800) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .isolate => { + if (browsers.chrome) |version| { + if (version >= 1048576 and version <= 3080192) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.firefox) |version| { + if (version >= 655360 and version <= 3211264) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .moz = true }); + } + } + if (browsers.ios_saf) |version| { + if (version >= 393216 and version <= 656128) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.opera) |version| { + if (version >= 983040 and version <= 2228224) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit 
= true }); + } + } + if (browsers.safari) |version| { + if (version >= 393216 and version <= 655616) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .plaintext => { + if (browsers.firefox) |version| { + if (version >= 655360 and version <= 3211264) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .moz = true }); + } + } + if (browsers.ios_saf) |version| { + if (version >= 393216 and version <= 656128) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.safari) |version| { + if (version >= 393216 and version <= 655616) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .isolate_override => { + if (browsers.firefox) |version| { + if (version >= 1114112 and version <= 3211264) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .moz = true }); + } + } + if (browsers.ios_saf) |version| { + if (version >= 458752 and version <= 656128) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.safari) |version| { + if (version >= 458752 and version <= 655616) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .overscroll_behavior => { + if (browsers.edge) |version| { + if (version >= 786432 and version <= 1114112) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .ms = true }); + } + } + if (browsers.ie) |version| { + if (version >= 655360) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .ms = true }); + } + } + }, + .text_orientation => { + if (browsers.safari) |version| { + if (version >= 655616 and version <= 852224) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .print_color_adjust, .color_adjust => { + if (browsers.chrome) |version| { + if (version >= 1114112) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.android) |version| { + if (version >= 263168) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.edge) |version| { + if (version >= 5177344) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.firefox) |version| { + if (version >= 3145728 and version <= 6291456) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .moz = true }); + } + } + if (browsers.ios_saf) |version| { + if (version >= 393216 and version <= 983552) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.opera) |version| { + if (version >= 983040) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.safari) |version| { + if (version >= 393216 and version <= 983552) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.samsung) |version| { + if (version >= 262144) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + .any_pseudo => { + if (browsers.chrome) |version| { + if (version >= 786432 and version <= 5701632) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.edge) |version| { + if (version >= 5177344 and version <= 5701632) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.firefox) |version| { + if (version >= 262144 and version <= 5111808) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .moz = true }); + } + } + if (browsers.opera) |version| { + if (version >= 917504 and version <= 4784128) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.safari) |version| { + 
if (version >= 327680 and version <= 851968) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.ios_saf) |version| { + if (version >= 327680 and version <= 851968) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.samsung) |version| { + if (version >= 65536 and version <= 917504) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + if (browsers.android) |version| { + if (version >= 2424832 and version <= 5701632) { + prefixes = prefixes.bitwiseOr(VendorPrefix{ .webkit = true }); + } + } + }, + } + return prefixes; + } + + pub fn isFlex2009(browsers: Browsers) bool { + if (browsers.android) |version| { + if (version >= 131328 and version <= 262656) { + return true; + } + } + if (browsers.chrome) |version| { + if (version >= 262144 and version <= 1310720) { + return true; + } + } + if (browsers.ios_saf) |version| { + if (version >= 197120 and version <= 393216) { + return true; + } + } + if (browsers.safari) |version| { + if (version >= 196864 and version <= 393216) { + return true; + } + } + return false; + } + + pub fn isWebkitGradient(browsers: Browsers) bool { + if (browsers.android) |version| { + if (version >= 131328 and version <= 196608) { + return true; + } + } + if (browsers.chrome) |version| { + if (version >= 262144 and version <= 589824) { + return true; + } + } + if (browsers.ios_saf) |version| { + if (version >= 197120 and version <= 393216) { + return true; + } + } + if (browsers.safari) |version| { + if (version >= 262144 and version <= 393216) { + return true; + } + } + return false; + } +}; diff --git a/src/css/printer.zig b/src/css/printer.zig new file mode 100644 index 0000000000..9d7b029e2a --- /dev/null +++ b/src/css/printer.zig @@ -0,0 +1,480 @@ +const std = @import("std"); +const Allocator = std.mem.Allocator; +const bun = @import("root").bun; +const logger = bun.logger; +const Log = logger.Log; + +pub const css = @import("./css_parser.zig"); +pub const css_values = @import("./values/values.zig"); +const DashedIdent = css_values.ident.DashedIdent; +const Ident = css_values.ident.Ident; +pub const Error = css.Error; +const Location = css.Location; +const PrintErr = css.PrintErr; + +const ArrayList = std.ArrayListUnmanaged; + +const sourcemap = @import("./sourcemap.zig"); + +/// Options that control how CSS is serialized to a string. +pub const PrinterOptions = struct { + /// Whether to minify the CSS, i.e. remove white space. + minify: bool = false, + /// An optional reference to a source map to write mappings into. + /// (Available when the `sourcemap` feature is enabled.) + source_map: ?*sourcemap.SourceMap = null, + /// An optional project root path, used to generate relative paths for sources used in CSS module hashes. + project_root: ?[]const u8 = null, + /// Targets to output the CSS for. + targets: Targets = .{}, + /// Whether to analyze dependencies (i.e. `@import` and `url()`). + /// If true, the dependencies are returned as part of the + /// [ToCssResult](super::stylesheet::ToCssResult). + /// + /// When enabled, `@import` and `url()` dependencies + /// are replaced with hashed placeholders that can be replaced with the final + /// urls later (after bundling). + analyze_dependencies: ?css.dependencies.DependencyOptions = null, + /// A mapping of pseudo classes to replace with class names that can be applied + /// from JavaScript. Useful for polyfills, for example. 
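+ ///
+ /// A minimal sketch (the class name is hypothetical): with
+ /// `.{ .hover = "is-hovered" }`, selectors using `:hover` are printed with the
+ /// `.is-hovered` class instead, so the state can be toggled from JavaScript.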
+ pseudo_classes: ?PseudoClasses = null, + public_path: []const u8 = "", +}; + +/// A mapping of user action pseudo classes to replace with class names. +/// +/// See [PrinterOptions](PrinterOptions). +const PseudoClasses = struct { + /// The class name to replace `:hover` with. + hover: ?[]const u8 = null, + /// The class name to replace `:active` with. + active: ?[]const u8 = null, + /// The class name to replace `:focus` with. + focus: ?[]const u8 = null, + /// The class name to replace `:focus-visible` with. + focus_visible: ?[]const u8 = null, + /// The class name to replace `:focus-within` with. + focus_within: ?[]const u8 = null, +}; + +pub const Targets = css.targets.Targets; + +pub const Features = css.targets.Features; + +const Browsers = css.targets.Browsers; + +/// A `Printer` represents a destination to output serialized CSS, as used in +/// the [ToCss](super::traits::ToCss) trait. It can wrap any destination that +/// implements [std::fmt::Write](std::fmt::Write), such as a [String](String). +/// +/// A `Printer` keeps track of the current line and column position, and uses +/// this to generate a source map if provided in the options. +/// +/// `Printer` also includes helper functions that assist with writing output +/// that respects options such as `minify`, and `css_modules`. +pub fn Printer(comptime Writer: type) type { + return struct { + // #[cfg(feature = "sourcemap")] + sources: ?*const ArrayList([]const u8), + dest: Writer, + loc: Location = Location{ + .source_index = 0, + .line = 0, + .column = 1, + }, + indent_amt: u8 = 0, + line: u32 = 0, + col: u32 = 0, + minify: bool, + targets: Targets, + vendor_prefix: css.VendorPrefix = css.VendorPrefix.empty(), + in_calc: bool = false, + css_module: ?css.CssModule = null, + dependencies: ?ArrayList(css.Dependency) = null, + remove_imports: bool, + pseudo_classes: ?PseudoClasses = null, + indentation_buf: std.ArrayList(u8), + ctx: ?*const css.StyleContext = null, + scratchbuf: std.ArrayList(u8), + error_kind: ?css.PrinterError = null, + import_records: ?*const bun.BabyList(bun.ImportRecord), + public_path: []const u8, + /// NOTE This should be the same mimalloc heap arena allocator + allocator: Allocator, + // TODO: finish the fields + + const This = @This(); + + inline fn getWrittenAmt(writer: Writer) usize { + return switch (Writer) { + ArrayList(u8).Writer => writer.context.self.items.len, + *bun.js_printer.BufferWriter => writer.written.len, + else => @compileError("Dunno what to do with this type yo: " ++ @typeName(Writer)), + }; + } + + /// Returns the current source filename that is being printed. + pub fn filename(this: *const This) []const u8 { + if (this.sources) |sources| { + if (this.loc.source_index < sources.items.len) return sources.items[this.loc.source_index]; + } + return "unknown.css"; + } + + /// Returns whether the indent level is greater than one. + pub fn isNested(this: *const This) bool { + return this.indent_amt > 2; + } + + /// Add an error related to std lib fmt errors + pub fn addFmtError(this: *This) PrintErr!void { + this.error_kind = css.PrinterError{ + .kind = .fmt_error, + .loc = null, + }; + return PrintErr.lol; + } + + pub fn addNoImportRecordError(this: *This) PrintErr!void { + this.error_kind = css.PrinterError{ + .kind = .no_import_records, + .loc = null, + }; + return PrintErr.lol; + } + + /// Returns an error of the given kind at the provided location in the current source file. 
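+ ///
+ /// The error details are recorded on `error_kind`; the returned `PrintErr`
+ /// value only signals that printing should unwind.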
+ pub fn newError(
+ this: *This,
+ kind: css.PrinterErrorKind,
+ maybe_loc: ?css.dependencies.Location,
+ ) PrintErr!void {
+ bun.debugAssert(this.error_kind == null);
+ this.error_kind = css.PrinterError{
+ .kind = kind,
+ .loc = if (maybe_loc) |loc| css.ErrorLocation{
+ .filename = this.filename(),
+ .line = loc.line - 1,
+ .column = loc.column,
+ } else null,
+ };
+ return PrintErr.lol;
+ }
+
+ pub fn deinit(this: *This) void {
+ this.scratchbuf.deinit();
+ this.indentation_buf.deinit();
+ if (this.dependencies) |*dependencies| {
+ dependencies.deinit(this.allocator);
+ }
+ }
+
+ /// If `import_records` is null, then the printer will error when it encounters code that relies on import records (urls())
+ pub fn new(
+ allocator: Allocator,
+ scratchbuf: std.ArrayList(u8),
+ dest: Writer,
+ options: PrinterOptions,
+ import_records: ?*const bun.BabyList(bun.ImportRecord),
+ ) This {
+ return .{
+ .sources = null,
+ .dest = dest,
+ .minify = options.minify,
+ .targets = options.targets,
+ .dependencies = if (options.analyze_dependencies != null) ArrayList(css.Dependency){} else null,
+ .remove_imports = options.analyze_dependencies != null and options.analyze_dependencies.?.remove_imports,
+ .pseudo_classes = options.pseudo_classes,
+ .indentation_buf = std.ArrayList(u8).init(allocator),
+ .import_records = import_records,
+ .scratchbuf = scratchbuf,
+ .allocator = allocator,
+ .public_path = options.public_path,
+ .loc = Location{
+ .source_index = 0,
+ .line = 0,
+ .column = 1,
+ },
+ };
+ }
+
+ pub inline fn getImportRecords(this: *This) PrintErr!*const bun.BabyList(bun.ImportRecord) {
+ if (this.import_records) |import_records| return import_records;
+ try this.addNoImportRecordError();
+ unreachable;
+ }
+
+ pub fn printImportRecord(this: *This, import_record_idx: u32) PrintErr!void {
+ if (this.import_records) |import_records| {
+ const import_record = import_records.at(import_record_idx);
+ const a, const b = bun.bundle_v2.cheapPrefixNormalizer(this.public_path, import_record.path.text);
+ try this.writeStr(a);
+ try this.writeStr(b);
+ return;
+ }
+ return this.addNoImportRecordError();
+ }
+
+ pub inline fn importRecord(this: *Printer(Writer), import_record_idx: u32) PrintErr!*const bun.ImportRecord {
+ if (this.import_records) |import_records| return import_records.at(import_record_idx);
+ try this.addNoImportRecordError();
+ unreachable;
+ }
+
+ pub inline fn getImportRecordUrl(this: *This, import_record_idx: u32) PrintErr![]const u8 {
+ return (try this.importRecord(import_record_idx)).path.text;
+ }
+
+ pub fn context(this: *const Printer(Writer)) ?*const css.StyleContext {
+ return this.ctx;
+ }
+
+ /// To satisfy the `io.Writer` interface.
+ ///
+ /// NOTE: Same constraints as `writeStr`: the `str` param is assumed to not contain any newline characters.
+ pub fn writeAll(this: *This, str: []const u8) !void {
+ return this.writeStr(str) catch std.mem.Allocator.Error.OutOfMemory;
+ }
+
+ pub fn writeComment(this: *This, comment: []const u8) PrintErr!void {
+ _ = this.dest.writeAll(comment) catch {
+ return this.addFmtError();
+ };
+ const new_lines = std.mem.count(u8, comment, "\n");
+ this.line += @intCast(new_lines);
+ this.col = 0;
+ const last_line_start = comment.len - (std.mem.lastIndexOfScalar(u8, comment, '\n') orelse comment.len);
+ this.col += @intCast(last_line_start);
+ return;
+ }
+
+ /// Writes a raw string to the underlying destination.
+ ///
+ /// NOTE: It is assumed that the string does not contain any newline characters.
+ /// If such a string is written, it will break source maps.
+ pub fn writeStr(this: *This, s: []const u8) PrintErr!void {
+ if (comptime bun.Environment.isDebug) {
+ bun.assert(std.mem.indexOfScalar(u8, s, '\n') == null);
+ }
+ this.col += @intCast(s.len);
+ _ = this.dest.writeAll(s) catch {
+ return this.addFmtError();
+ };
+ return;
+ }
+
+ /// Writes a formatted string to the underlying destination.
+ ///
+ /// NOTE: It is assumed that the formatted string does not contain any newline characters.
+ /// If such a string is written, it will break source maps.
+ pub fn writeFmt(this: *This, comptime fmt: []const u8, args: anytype) PrintErr!void {
+ // assuming the writer comes from an ArrayList
+ const start: usize = getWrittenAmt(this.dest);
+ this.dest.print(fmt, args) catch bun.outOfMemory();
+ const written = getWrittenAmt(this.dest) - start;
+ this.col += @intCast(written);
+ }
+
+ fn replaceDots(allocator: Allocator, s: []const u8) []const u8 {
+ var str = allocator.dupe(u8, s) catch bun.outOfMemory();
+ std.mem.replaceScalar(u8, str[0..], '.', '-');
+ return str;
+ }
+
+ /// Writes a CSS identifier to the underlying destination, escaping it
+ /// as appropriate. If the `css_modules` option was enabled, then a hash
+ /// is added, and the mapping is added to the CSS module.
+ pub fn writeIdent(this: *This, ident: []const u8, handle_css_module: bool) PrintErr!void {
+ if (handle_css_module) {
+ if (this.css_module) |*css_module| {
+ const Closure = struct { first: bool, printer: *This };
+ var closure = Closure{ .first = true, .printer = this };
+ css_module.config.pattern.write(
+ css_module.hashes.items[this.loc.source_index],
+ css_module.sources.items[this.loc.source_index],
+ ident,
+ &closure,
+ struct {
+ pub fn writeFn(self: *Closure, s1: []const u8, replace_dots: bool) void {
+ // PERF: stack fallback?
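+ // When `replace_dots` is set, the pattern segment may contain `.` characters,
+ // which `replaceDots` rewrites to `-` before the name is serialized.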
+ const s = if (!replace_dots) s1 else replaceDots(self.printer.allocator, s1); + defer if (replace_dots) self.printer.allocator.free(s); + self.printer.col += @intCast(s.len); + if (self.first) { + self.first = false; + return css.serializer.serializeIdentifier(s, self.printer) catch |e| css.OOM(e); + } else { + return css.serializer.serializeName(s, self.printer) catch |e| css.OOM(e); + } + } + }.writeFn, + ); + + css_module.addLocal(this.allocator, ident, ident, this.loc.source_index); + return; + } + } + + return css.serializer.serializeIdentifier(ident, this) catch return this.addFmtError(); + } + + pub fn writeDashedIdent(this: *This, ident: *const DashedIdent, is_declaration: bool) !void { + try this.writeStr("--"); + + if (this.css_module) |*css_module| { + if (css_module.config.dashed_idents) { + const Fn = struct { + pub fn writeFn(self: *This, s1: []const u8, replace_dots: bool) void { + const s = if (!replace_dots) s1 else replaceDots(self.allocator, s1); + defer if (replace_dots) self.allocator.free(s); + self.col += @intCast(s.len); + return css.serializer.serializeName(s, self) catch |e| css.OOM(e); + } + }; + css_module.config.pattern.write( + css_module.hashes.items[this.loc.source_index], + css_module.sources.items[this.loc.source_index], + ident.v[2..], + this, + Fn.writeFn, + ); + + if (is_declaration) { + css_module.addDashed(this.allocator, ident.v, this.loc.source_index); + } + } + } + + return css.serializer.serializeName(ident.v[2..], this) catch return this.addFmtError(); + } + + pub fn writeByte(this: *This, char: u8) !void { + return this.writeChar(char) catch return Allocator.Error.OutOfMemory; + } + + /// Write a single character to the underlying destination. + pub fn writeChar(this: *This, char: u8) PrintErr!void { + if (char == '\n') { + this.line += 1; + this.col = 0; + } else { + this.col += 1; + } + _ = this.dest.writeByte(char) catch { + return this.addFmtError(); + }; + } + + /// Writes a newline character followed by indentation. + /// If the `minify` option is enabled, then nothing is printed. + pub fn newline(this: *This) PrintErr!void { + if (this.minify) { + return; + } + + try this.writeChar('\n'); + return this.writeIndent(); + } + + /// Writes a delimiter character, followed by whitespace (depending on the `minify` option). + /// If `ws_before` is true, then whitespace is also written before the delimiter. + pub fn delim(this: *This, delim_: u8, ws_before: bool) PrintErr!void { + if (ws_before) { + try this.whitespace(); + } + try this.writeChar(delim_); + return this.whitespace(); + } + + /// Writes a single whitespace character, unless the `minify` option is enabled. + /// + /// Use `write_char` instead if you wish to force a space character to be written, + /// regardless of the `minify` option. 
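+ /// (Here, that means calling `writeChar(' ')` directly.)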
+ pub fn whitespace(this: *This) PrintErr!void { + if (this.minify) return; + return this.writeChar(' '); + } + + pub fn withContext( + this: *This, + selectors: *const css.SelectorList, + closure: anytype, + comptime func: anytype, + ) PrintErr!void { + const parent = if (this.ctx) |ctx| parent: { + this.ctx = null; + break :parent ctx; + } else null; + + const ctx = css.StyleContext{ .selectors = selectors, .parent = parent }; + + this.ctx = &ctx; + const res = func(closure, Writer, this); + this.ctx = parent; + + return res; + } + + pub fn withClearedContext( + this: *This, + closure: anytype, + comptime func: anytype, + ) PrintErr!void { + const parent = if (this.ctx) |ctx| parent: { + this.ctx = null; + break :parent ctx; + } else null; + const res = func(closure, Writer, this); + this.ctx = parent; + return res; + } + + /// Increases the current indent level. + pub fn indent(this: *This) void { + this.indent_amt += 2; + } + + /// Decreases the current indent level. + pub fn dedent(this: *This) void { + this.indent_amt -= 2; + } + + const INDENTS: []const []const u8 = indents: { + const levels = 32; + var indents: [levels][]const u8 = undefined; + for (0..levels) |i| { + const n = i * 2; + var str: [n]u8 = undefined; + for (0..n) |j| { + str[j] = ' '; + } + indents[i] = str; + } + break :indents indents; + }; + + fn getIndent(this: *This, idnt: u8) []const u8 { + // divide by 2 to get index into table + const i = idnt >> 1; + // PERF: may be faster to just do `i < (IDENTS.len - 1) * 2` (e.g. 62 if IDENTS.len == 32) here + if (i < INDENTS.len) { + return INDENTS[i]; + } + if (this.indentation_buf.items.len < idnt) { + this.indentation_buf.appendNTimes(' ', this.indentation_buf.items.len - idnt) catch unreachable; + } else { + this.indentation_buf.items = this.indentation_buf.items[0..idnt]; + } + return this.indentation_buf.items; + } + + fn writeIndent(this: *This) PrintErr!void { + bun.debugAssert(!this.minify); + if (this.indent_amt > 0) { + // try this.writeStr(this.getIndent(this.ident)); + this.dest.writeByteNTimes(' ', this.indent_amt) catch return this.addFmtError(); + } + } + }; +} diff --git a/src/css/properties/align.zig b/src/css/properties/align.zig new file mode 100644 index 0000000000..d5f3f5b716 --- /dev/null +++ b/src/css/properties/align.zig @@ -0,0 +1,1085 @@ +const std = @import("std"); +const bun = @import("root").bun; +const Allocator = std.mem.Allocator; +const ArrayList = std.ArrayListUnmanaged; + +pub const css = @import("../css_parser.zig"); + +const Printer = css.Printer; +const PrintErr = css.PrintErr; + +const LengthPercentage = css.css_values.length.LengthPercentage; + +/// A value for the [align-content](https://www.w3.org/TR/css-align-3/#propdef-align-content) property. +pub const AlignContent = union(enum) { + /// Default alignment. + normal: void, + /// A baseline position. + baseline_position: BaselinePosition, + /// A content distribution keyword. + content_distribution: ContentDistribution, + /// A content position keyword. + content_position: struct { + /// An overflow alignment mode. + overflow: ?OverflowPosition, + /// A content position keyword. 
+ value: ContentPosition, + + pub fn toInner(this: *const @This()) ContentPositionInner { + return .{ + .overflow = this.overflow, + .value = this.value, + }; + } + + pub fn __generateToCss() void {} + + pub fn parse(input: *css.Parser) css.Result(@This()) { + const overflow = OverflowPosition.parse(input).asValue(); + const value = switch (ContentPosition.parse(input)) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + return .{ .result = .{ .overflow = overflow, .value = value } }; + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + }, + + pub usingnamespace css.DeriveParse(@This()); + pub usingnamespace css.DeriveToCss(@This()); + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } +}; + +/// A [``](https://www.w3.org/TR/css-align-3/#typedef-baseline-position) value, +/// as used in the alignment properties. +pub const BaselinePosition = enum { + /// The first baseline. + first, + /// The last baseline. + last, + + pub fn parse(input: *css.Parser) css.Result(@This()) { + const location = input.currentSourceLocation(); + const ident = switch (input.expectIdent()) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + + const BaselinePositionIdent = enum { + baseline, + first, + last, + }; + + const BaselinePositionMap = bun.ComptimeEnumMap(BaselinePositionIdent); + if (BaselinePositionMap.getASCIIICaseInsensitive(ident)) |value| + switch (value) { + .baseline => return .{ .result = BaselinePosition.first }, + .first => { + if (input.expectIdentMatching("baseline").asErr()) |e| return .{ .err = e }; + return .{ .result = BaselinePosition.first }; + }, + .last => { + if (input.expectIdentMatching("baseline").asErr()) |e| return .{ .err = e }; + return .{ .result = BaselinePosition.last }; + }, + } + else + return .{ .err = location.newUnexpectedTokenError(.{ .ident = ident }) }; + } + + pub fn toCss(this: *const BaselinePosition, comptime W: type, dest: *css.Printer(W)) css.PrintErr!void { + return switch (this.*) { + .first => try dest.writeStr("baseline"), + .last => try dest.writeStr("last baseline"), + }; + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } +}; + +/// A value for the [justify-content](https://www.w3.org/TR/css-align-3/#propdef-justify-content) property. +pub const JustifyContent = union(enum) { + /// Default justification. + normal, + /// A content distribution keyword. + content_distribution: ContentDistribution, + /// A content position keyword. + content_position: struct { + /// A content position keyword. + value: ContentPosition, + /// An overflow alignment mode. 
+ overflow: ?OverflowPosition, + + pub fn toInner(this: *const @This()) ContentPositionInner { + return .{ + .overflow = this.overflow, + .value = this.value, + }; + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + }, + /// Justify to the left. + left: struct { + /// An overflow alignment mode. + overflow: ?OverflowPosition, + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + }, + /// Justify to the right. + right: struct { + /// An overflow alignment mode. + overflow: ?OverflowPosition, + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + }, + + pub fn parse(input: *css.Parser) css.Result(@This()) { + if (input.expectIdentMatching("normal").isOk()) { + return .{ .result = .normal }; + } + + if (ContentDistribution.parse(input).asValue()) |val| { + return .{ .result = .{ .content_distribution = val } }; + } + + const overflow = OverflowPosition.parse(input).asValue(); + if (ContentPosition.parse(input).asValue()) |content_position| { + return .{ .result = .{ + .content_position = .{ + .overflow = overflow, + .value = content_position, + }, + } }; + } + + const location = input.currentSourceLocation(); + const ident = switch (input.expectIdent()) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + + const JustifyContentIdent = enum { + left, + right, + }; + + const JustifyContentIdentMap = bun.ComptimeEnumMap(JustifyContentIdent); + if (JustifyContentIdentMap.getASCIIICaseInsensitive(ident)) |value| + return switch (value) { + .left => .{ .result = .{ .left = .{ .overflow = overflow } } }, + .right => .{ .result = .{ .right = .{ .overflow = overflow } } }, + } + else + return .{ .err = location.newUnexpectedTokenError(.{ .ident = ident }) }; + } + + pub fn toCss(this: *const @This(), comptime W: type, dest: *css.Printer(W)) css.PrintErr!void { + return switch (this.*) { + .normal => dest.writeStr("normal"), + .content_distribution => |value| value.toCss(W, dest), + .content_position => |*cp| { + if (cp.overflow) |*overflow| { + try overflow.toCss(W, dest); + try dest.writeStr(" "); + } + return cp.value.toCss(W, dest); + }, + .left => |*l| { + if (l.overflow) |*overflow| { + try overflow.toCss(W, dest); + try dest.writeStr(" "); + } + return dest.writeStr("left"); + }, + .right => |*r| { + if (r.overflow) |*overflow| { + try overflow.toCss(W, dest); + try dest.writeStr(" "); + } + return dest.writeStr("right"); + }, + }; + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } +}; + +/// A value for the [align-self](https://www.w3.org/TR/css-align-3/#align-self-property) property. +pub const AlignSelf = union(enum) { + /// Automatic alignment. + auto, + /// Default alignment. + normal, + /// Item is stretched. 
+ stretch, + /// A baseline position keyword. + baseline_position: BaselinePosition, + /// A self position keyword. + self_position: struct { + /// An overflow alignment mode. + overflow: ?OverflowPosition, + /// A self position keyword. + value: SelfPosition, + + pub fn toInner(this: *const @This()) SelfPositionInner { + return .{ + .overflow = this.overflow, + .value = this.value, + }; + } + + pub fn __generateToCss() void {} + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn parse(input: *css.Parser) css.Result(@This()) { + const overflow = OverflowPosition.parse(input).asValue(); + const self_position = switch (SelfPosition.parse(input)) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + return .{ + .result = .{ + .overflow = overflow, + .value = self_position, + }, + }; + } + }, + + pub usingnamespace css.DeriveParse(@This()); + pub usingnamespace css.DeriveToCss(@This()); + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } +}; + +/// A value for the [justify-self](https://www.w3.org/TR/css-align-3/#justify-self-property) property. +pub const JustifySelf = union(enum) { + /// Automatic justification. + auto, + /// Default justification. + normal, + /// Item is stretched. + stretch, + /// A baseline position keyword. + baseline_position: BaselinePosition, + /// A self position keyword. + self_position: struct { + /// A self position keyword. + value: SelfPosition, + /// An overflow alignment mode. + overflow: ?OverflowPosition, + + pub fn toInner(this: *const @This()) SelfPositionInner { + return .{ + .overflow = this.overflow, + .value = this.value, + }; + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + }, + /// Item is justified to the left. + left: struct { + /// An overflow alignment mode. + overflow: ?OverflowPosition, + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + }, + /// Item is justified to the right. + right: struct { + /// An overflow alignment mode. 
+ overflow: ?OverflowPosition, + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + }, + + pub fn parse(input: *css.Parser) css.Result(@This()) { + if (input.tryParse(css.Parser.expectIdentMatching, .{"auto"}).isOk()) { + return .{ .result = .auto }; + } + + if (input.tryParse(css.Parser.expectIdentMatching, .{"normal"}).isOk()) { + return .{ .result = .normal }; + } + + if (input.tryParse(css.Parser.expectIdentMatching, .{"stretch"}).isOk()) { + return .{ .result = .stretch }; + } + + if (input.tryParse(BaselinePosition.parse, .{}).asValue()) |val| { + return .{ .result = .{ .baseline_position = val } }; + } + + const overflow = input.tryParse(OverflowPosition.parse, .{}).asValue(); + if (input.tryParse(SelfPosition.parse, .{}).asValue()) |self_position| { + return .{ .result = .{ .self_position = .{ .overflow = overflow, .value = self_position } } }; + } + + const location = input.currentSourceLocation(); + const ident = switch (input.expectIdent()) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + const Enum = enum { left, right }; + const Map = bun.ComptimeEnumMap(Enum); + if (Map.getASCIIICaseInsensitive(ident)) |val| return .{ .result = switch (val) { + .left => .{ .left = .{ .overflow = overflow } }, + .right => .{ .right = .{ .overflow = overflow } }, + } }; + return .{ .err = location.newUnexpectedTokenError(.{ .ident = ident }) }; + } + + pub fn toCss(this: *const JustifySelf, comptime W: type, dest: *css.Printer(W)) css.PrintErr!void { + return switch (this.*) { + .auto => try dest.writeStr("auto"), + .normal => try dest.writeStr("normal"), + .stretch => try dest.writeStr("stretch"), + .baseline_position => |*baseline_position| baseline_position.toCss(W, dest), + .self_position => |*self_position| { + if (self_position.overflow) |*overflow| { + try overflow.toCss(W, dest); + try dest.writeStr(" "); + } + + try self_position.value.toCss(W, dest); + }, + .left => |*left| { + if (left.overflow) |*overflow| { + try overflow.toCss(W, dest); + try dest.writeStr(" "); + } + try dest.writeStr("left"); + }, + .right => |*right| { + if (right.overflow) |*overflow| { + try overflow.toCss(W, dest); + try dest.writeStr(" "); + } + try dest.writeStr("right"); + }, + }; + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } +}; + +/// A value for the [align-items](https://www.w3.org/TR/css-align-3/#align-items-property) property. +pub const AlignItems = union(enum) { + /// Default alignment. + normal, + /// Items are stretched. + stretch, + /// A baseline position keyword. + baseline_position: BaselinePosition, + /// A self position keyword. + self_position: struct { + /// An overflow alignment mode. + overflow: ?OverflowPosition, + /// A self position keyword. 
+ value: SelfPosition, + + pub fn toInner(this: *const @This()) SelfPositionInner { + return .{ + .overflow = this.overflow, + .value = this.value, + }; + } + + pub fn parse(input: *css.Parser) css.Result(@This()) { + const overflow = OverflowPosition.parse(input).asValue(); + const self_position = switch (SelfPosition.parse(input)) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + return .{ + .result = .{ + .overflow = overflow, + .value = self_position, + }, + }; + } + + pub fn __generateToCss() void {} + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + }, + + pub usingnamespace css.DeriveParse(@This()); + pub usingnamespace css.DeriveToCss(@This()); + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } +}; + +/// A value for the [justify-items](https://www.w3.org/TR/css-align-3/#justify-items-property) property. +pub const JustifyItems = union(enum) { + /// Default justification. + normal, + /// Items are stretched. + stretch, + /// A baseline position keyword. + baseline_position: BaselinePosition, + /// A self position keyword, with optional overflow position. + self_position: struct { + /// A self position keyword. + value: SelfPosition, + /// An overflow alignment mode. + overflow: ?OverflowPosition, + + pub fn toInner(this: *const @This()) SelfPositionInner { + return .{ + .overflow = this.overflow, + .value = this.value, + }; + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + }, + /// Items are justified to the left, with an optional overflow position. + left: struct { + /// An overflow alignment mode. + overflow: ?OverflowPosition, + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + }, + /// Items are justified to the right, with an optional overflow position. + right: struct { + /// An overflow alignment mode. + overflow: ?OverflowPosition, + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + }, + /// A legacy justification keyword. 
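+ /// Accepted in either order when parsing (e.g. `legacy right` or `right legacy`)
+ /// and always serialized as `legacy` followed by the keyword.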
+ legacy: LegacyJustify, + + pub fn parse(input: *css.Parser) css.Result(@This()) { + if (input.tryParse(css.Parser.expectIdentMatching, .{"normal"}).isOk()) { + return .{ .result = .normal }; + } + + if (input.tryParse(css.Parser.expectIdentMatching, .{"stretch"}).isOk()) { + return .{ .result = .stretch }; + } + + if (input.tryParse(BaselinePosition.parse, .{}).asValue()) |val| { + return .{ .result = .{ .baseline_position = val } }; + } + + if (input.tryParse(LegacyJustify.parse, .{}).asValue()) |val| { + return .{ .result = .{ .legacy = val } }; + } + + const overflow = input.tryParse(OverflowPosition.parse, .{}).asValue(); + if (input.tryParse(SelfPosition.parse, .{}).asValue()) |self_position| { + return .{ .result = .{ .self_position = .{ .overflow = overflow, .value = self_position } } }; + } + + const location = input.currentSourceLocation(); + const ident = switch (input.expectIdent()) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + + const Enum = enum { left, right }; + const Map = bun.ComptimeEnumMap(Enum); + if (Map.getASCIIICaseInsensitive(ident)) |val| return .{ .result = switch (val) { + .left => .{ .left = .{ .overflow = overflow } }, + .right => .{ .right = .{ .overflow = overflow } }, + } }; + return .{ .err = location.newUnexpectedTokenError(.{ .ident = ident }) }; + } + + pub fn toCss(this: *const JustifyItems, comptime W: type, dest: *css.Printer(W)) css.PrintErr!void { + switch (this.*) { + .normal => try dest.writeStr("normal"), + .stretch => try dest.writeStr("stretch"), + .baseline_position => |*val| try val.toCss(W, dest), + .self_position => |*sp| { + if (sp.overflow) |*overflow| { + try overflow.toCss(W, dest); + try dest.writeStr(" "); + } + try sp.value.toCss(W, dest); + }, + .left => |*l| { + if (l.overflow) |*overflow| { + try overflow.toCss(W, dest); + try dest.writeStr(" "); + } + try dest.writeStr("left"); + }, + .right => |*r| { + if (r.overflow) |*overflow| { + try overflow.toCss(W, dest); + try dest.writeStr(" "); + } + try dest.writeStr("right"); + }, + .legacy => |l| try l.toCss(W, dest), + } + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } +}; + +/// A legacy justification keyword, as used in the `justify-items` property. +pub const LegacyJustify = enum { + /// Left justify. + left, + /// Right justify. + right, + /// Centered. 
+ center, + + pub fn parse(input: *css.Parser) css.Result(@This()) { + const location = input.currentSourceLocation(); + const ident = switch (input.expectIdent()) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + + const LegacyJustifyIdent = enum { + legacy, + left, + right, + center, + }; + + const LegacyJustifyMap = bun.ComptimeEnumMap(LegacyJustifyIdent); + if (LegacyJustifyMap.getASCIIICaseInsensitive(ident)) |value| { + switch (value) { + .legacy => { + const inner_location = input.currentSourceLocation(); + const inner_ident = switch (input.expectIdent()) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + const InnerEnum = enum { left, right, center }; + const InnerLegacyJustifyMap = bun.ComptimeEnumMap(InnerEnum); + if (InnerLegacyJustifyMap.getASCIIICaseInsensitive(inner_ident)) |inner_value| { + return switch (inner_value) { + .left => .{ .result = .left }, + .right => .{ .result = .right }, + .center => .{ .result = .center }, + }; + } else { + return .{ .err = inner_location.newUnexpectedTokenError(.{ .ident = inner_ident }) }; + } + }, + .left => { + if (input.expectIdentMatching("legacy").asErr()) |e| return .{ .err = e }; + return .{ .result = .left }; + }, + .right => { + if (input.expectIdentMatching("legacy").asErr()) |e| return .{ .err = e }; + return .{ .result = .right }; + }, + .center => { + if (input.expectIdentMatching("legacy").asErr()) |e| return .{ .err = e }; + return .{ .result = .center }; + }, + } + } + return .{ .err = location.newUnexpectedTokenError(.{ .ident = ident }) }; + } + + pub fn toCss(this: *const @This(), comptime W: type, dest: *css.Printer(W)) css.PrintErr!void { + try dest.writeStr("legacy "); + switch (this.*) { + .left => try dest.writeStr("left"), + .right => try dest.writeStr("right"), + .center => try dest.writeStr("center"), + } + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } +}; + +/// A [gap](https://www.w3.org/TR/css-align-3/#column-row-gap) value, as used in the +/// `column-gap` and `row-gap` properties. +pub const GapValue = union(enum) { + /// Equal to `1em` for multi-column containers, and zero otherwise. + normal, + /// An explicit length. + length_percentage: LengthPercentage, + + pub usingnamespace css.DeriveParse(@This()); + pub usingnamespace css.DeriveToCss(@This()); + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } +}; + +/// A value for the [gap](https://www.w3.org/TR/css-align-3/#gap-shorthand) shorthand property. +pub const Gap = struct { + /// The row gap. + row: GapValue, + /// The column gap. 
+ column: GapValue, + + pub usingnamespace css.DefineShorthand(@This(), css.PropertyIdTag.gap); + + pub const PropertyFieldMap = .{ + .row = "row-gap", + .column = "column-gap", + }; + + pub fn parse(input: *css.Parser) css.Result(@This()) { + const row = switch (@call(.auto, @field(GapValue, "parse"), .{input})) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + const column = switch (input.tryParse(@field(GapValue, "parse"), .{})) { + .result => |v| v, + .err => row, + }; + return .{ .result = .{ .row = row, .column = column } }; + } + + pub fn toCss(this: *const Gap, comptime W: type, dest: *css.Printer(W)) css.PrintErr!void { + try this.row.toCss(W, dest); + if (!this.column.eql(&this.row)) { + try dest.writeStr(" "); + try this.column.toCss(W, dest); + } + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } +}; + +/// A value for the [place-items](https://www.w3.org/TR/css-align-3/#place-items-property) shorthand property. +pub const PlaceItems = struct { + /// The item alignment. + @"align": AlignItems, + /// The item justification. + justify: JustifyItems, + + pub usingnamespace css.DefineShorthand(@This(), css.PropertyIdTag.@"place-items"); + + pub const PropertyFieldMap = .{ + .@"align" = "align-items", + .justify = "justify-items", + }; + + pub const VendorPrefixMap = .{ + .@"align" = true, + }; + + pub fn parse(input: *css.Parser) css.Result(@This()) { + const @"align" = switch (@call(.auto, @field(AlignItems, "parse"), .{input})) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + const justify = switch (input.tryParse(@field(JustifyItems, "parse"), .{})) { + .result => |v| v, + .err => switch (@"align") { + .normal => JustifyItems.normal, + .stretch => JustifyItems.stretch, + .baseline_position => |p| JustifyItems{ .baseline_position = p }, + .self_position => |sp| JustifyItems{ + .self_position = .{ + .overflow = if (sp.overflow) |o| o else null, + .value = sp.value, + }, + }, + }, + }; + + return .{ .result = .{ .@"align" = @"align", .justify = justify } }; + } + + pub fn toCss(this: *const PlaceItems, comptime W: type, dest: *css.Printer(W)) css.PrintErr!void { + try this.@"align".toCss(W, dest); + const is_equal = switch (this.justify) { + .normal => this.@"align".eql(&AlignItems{ .normal = {} }), + .stretch => this.@"align".eql(&AlignItems{ .stretch = {} }), + .baseline_position => |*p| brk: { + if (this.@"align" == .baseline_position) break :brk p.eql(&this.@"align".baseline_position); + break :brk false; + }, + .self_position => |*p| brk: { + if (this.@"align" == .self_position) break :brk p.toInner().eql(&this.@"align".self_position.toInner()); + break :brk false; + }, + else => false, + }; + + if (!is_equal) { + try dest.writeStr(" "); + try this.justify.toCss(W, dest); + } + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } +}; + +/// A value for the [place-self](https://www.w3.org/TR/css-align-3/#place-self-property) shorthand property. +pub const PlaceSelf = struct { + /// The item alignment. + @"align": AlignSelf, + /// The item justification. 
+ justify: JustifySelf, + + pub usingnamespace css.DefineShorthand(@This(), css.PropertyIdTag.@"place-self"); + + pub const PropertyFieldMap = .{ + .@"align" = "align-self", + .justify = "justify-self", + }; + + pub const VendorPrefixMap = .{ + .@"align" = true, + }; + + pub fn parse(input: *css.Parser) css.Result(@This()) { + const @"align" = switch (@call(.auto, @field(AlignSelf, "parse"), .{input})) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + const justify = switch (input.tryParse(@field(JustifySelf, "parse"), .{})) { + .result => |v| v, + .err => switch (@"align") { + .auto => JustifySelf.auto, + .normal => JustifySelf.normal, + .stretch => JustifySelf.stretch, + .baseline_position => |p| JustifySelf{ .baseline_position = p }, + .self_position => |sp| JustifySelf{ + .self_position = .{ + .overflow = if (sp.overflow) |o| o else null, + .value = sp.value, + }, + }, + }, + }; + + return .{ .result = .{ .@"align" = @"align", .justify = justify } }; + } + + pub fn toCss(this: *const PlaceSelf, comptime W: type, dest: *css.Printer(W)) css.PrintErr!void { + try this.@"align".toCss(W, dest); + const is_equal = switch (this.justify) { + .auto => true, + .normal => this.@"align" == .normal, + .stretch => this.@"align" == .stretch, + .baseline_position => |p| switch (this.@"align") { + .baseline_position => |p2| p.eql(&p2), + else => false, + }, + .self_position => |sp| brk: { + if (this.@"align" == .self_position) break :brk sp.toInner().eql(&this.@"align".self_position.toInner()); + break :brk false; + }, + else => false, + }; + + if (!is_equal) { + try dest.writeStr(" "); + try this.justify.toCss(W, dest); + } + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } +}; + +/// A [``](https://www.w3.org/TR/css-align-3/#typedef-self-position) value. +pub const SelfPosition = enum { + /// Item is centered within the container. + center, + /// Item is aligned to the start of the container. + start, + /// Item is aligned to the end of the container. + end, + /// Item is aligned to the edge of the container corresponding to the start side of the item. + @"self-start", + /// Item is aligned to the edge of the container corresponding to the end side of the item. + @"self-end", + /// Item is aligned to the start of the container, within flexbox layouts. + @"flex-start", + /// Item is aligned to the end of the container, within flexbox layouts. + @"flex-end", + + pub usingnamespace css.DefineEnumProperty(@This()); +}; + +/// A value for the [place-content](https://www.w3.org/TR/css-align-3/#place-content) shorthand property. +pub const PlaceContent = struct { + /// The content alignment. + @"align": AlignContent, + /// The content justification. 
+ justify: JustifyContent, + + pub usingnamespace css.DefineShorthand(@This(), css.PropertyIdTag.@"place-content"); + + pub const PropertyFieldMap = .{ + .@"align" = css.PropertyIdTag.@"align-content", + .justify = css.PropertyIdTag.@"justify-content", + }; + + pub const VendorPrefixMap = .{ + .@"align" = true, + .justify = true, + }; + + pub fn parse(input: *css.Parser) css.Result(@This()) { + const @"align" = switch (@call(.auto, @field(AlignContent, "parse"), .{input})) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + const justify = switch (@call(.auto, @field(JustifyContent, "parse"), .{input})) { + .result => |v| v, + .err => |_| switch (@"align") { + .baseline_position => JustifyContent{ .content_position = .{ + .overflow = null, + .value = .start, + } }, + .normal => JustifyContent.normal, + .content_distribution => |value| JustifyContent{ .content_distribution = value }, + .content_position => |pos| JustifyContent{ .content_position = .{ + .overflow = if (pos.overflow) |*overflow| overflow.deepClone(input.allocator()) else null, + .value = pos.value.deepClone(input.allocator()), + } }, + }, + }; + + return .{ .result = .{ .@"align" = @"align", .justify = justify } }; + } + + pub fn toCss(this: *const PlaceContent, comptime W: type, dest: *css.Printer(W)) css.PrintErr!void { + try this.@"align".toCss(W, dest); + const is_equal = switch (this.justify) { + .normal => brk: { + if (this.@"align" == .normal) break :brk true; + break :brk false; + }, + .content_distribution => |*d| brk: { + if (this.@"align" == .content_distribution) break :brk d.eql(&this.@"align".content_distribution); + break :brk false; + }, + .content_position => |*p| brk: { + if (this.@"align" == .content_position) break :brk p.toInner().eql(&this.@"align".content_position.toInner()); + break :brk false; + }, + else => false, + }; + + if (!is_equal) { + try dest.writeStr(" "); + try this.justify.toCss(W, dest); + } + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } +}; + +/// A [``](https://www.w3.org/TR/css-align-3/#typedef-content-distribution) value. +pub const ContentDistribution = enum { + /// Items are spaced evenly, with the first and last items against the edge of the container. + @"space-between", + /// Items are spaced evenly, with half-size spaces at the start and end. + @"space-around", + /// Items are spaced evenly, with full-size spaces at the start and end. + @"space-evenly", + /// Items are stretched evenly to fill free space. + stretch, + + pub usingnamespace css.DefineEnumProperty(@This()); +}; + +/// An [``](https://www.w3.org/TR/css-align-3/#typedef-overflow-position) value. +pub const OverflowPosition = enum { + /// If the size of the alignment subject overflows the alignment container, + /// the alignment subject is instead aligned as if the alignment mode were start. + safe, + /// Regardless of the relative sizes of the alignment subject and alignment + /// container, the given alignment value is honored. + unsafe, + + pub usingnamespace css.DefineEnumProperty(@This()); +}; + +/// A [``](https://www.w3.org/TR/css-align-3/#typedef-content-position) value. +pub const ContentPosition = enum { + /// Content is centered within the container. + center, + /// Content is aligned to the start of the container. + start, + /// Content is aligned to the end of the container. 
+ end, + /// Same as `start` when within a flexbox container. + @"flex-start", + /// Same as `end` when within a flexbox container. + @"flex-end", + + pub usingnamespace css.DefineEnumProperty(@This()); +}; + +pub const SelfPositionInner = struct { + /// An overflow alignment mode. + overflow: ?OverflowPosition, + /// A self position keyword. + value: SelfPosition, + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } +}; + +pub const ContentPositionInner = struct { + /// An overflow alignment mode. + overflow: ?OverflowPosition, + /// A content position keyword. + value: ContentPosition, + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } +}; diff --git a/src/css/properties/animation.zig b/src/css/properties/animation.zig new file mode 100644 index 0000000000..b6136db261 --- /dev/null +++ b/src/css/properties/animation.zig @@ -0,0 +1,161 @@ +const std = @import("std"); +const bun = @import("root").bun; +const Allocator = std.mem.Allocator; +const ArrayList = std.ArrayListUnmanaged; + +pub const css = @import("../css_parser.zig"); + +const SmallList = css.SmallList; +const Printer = css.Printer; +const PrintErr = css.PrintErr; + +const LengthPercentage = css.css_values.length.LengthPercentage; +const CustomIdent = css.css_values.ident.CustomIdent; +const CSSString = css.css_values.string.CSSString; +const CSSNumber = css.css_values.number.CSSNumber; +const LengthPercentageOrAuto = css.css_values.length.LengthPercentageOrAuto; +const Size2D = css.css_values.size.Size2D; +const DashedIdent = css.css_values.ident.DashedIdent; + +/// A list of animations. +pub const AnimationList = SmallList(Animation, 1); + +/// A list of animation names. +pub const AnimationNameList = SmallList(AnimationName, 1); + +/// A value for the [animation](https://drafts.csswg.org/css-animations/#animation) shorthand property. +pub const Animation = @compileError(css.todo_stuff.depth); + +/// A value for the [animation-name](https://drafts.csswg.org/css-animations/#animation-name) property. +pub const AnimationName = union(enum) { + /// The `none` keyword. + none, + /// An identifier of a `@keyframes` rule. + ident: CustomIdent, + /// A `` name of a `@keyframes` rule. + string: CSSString, + + // ~toCssImpl + const This = @This(); + + pub fn hash(this: *const @This(), hasher: *std.hash.Wyhash) void { + return css.implementHash(@This(), this, hasher); + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn toCss(this: *const This, comptime W: type, dest: *Printer(W)) PrintErr!void { + _ = this; // autofix + _ = dest; // autofix + @panic(css.todo_stuff.depth); + } +}; + +/// A value for the [animation-iteration-count](https://drafts.csswg.org/css-animations/#animation-iteration-count) property. +pub const AnimationIterationCount = union(enum) { + /// The animation will repeat the specified number of times. + number: CSSNumber, + /// The animation will repeat forever. + infinite, +}; + +/// A value for the [animation-direction](https://drafts.csswg.org/css-animations/#animation-direction) property. +pub const AnimationDirection = css.DefineEnumProperty(@compileError(css.todo_stuff.depth)); + +/// A value for the [animation-play-state](https://drafts.csswg.org/css-animations/#animation-play-state) property. 
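+/// (Placeholder: the definition passes `@compileError(css.todo_stuff.depth)` to `DefineEnumProperty`, so referencing this type is a compile error until it is implemented.)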
+pub const AnimationPlayState = css.DefineEnumProperty(@compileError(css.todo_stuff.depth)); + +/// A value for the [animation-fill-mode](https://drafts.csswg.org/css-animations/#animation-fill-mode) property. +pub const AnimationFillMode = css.DefineEnumProperty(@compileError(css.todo_stuff.depth)); + +/// A value for the [animation-composition](https://drafts.csswg.org/css-animations-2/#animation-composition) property. +pub const AnimationComposition = css.DefineEnumProperty(@compileError(css.todo_stuff.depth)); + +/// A value for the [animation-timeline](https://drafts.csswg.org/css-animations-2/#animation-timeline) property. +pub const AnimationTimeline = union(enum) { + /// The animation's timeline is a DocumentTimeline, more specifically the default document timeline. + auto, + /// The animation is not associated with a timeline. + none, + /// A timeline referenced by name. + dashed_ident: DashedIdent, + /// The scroll() function. + scroll: ScrollTimeline, + /// The view() function. + view: ViewTimeline, +}; + +/// The [scroll()](https://drafts.csswg.org/scroll-animations-1/#scroll-notation) function. +pub const ScrollTimeline = struct { + /// Specifies which element to use as the scroll container. + scroller: Scroller, + /// Specifies which axis of the scroll container to use as the progress for the timeline. + axis: ScrollAxis, +}; + +/// The [view()](https://drafts.csswg.org/scroll-animations-1/#view-notation) function. +pub const ViewTimeline = struct { + /// Specifies which axis of the scroll container to use as the progress for the timeline. + axis: ScrollAxis, + /// Provides an adjustment of the view progress visibility range. + inset: Size2D(LengthPercentageOrAuto), +}; + +/// A scroller, used in the `scroll()` function. +pub const Scroller = @compileError(css.todo_stuff.depth); + +/// A scroll axis, used in the `scroll()` function. +pub const ScrollAxis = @compileError(css.todo_stuff.depth); + +/// A value for the animation-range shorthand property. +pub const AnimationRange = struct { + /// The start of the animation's attachment range. + start: AnimationRangeStart, + /// The end of the animation's attachment range. + end: AnimationRangeEnd, +}; + +/// A value for the [animation-range-start](https://drafts.csswg.org/scroll-animations/#animation-range-start) property. +pub const AnimationRangeStart = struct { + v: AnimationAttachmentRange, +}; + +/// A value for the [animation-range-end](https://drafts.csswg.org/scroll-animations/#animation-range-start) property. +pub const AnimationRangeEnd = struct { + v: AnimationAttachmentRange, +}; + +/// A value for the [animation-range-start](https://drafts.csswg.org/scroll-animations/#animation-range-start) +/// or [animation-range-end](https://drafts.csswg.org/scroll-animations/#animation-range-end) property. +pub const AnimationAttachmentRange = union(enum) { + /// The start of the animation's attachment range is the start of its associated timeline. + normal, + /// The animation attachment range starts at the specified point on the timeline measuring from the start of the timeline. + length_percentage: LengthPercentage, + /// The animation attachment range starts at the specified point on the timeline measuring from the start of the specified named timeline range. + timeline_range: struct { + /// The name of the timeline range. + name: TimelineRangeName, + /// The offset from the start of the named timeline range. 
+ offset: LengthPercentage, + }, +}; + +/// A [view progress timeline range](https://drafts.csswg.org/scroll-animations/#view-timelines-ranges) +pub const TimelineRangeName = enum { + /// Represents the full range of the view progress timeline. + cover, + /// Represents the range during which the principal box is either fully contained by, + /// or fully covers, its view progress visibility range within the scrollport. + contain, + /// Represents the range during which the principal box is entering the view progress visibility range. + entry, + /// Represents the range during which the principal box is exiting the view progress visibility range. + exit, + /// Represents the range during which the principal box crosses the end border edge. + entry_crossing, + /// Represents the range during which the principal box crosses the start border edge. + exit_crossing, +}; diff --git a/src/css/properties/background.zig b/src/css/properties/background.zig new file mode 100644 index 0000000000..438b43ade6 --- /dev/null +++ b/src/css/properties/background.zig @@ -0,0 +1,1054 @@ +const std = @import("std"); +const bun = @import("root").bun; +const Allocator = std.mem.Allocator; +const ArrayList = std.ArrayListUnmanaged; + +pub const css = @import("../css_parser.zig"); + +const Property = css.Property; +const VendorPrefix = css.VendorPrefix; +const SmallList = css.SmallList; +const Printer = css.Printer; +const PrintErr = css.PrintErr; + +const LengthPercentage = css.css_values.length.LengthPercentage; +const CustomIdent = css.css_values.ident.CustomIdent; +const CSSString = css.css_values.string.CSSString; +const CSSNumber = css.css_values.number.CSSNumber; +const LengthPercentageOrAuto = css.css_values.length.LengthPercentageOrAuto; +const Size2D = css.css_values.size.Size2D; +const DashedIdent = css.css_values.ident.DashedIdent; +const Image = css.css_values.image.Image; +const CssColor = css.css_values.color.CssColor; +const Ratio = css.css_values.ratio.Ratio; +const HorizontalPosition = css.css_values.position.HorizontalPosition; +const VerticalPosition = css.css_values.position.VerticalPosition; + +const Position = css.css_values.position.Position; + +/// A value for the [background](https://www.w3.org/TR/css-backgrounds-3/#background) shorthand property. +pub const Background = struct { + /// The background image. + image: Image, + /// The background color. + color: CssColor, + /// The background position. + position: BackgroundPosition, + /// How the background image should repeat. + repeat: BackgroundRepeat, + /// The size of the background image. + size: BackgroundSize, + /// The background attachment. + attachment: BackgroundAttachment, + /// The background origin. + origin: BackgroundOrigin, + /// How the background should be clipped. + clip: BackgroundClip, + + pub fn deinit(_: *@This(), _: Allocator) void { + // TODO: implement this + // not necessary right now because all allocations in CSS parser are in arena + } + + pub fn parse(input: *css.Parser) css.Result(@This()) { + var color: ?CssColor = null; + var position: ?BackgroundPosition = null; + var size: ?BackgroundSize = null; + var image: ?Image = null; + var repeat: ?BackgroundRepeat = null; + var attachment: ?BackgroundAttachment = null; + var origin: ?BackgroundOrigin = null; + var clip: ?BackgroundClip = null; + + while (true) { + // TODO: only allowed on the last background. 
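+            // Each pass through this loop tries to parse one component that has not been
+            // seen yet (color, position[/size], image, repeat, attachment, origin, clip)
+            // and breaks once nothing new matches. For example, `background: red url(img.png) repeat-y`
+            // fills in `color`, `image`, and `repeat`; the remaining components fall back
+            // to their defaults after the loop.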
+ if (color == null) { + if (input.tryParse(CssColor.parse, .{}).asValue()) |value| { + color = value; + continue; + } + } + + if (position == null) { + if (input.tryParse(BackgroundPosition.parse, .{}).asValue()) |value| { + position = value; + + size = input.tryParse(struct { + fn parse(i: *css.Parser) css.Result(BackgroundSize) { + if (i.expectDelim('/').asErr()) |e| return .{ .err = e }; + return BackgroundSize.parse(i); + } + }.parse, .{}).asValue(); + + continue; + } + } + + if (image == null) { + if (input.tryParse(Image.parse, .{}).asValue()) |value| { + image = value; + continue; + } + } + + if (repeat == null) { + if (input.tryParse(BackgroundRepeat.parse, .{}).asValue()) |value| { + repeat = value; + continue; + } + } + + if (attachment == null) { + if (input.tryParse(BackgroundAttachment.parse, .{}).asValue()) |value| { + attachment = value; + continue; + } + } + + if (origin == null) { + if (input.tryParse(BackgroundOrigin.parse, .{}).asValue()) |value| { + origin = value; + continue; + } + } + + if (clip == null) { + if (input.tryParse(BackgroundClip.parse, .{}).asValue()) |value| { + clip = value; + continue; + } + } + + break; + } + + if (clip == null) { + if (origin) |o| { + clip = @as(BackgroundClip, @enumFromInt(@intFromEnum(o))); + } + } + + return .{ .result = .{ + .image = image orelse Image.default(), + .color = color orelse CssColor.default(), + .position = position orelse BackgroundPosition.default(), + .repeat = repeat orelse BackgroundRepeat.default(), + .size = size orelse BackgroundSize.default(), + .attachment = attachment orelse BackgroundAttachment.default(), + .origin = origin orelse .@"padding-box", + .clip = clip orelse .@"border-box", + } }; + } + + pub fn toCss(this: *const Background, comptime W: type, dest: *Printer(W)) PrintErr!void { + var has_output = false; + + if (!this.color.eql(&CssColor.default())) { + try this.color.toCss(W, dest); + has_output = true; + } + + if (!this.image.eql(&Image.default())) { + if (has_output) try dest.writeStr(" "); + try this.image.toCss(W, dest); + has_output = true; + } + + const position: Position = this.position.intoPosition(); + if (!position.isZero() or !this.size.eql(&BackgroundSize.default())) { + if (has_output) { + try dest.writeStr(" "); + } + try position.toCss(W, dest); + + if (!this.size.eql(&BackgroundSize.default())) { + try dest.delim('/', true); + try this.size.toCss(W, dest); + } + + has_output = true; + } + + if (!this.repeat.eql(&BackgroundRepeat.default())) { + if (has_output) try dest.writeStr(" "); + try this.repeat.toCss(W, dest); + has_output = true; + } + + if (!this.attachment.eql(&BackgroundAttachment.default())) { + if (has_output) try dest.writeStr(" "); + try this.attachment.toCss(W, dest); + has_output = true; + } + + const output_padding_box = !this.origin.eql(&BackgroundOrigin.@"padding-box") or + (!this.clip.eqlOrigin(&BackgroundOrigin.@"border-box") and this.clip.isBackgroundBox()); + + if (output_padding_box) { + if (has_output) try dest.writeStr(" "); + try this.origin.toCss(W, dest); + has_output = true; + } + + if ((output_padding_box and !this.clip.eqlOrigin(&BackgroundOrigin.@"border-box")) or + !this.clip.eqlOrigin(&BackgroundOrigin.@"border-box")) + { + if (has_output) try dest.writeStr(" "); + + try this.clip.toCss(W, dest); + has_output = true; + } + + // If nothing was output, then this is the initial value, e.g. 
background: transparent + if (!has_output) { + if (dest.minify) { + // `0 0` is the shortest valid background value + try this.position.toCss(W, dest); + } else { + try dest.writeStr("none"); + } + } + } + + pub fn getImage(this: *const @This()) *const Image { + return &this.image; + } + + pub fn withImage(this: *const @This(), allocator: Allocator, image: Image) @This() { + var ret = this.*; + ret.image = .none; + ret = ret.deepClone(allocator); + ret.image = image; + return ret; + } + + pub fn getFallback(this: *const @This(), allocator: Allocator, kind: css.ColorFallbackKind) Background { + var ret: Background = this.*; + // Dummy values for the clone + ret.color = CssColor.default(); + ret.image = Image.default(); + ret = ret.deepClone(allocator); + ret.color = this.color.getFallback(allocator, kind); + ret.image = this.image.getFallback(allocator, kind); + return ret; + } + + pub fn getNecessaryFallbacks(this: *const @This(), targets: css.targets.Targets) css.ColorFallbackKind { + return this.color.getNecessaryFallbacks(targets).bitwiseOr(this.getImage().getNecessaryFallbacks(targets)); + } + + pub inline fn deepClone(this: *const @This(), allocator: Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } +}; + +/// A value for the [background-size](https://www.w3.org/TR/css-backgrounds-3/#background-size) property. +pub const BackgroundSize = union(enum) { + /// An explicit background size. + explicit: struct { + /// The width of the background. + width: css.css_values.length.LengthPercentageOrAuto, + /// The height of the background. + height: css.css_values.length.LengthPercentageOrAuto, + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub inline fn deepClone(this: *const @This(), allocator: Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + }, + /// The `cover` keyword. Scales the background image to cover both the width and height of the element. + cover, + /// The `contain` keyword. Scales the background image so that it fits within the element. 
+ contain, + + pub fn parse(input: *css.Parser) css.Result(@This()) { + if (input.tryParse(LengthPercentageOrAuto.parse, .{}).asValue()) |width| { + const height = input.tryParse(LengthPercentageOrAuto.parse, .{}).unwrapOr(.auto); + return .{ .result = .{ .explicit = .{ .width = width, .height = height } } }; + } + + const location = input.currentSourceLocation(); + const ident = switch (input.expectIdent()) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + + if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(ident, "cover")) { + return .{ .result = .cover }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(ident, "contain")) { + return .{ .result = .contain }; + } else { + return .{ .err = location.newBasicUnexpectedTokenError(.{ .ident = ident }) }; + } + } + + pub fn toCss(this: *const BackgroundSize, comptime W: type, dest: *Printer(W)) PrintErr!void { + return switch (this.*) { + .cover => dest.writeStr("cover"), + .contain => dest.writeStr("contain"), + .explicit => |explicit| { + try explicit.width.toCss(W, dest); + if (explicit.height != .auto) { + try dest.writeStr(" "); + try explicit.height.toCss(W, dest); + } + return; + }, + }; + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn default() @This() { + return BackgroundSize{ .explicit = .{ + .width = .auto, + .height = .auto, + } }; + } + + pub inline fn deepClone(this: *const @This(), allocator: Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } +}; + +/// A value for the [background-position](https://drafts.csswg.org/css-backgrounds/#background-position) shorthand property. +pub const BackgroundPosition = struct { + /// The x-position. + x: HorizontalPosition, + /// The y-position. + y: VerticalPosition, + + pub usingnamespace css.DefineListShorthand(@This()); + + const PropertyFieldMap = .{ + .x = css.PropertyIdTag.@"background-position-x", + .y = css.PropertyIdTag.@"background-position-y", + }; + + pub fn parse(input: *css.Parser) css.Result(@This()) { + const pos = switch (css.css_values.position.Position.parse(input)) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + return .{ .result = BackgroundPosition.fromPosition(pos) }; + } + + pub fn toCss(this: *const BackgroundPosition, comptime W: type, dest: *Printer(W)) PrintErr!void { + const pos = this.intoPosition(); + return pos.toCss(W, dest); + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn default() @This() { + return BackgroundPosition.fromPosition(Position.default()); + } + + pub fn fromPosition(pos: Position) BackgroundPosition { + return BackgroundPosition{ .x = pos.x, .y = pos.y }; + } + + pub fn intoPosition(this: *const BackgroundPosition) Position { + return Position{ .x = this.x, .y = this.y }; + } + + pub inline fn deepClone(this: *const @This(), allocator: Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } +}; + +/// A value for the [background-repeat](https://www.w3.org/TR/css-backgrounds-3/#background-repeat) property. +pub const BackgroundRepeat = struct { + /// A repeat style for the x direction. + x: BackgroundRepeatKeyword, + /// A repeat style for the y direction. 
+ y: BackgroundRepeatKeyword, + + pub fn default() @This() { + return BackgroundRepeat{ + .x = .repeat, + .y = .repeat, + }; + } + + pub fn parse(input: *css.Parser) css.Result(@This()) { + const state = input.state(); + const ident = switch (input.expectIdent()) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + + if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(ident, "repeat-x")) { + return .{ .result = .{ .x = .repeat, .y = .@"no-repeat" } }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(ident, "repeat-y")) { + return .{ .result = .{ .x = .@"no-repeat", .y = .repeat } }; + } + + input.reset(&state); + + const x = switch (BackgroundRepeatKeyword.parse(input)) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + + const y = input.tryParse(BackgroundRepeatKeyword.parse, .{}).unwrapOrNoOptmizations(x); + + return .{ .result = .{ .x = x, .y = y } }; + } + + pub fn toCss(this: *const BackgroundRepeat, comptime W: type, dest: *Printer(W)) PrintErr!void { + const Repeat = BackgroundRepeatKeyword.repeat; + const NoRepeat = BackgroundRepeatKeyword.@"no-repeat"; + + if (this.x == Repeat and this.y == NoRepeat) { + return dest.writeStr("repeat-x"); + } else if (this.x == NoRepeat and this.y == Repeat) { + return dest.writeStr("repeat-y"); + } else { + try this.x.toCss(W, dest); + if (this.y != this.x) { + try dest.writeStr(" "); + try this.y.toCss(W, dest); + } + } + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn deepClone(this: *const @This(), allocator: Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } +}; + +/// A [``](https://www.w3.org/TR/css-backgrounds-3/#typedef-repeat-style) value, +/// used within the `background-repeat` property to represent how a background image is repeated +/// in a single direction. +/// +/// See [BackgroundRepeat](BackgroundRepeat). +pub const BackgroundRepeatKeyword = enum { + /// The image is repeated in this direction. + repeat, + /// The image is repeated so that it fits, and then spaced apart evenly. + space, + /// The image is scaled so that it repeats an even number of times. + round, + /// The image is placed once and not repeated in this direction. + @"no-repeat", + + pub usingnamespace css.DefineEnumProperty(@This()); +}; + +/// A value for the [background-attachment](https://www.w3.org/TR/css-backgrounds-3/#background-attachment) property. +pub const BackgroundAttachment = enum { + /// The background scrolls with the container. + scroll, + /// The background is fixed to the viewport. + fixed, + /// The background is fixed with regard to the element's contents. + local, + + pub usingnamespace css.DefineEnumProperty(@This()); + + pub fn default() @This() { + return .scroll; + } +}; + +/// A value for the [background-origin](https://www.w3.org/TR/css-backgrounds-3/#background-origin) property. +pub const BackgroundOrigin = enum { + /// The position is relative to the border box. + @"border-box", + /// The position is relative to the padding box. + @"padding-box", + /// The position is relative to the content box. + @"content-box", + + pub usingnamespace css.DefineEnumProperty(@This()); +}; + +/// A value for the [background-clip](https://drafts.csswg.org/css-backgrounds-4/#background-clip) property. +pub const BackgroundClip = enum { + /// The background is clipped to the border box. + @"border-box", + /// The background is clipped to the padding box. 
+ @"padding-box", + /// The background is clipped to the content box. + @"content-box", + /// The background is clipped to the area painted by the border. + border, + /// The background is clipped to the text content of the element. + text, + + pub usingnamespace css.DefineEnumProperty(@This()); + + pub fn default() BackgroundClip { + return .@"border-box"; + } + + pub fn eqlOrigin(this: *const @This(), other: *const BackgroundOrigin) bool { + return switch (this.*) { + .@"border-box" => other.* == .@"border-box", + .@"padding-box" => other.* == .@"padding-box", + .@"content-box" => other.* == .@"content-box", + else => false, + }; + } + + pub fn isBackgroundBox(this: *const @This()) bool { + return switch (this.*) { + .@"border-box", .@"padding-box", .@"content-box" => true, + else => false, + }; + } +}; + +/// A value for the [aspect-ratio](https://drafts.csswg.org/css-sizing-4/#aspect-ratio) property. +pub const AspectRatio = struct { + /// The `auto` keyword. + auto: bool, + /// A preferred aspect ratio for the box, specified as width / height. + ratio: ?Ratio, +}; + +pub const BackgroundProperty = packed struct(u16) { + @"background-color": bool = false, + @"background-image": bool = false, + @"background-position-x": bool = false, + @"background-position-y": bool = false, + @"background-repeat": bool = false, + @"background-size": bool = false, + @"background-attachment": bool = false, + @"background-origin": bool = false, + @"background-clip": bool = false, + __unused: u7 = 0, + + pub usingnamespace css.Bitflags(@This()); + + pub const @"background-color" = BackgroundProperty{ .@"background-color" = true }; + pub const @"background-image" = BackgroundProperty{ .@"background-image" = true }; + pub const @"background-position-x" = BackgroundProperty{ .@"background-position-x" = true }; + pub const @"background-position-y" = BackgroundProperty{ .@"background-position-y" = true }; + pub const @"background-position" = BackgroundProperty{ .@"background-position-x" = true, .@"background-position-y" = true }; + pub const @"background-repeat" = BackgroundProperty{ .@"background-repeat" = true }; + pub const @"background-size" = BackgroundProperty{ .@"background-size" = true }; + pub const @"background-attachment" = BackgroundProperty{ .@"background-attachment" = true }; + pub const @"background-origin" = BackgroundProperty{ .@"background-origin" = true }; + pub const @"background-clip" = BackgroundProperty{ .@"background-clip" = true }; + pub const background = BackgroundProperty{ + .@"background-color" = true, + .@"background-image" = true, + .@"background-position-x" = true, + .@"background-position-y" = true, + .@"background-repeat" = true, + .@"background-size" = true, + .@"background-attachment" = true, + .@"background-origin" = true, + .@"background-clip" = true, + }; + + pub fn fromPropertyId(property_id: css.PropertyId) ?BackgroundProperty { + return switch (property_id) { + .@"background-color" => BackgroundProperty{ .@"background-color" = true }, + .@"background-image" => BackgroundProperty{ .@"background-image" = true }, + .@"background-position-x" => BackgroundProperty{ .@"background-position-x" = true }, + .@"background-position-y" => BackgroundProperty{ .@"background-position-y" = true }, + .@"background-position" => BackgroundProperty{ .@"background-position-x" = true, .@"background-position-y" = true }, + .@"background-repeat" => BackgroundProperty{ .@"background-repeat" = true }, + .@"background-size" => BackgroundProperty{ .@"background-size" = true }, + 
.@"background-attachment" => BackgroundProperty{ .@"background-attachment" = true }, + .@"background-origin" => BackgroundProperty{ .@"background-origin" = true }, + .background => BackgroundProperty{ + .@"background-color" = true, + .@"background-image" = true, + .@"background-position-x" = true, + .@"background-position-y" = true, + .@"background-repeat" = true, + .@"background-size" = true, + .@"background-attachment" = true, + .@"background-origin" = true, + .@"background-clip" = true, + }, + else => null, + }; + } +}; + +pub const BackgroundHandler = struct { + color: ?CssColor = null, + images: ?css.SmallList(Image, 1) = null, + has_prefix: bool = false, + x_positions: ?css.SmallList(HorizontalPosition, 1) = null, + y_positions: ?css.SmallList(VerticalPosition, 1) = null, + repeats: ?css.SmallList(BackgroundRepeat, 1) = null, + sizes: ?css.SmallList(BackgroundSize, 1) = null, + attachments: ?css.SmallList(BackgroundAttachment, 1) = null, + origins: ?css.SmallList(BackgroundOrigin, 1) = null, + clips: ?struct { css.SmallList(BackgroundClip, 1), VendorPrefix } = null, + decls: ArrayList(Property) = undefined, + flushed_properties: BackgroundProperty = undefined, + has_any: bool = false, + + pub fn handleProperty( + this: *BackgroundHandler, + property: *const Property, + dest: *css.DeclarationList, + context: *css.PropertyHandlerContext, + ) bool { + const allocator = context.allocator; + switch (property.*) { + .@"background-color" => |*val| { + this.flushHelper(allocator, "color", CssColor, val, dest, context); + this.color = val.deepClone(allocator); + }, + .@"background-image" => |*val| { + this.backgroundHelper(allocator, SmallList(Image, 1), val, property, dest, context); + this.images = val.deepClone(allocator); + }, + .@"background-position" => |val| { + const x_positions = this.initSmallListHelper(HorizontalPosition, 1, "x_positions", allocator, val.len()); + const y_positions = this.initSmallListHelper(VerticalPosition, 1, "y_positions", allocator, val.len()); + for (val.slice(), x_positions, y_positions) |position, *x, *y| { + x.* = position.x.deepClone(allocator); + y.* = position.y.deepClone(allocator); + } + }, + .@"background-position-x" => |val| { + if (this.x_positions) |*x_positions| x_positions.deinit(allocator); + this.x_positions = val.deepClone(allocator); + }, + .@"background-position-y" => |val| { + if (this.y_positions) |*y_positions| y_positions.deinit(allocator); + this.y_positions = val.deepClone(allocator); + }, + .@"background-repeat" => |val| { + if (this.repeats) |*repeats| repeats.deinit(allocator); + this.repeats = val.deepClone(allocator); + }, + .@"background-size" => |val| { + if (this.sizes) |*sizes| sizes.deinit(allocator); + this.sizes = val.deepClone(allocator); + }, + .@"background-attachment" => |val| { + if (this.attachments) |*attachments| attachments.deinit(allocator); + this.attachments = val.deepClone(allocator); + }, + .@"background-origin" => |val| { + if (this.origins) |*origins| origins.deinit(allocator); + this.origins = val.deepClone(allocator); + }, + .@"background-clip" => |*x| { + const val: *const SmallList(BackgroundClip, 1) = &x.*[0]; + const vendor_prefix: VendorPrefix = x.*[1]; + if (this.clips) |*clips_and_vp| { + var clips: *SmallList(BackgroundClip, 1) = &clips_and_vp.*[0]; + const vp: *VendorPrefix = &clips_and_vp.*[1]; + if (!vendor_prefix.eql(vp.*) and !val.eql(clips)) { + this.flush(allocator, dest, context); + clips.deinit(allocator); + this.clips = .{ val.deepClone(allocator), vendor_prefix }; + } else { + if 
(!val.eql(clips)) { + clips.deinit(allocator); + clips.* = val.deepClone(allocator); + } + vp.insert(vendor_prefix); + } + } else { + this.clips = .{ val.deepClone(allocator), vendor_prefix }; + } + }, + .background => |*val| { + var images = SmallList(Image, 1).initCapacity(allocator, val.len()); + for (val.slice()) |*b| { + images.appendAssumeCapacity(b.image.deepClone(allocator)); + } + this.backgroundHelper(allocator, SmallList(Image, 1), &images, property, dest, context); + const color = val.last().?.color.deepClone(allocator); + this.flushHelper(allocator, "color", CssColor, &color, dest, context); + var clips = SmallList(BackgroundClip, 1).initCapacity(allocator, val.len()); + for (val.slice()) |*b| { + clips.appendAssumeCapacity(b.clip.deepClone(allocator)); + } + var clips_vp = VendorPrefix{ .none = true }; + if (this.clips) |*clips_and_vp| { + if (!clips_vp.eql(clips_and_vp.*[1]) and !clips_and_vp.*[0].eql(&clips_and_vp[0])) { + this.flush(allocator, dest, context); + } else { + clips_vp.insert(clips_and_vp.*[1]); + } + } + + if (this.color) |*c| c.deinit(allocator); + this.color = color; + if (this.images) |*i| i.deinit(allocator); + this.images = images; + const x_positions = this.initSmallListHelper(HorizontalPosition, 1, "x_positions", allocator, val.len()); + const y_positions = this.initSmallListHelper(VerticalPosition, 1, "y_positions", allocator, val.len()); + const repeats = this.initSmallListHelper(BackgroundRepeat, 1, "repeats", allocator, val.len()); + const sizes = this.initSmallListHelper(BackgroundSize, 1, "sizes", allocator, val.len()); + const attachments = this.initSmallListHelper(BackgroundAttachment, 1, "attachments", allocator, val.len()); + const origins = this.initSmallListHelper(BackgroundOrigin, 1, "origins", allocator, val.len()); + + for ( + val.slice(), + x_positions, + y_positions, + repeats, + sizes, + attachments, + origins, + ) |*b, *x, *y, *r, *s, *a, *o| { + x.* = b.position.x.deepClone(allocator); + y.* = b.position.y.deepClone(allocator); + r.* = b.repeat.deepClone(allocator); + s.* = b.size.deepClone(allocator); + a.* = b.attachment.deepClone(allocator); + o.* = b.origin.deepClone(allocator); + } + + this.clips = .{ clips, clips_vp }; + }, + .unparsed => |*val| { + if (isBackgroundProperty(val.property_id)) { + this.flush(allocator, dest, context); + var unparsed = val.deepClone(allocator); + context.addUnparsedFallbacks(&unparsed); + if (BackgroundProperty.fromPropertyId(val.property_id)) |prop| { + this.flushed_properties.insert(prop); + } + + dest.append(allocator, Property{ .unparsed = unparsed }) catch bun.outOfMemory(); + } else return false; + }, + else => return false, + } + + this.has_any = true; + return true; + } + + // Either get the value from the field on `this` or initialize a new one + fn initSmallListHelper( + this: *@This(), + comptime T: type, + comptime N: comptime_int, + comptime field: []const u8, + allocator: Allocator, + length: u32, + ) []T { + if (@field(this, field)) |*list| { + list.clearRetainingCapacity(); + list.ensureTotalCapacity(allocator, length); + list.setLen(length); + return list.slice_mut(); + } else { + @field(this, field) = SmallList(T, N).initCapacity(allocator, length); + @field(this, field).?.setLen(length); + return @field(this, field).?.slice_mut(); + } + } + + fn backgroundHelper( + this: *@This(), + allocator: Allocator, + comptime T: type, + val: *const T, + property: *const Property, + dest: *css.DeclarationList, + context: *css.PropertyHandlerContext, + ) void { + this.flushHelper(allocator, 
"images", T, val, dest, context); + + // Store prefixed properties. Clear if we hit an unprefixed property and we have + // targets. In this case, the necessary prefixes will be generated. + this.has_prefix = val.any(struct { + pub fn predicate(item: *const Image) bool { + return item.hasVendorPrefix(); + } + }.predicate); + if (this.has_prefix) { + this.decls.append(allocator, property.deepClone(allocator)) catch bun.outOfMemory(); + } else if (context.targets.browsers != null) { + this.decls.clearRetainingCapacity(); + } + } + + fn flushHelper( + this: *@This(), + allocator: Allocator, + comptime field: []const u8, + comptime T: type, + val: *const T, + dest: *css.DeclarationList, + context: *css.PropertyHandlerContext, + ) void { + if (@field(this, field) != null and + !@field(this, field).?.eql(val) and + context.targets.browsers != null and !val.isCompatible(context.targets.browsers.?)) + { + this.flush(allocator, dest, context); + } + } + + fn flush(this: *@This(), allocator: Allocator, dest: *css.DeclarationList, context: *css.PropertyHandlerContext) void { + if (!this.has_any) return; + this.has_any = false; + const push = struct { + fn push(self: *BackgroundHandler, alloc: Allocator, d: *css.DeclarationList, comptime property_field_name: []const u8, val: anytype) void { + d.append(alloc, @unionInit(Property, property_field_name, val)) catch bun.outOfMemory(); + const prop = @field(BackgroundProperty, property_field_name); + self.flushed_properties.insert(prop); + } + }.push; + + var maybe_color: ?CssColor = bun.take(&this.color); + var maybe_images: ?css.SmallList(Image, 1) = bun.take(&this.images); + var maybe_x_positions: ?css.SmallList(HorizontalPosition, 1) = bun.take(&this.x_positions); + var maybe_y_positions: ?css.SmallList(VerticalPosition, 1) = bun.take(&this.y_positions); + var maybe_repeats: ?css.SmallList(BackgroundRepeat, 1) = bun.take(&this.repeats); + var maybe_sizes: ?css.SmallList(BackgroundSize, 1) = bun.take(&this.sizes); + var maybe_attachments: ?css.SmallList(BackgroundAttachment, 1) = bun.take(&this.attachments); + var maybe_origins: ?css.SmallList(BackgroundOrigin, 1) = bun.take(&this.origins); + var maybe_clips: ?struct { css.SmallList(BackgroundClip, 1), css.VendorPrefix } = bun.take(&this.clips); + defer { + if (maybe_color) |*c| c.deinit(allocator); + if (maybe_images) |*i| i.deinit(allocator); + if (maybe_x_positions) |*x| x.deinit(allocator); + if (maybe_y_positions) |*y| y.deinit(allocator); + if (maybe_repeats) |*r| r.deinit(allocator); + if (maybe_sizes) |*s| s.deinit(allocator); + if (maybe_attachments) |*a| a.deinit(allocator); + if (maybe_origins) |*o| o.deinit(allocator); + if (maybe_clips) |*c| c.*[0].deinit(allocator); + } + + if (maybe_color != null and + maybe_images != null and + maybe_x_positions != null and + maybe_y_positions != null and + maybe_repeats != null and + maybe_sizes != null and + maybe_attachments != null and + maybe_origins != null and + maybe_clips != null) + { + const color = &maybe_color.?; + var images = &maybe_images.?; + var x_positions = &maybe_x_positions.?; + var y_positions = &maybe_y_positions.?; + var repeats = &maybe_repeats.?; + var sizes = &maybe_sizes.?; + var attachments = &maybe_attachments.?; + var origins = &maybe_origins.?; + var clips = &maybe_clips.?; + + // Only use shorthand syntax if the number of layers matches on all properties. 
+ const len = images.len(); + if (x_positions.len() == len and + y_positions.len() == len and + repeats.len() == len and + sizes.len() == len and attachments.len() == len and origins.len() == len and clips[0].len() == len) + { + const clip_prefixes = if (clips.*[0].any(struct { + fn predicate(clip: *const BackgroundClip) bool { + return clip.* == BackgroundClip.text; + } + }.predicate)) context.targets.prefixes(clips.*[1], .background_clip) else clips.*[1]; + const clip_property = if (!clip_prefixes.eql(css.VendorPrefix{ .none = true })) + css.Property{ .@"background-clip" = .{ clips.*[0].deepClone(allocator), clip_prefixes } } + else + null; + + var backgrounds = SmallList(Background, 1).initCapacity(allocator, len); + for ( + images.slice(), + x_positions.slice(), + y_positions.slice(), + repeats.slice(), + sizes.slice(), + attachments.slice(), + origins.slice(), + clips.*[0].slice(), + 0.., + ) |image, x_position, y_position, repeat, size, attachment, origin, clip, i| { + backgrounds.appendAssumeCapacity(Background{ + .color = if (i == len - 1) color.deepClone(allocator) else CssColor.default(), + .image = image, + .position = BackgroundPosition{ .x = x_position, .y = y_position }, + .repeat = repeat, + .size = size, + .attachment = attachment, + .origin = origin, + .clip = if (clip_prefixes.eql(css.VendorPrefix{ .none = true })) clip else BackgroundClip.default(), + }); + } + defer { + images.clearRetainingCapacity(); + x_positions.clearRetainingCapacity(); + y_positions.clearRetainingCapacity(); + repeats.clearRetainingCapacity(); + sizes.clearRetainingCapacity(); + attachments.clearRetainingCapacity(); + origins.clearRetainingCapacity(); + clips.*[0].clearRetainingCapacity(); + } + + if (!this.flushed_properties.intersects(BackgroundProperty.background)) { + for (backgrounds.getFallbacks(allocator, context.targets).slice()) |fallback| { + push(this, allocator, dest, "background", fallback); + } + } + + push(this, allocator, dest, "background", backgrounds); + + if (clip_property) |clip| { + dest.append(allocator, clip) catch bun.outOfMemory(); + this.flushed_properties.insert(BackgroundProperty.@"background-clip"); + } + + this.reset(allocator); + return; + } + } + + if (bun.take(&maybe_color)) |color_| { + var color: CssColor = color_; + if (!this.flushed_properties.contains(BackgroundProperty.@"background-color")) { + for (color.getFallbacks(allocator, context.targets).slice()) |fallback| { + push(this, allocator, dest, "background-color", fallback); + } + } + push(this, allocator, dest, "background-color", color); + } + + if (bun.take(&maybe_images)) |images_| { + var images: css.SmallList(Image, 1) = images_; + if (!this.flushed_properties.contains(BackgroundProperty.@"background-image")) { + var fallbacks = images.getFallbacks(allocator, context.targets); + for (fallbacks.slice()) |fallback| { + push(this, allocator, dest, "background-image", fallback); + } + } + push(this, allocator, dest, "background-image", images); + } + + if (maybe_x_positions != null and maybe_y_positions != null and maybe_x_positions.?.len() == maybe_y_positions.?.len()) { + var positions = SmallList(BackgroundPosition, 1).initCapacity(allocator, maybe_x_positions.?.len()); + for (maybe_x_positions.?.slice(), maybe_y_positions.?.slice()) |x, y| { + positions.appendAssumeCapacity(BackgroundPosition{ .x = x, .y = y }); + } + maybe_x_positions.?.clearRetainingCapacity(); + maybe_y_positions.?.clearRetainingCapacity(); + push(this, allocator, dest, "background-position", positions); + } else { + if 
(bun.take(&maybe_x_positions)) |x| { + push(this, allocator, dest, "background-position-x", x); + } + if (bun.take(&maybe_y_positions)) |y| { + push(this, allocator, dest, "background-position-y", y); + } + } + + if (bun.take(&maybe_repeats)) |rep| { + push(this, allocator, dest, "background-repeat", rep); + } + + if (bun.take(&maybe_sizes)) |rep| { + push(this, allocator, dest, "background-size", rep); + } + + if (bun.take(&maybe_attachments)) |rep| { + push(this, allocator, dest, "background-attachment", rep); + } + + if (bun.take(&maybe_origins)) |rep| { + push(this, allocator, dest, "background-origin", rep); + } + + if (bun.take(&maybe_clips)) |c| { + const clips: css.SmallList(BackgroundClip, 1), const vp: css.VendorPrefix = c; + const prefixes = if (clips.any(struct { + pub fn predicate(clip: *const BackgroundClip) bool { + return clip.* == BackgroundClip.text; + } + }.predicate)) context.targets.prefixes(vp, css.prefixes.Feature.background_clip) else vp; + dest.append( + allocator, + Property{ + .@"background-clip" = .{ clips.deepClone(allocator), prefixes }, + }, + ) catch bun.outOfMemory(); + this.flushed_properties.insert(BackgroundProperty.@"background-clip"); + } + + this.reset(allocator); + } + + fn reset(this: *@This(), allocator: Allocator) void { + if (this.color) |c| c.deinit(allocator); + this.color = null; + if (this.images) |*i| i.deinit(allocator); + this.images = null; + if (this.x_positions) |*x| x.deinit(allocator); + this.x_positions = null; + if (this.y_positions) |*y| y.deinit(allocator); + this.y_positions = null; + if (this.repeats) |*r| r.deinit(allocator); + this.repeats = null; + if (this.sizes) |*s| s.deinit(allocator); + this.sizes = null; + if (this.attachments) |*a| a.deinit(allocator); + this.attachments = null; + if (this.origins) |*o| o.deinit(allocator); + this.origins = null; + if (this.clips) |*c| c.*[0].deinit(allocator); + this.clips = null; + } + + pub fn finalize(this: *@This(), dest: *css.DeclarationList, context: *css.PropertyHandlerContext) void { + const allocator = context.allocator; + // If the last declaration is prefixed, pop the last value + // so it isn't duplicated when we flush. 
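+        // (The accumulated handler state still holds that value, so `flush` below emits it.)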
+ if (this.has_prefix) { + var prop = this.decls.pop(); + prop.deinit(allocator); + } + + dest.appendSlice(allocator, this.decls.items) catch bun.outOfMemory(); + this.decls.clearRetainingCapacity(); + + this.flush(allocator, dest, context); + this.flushed_properties = BackgroundProperty.empty(); + } +}; + +fn isBackgroundProperty(property_id: css.PropertyId) bool { + return switch (property_id) { + .@"background-color", + .@"background-image", + .@"background-position", + .@"background-position-x", + .@"background-position-y", + .@"background-repeat", + .@"background-size", + .@"background-attachment", + .@"background-origin", + .@"background-clip", + .background, + => true, + else => false, + }; +} diff --git a/src/css/properties/border.zig b/src/css/properties/border.zig new file mode 100644 index 0000000000..6f89d00d28 --- /dev/null +++ b/src/css/properties/border.zig @@ -0,0 +1,458 @@ +const std = @import("std"); +const bun = @import("root").bun; +const Allocator = std.mem.Allocator; +const ArrayList = std.ArrayListUnmanaged; + +pub const css = @import("../css_parser.zig"); + +const SmallList = css.SmallList; +const Printer = css.Printer; +const PrintErr = css.PrintErr; + +const LengthPercentage = css.css_values.length.LengthPercentage; +const CustomIdent = css.css_values.ident.CustomIdent; +const CSSString = css.css_values.string.CSSString; +const CSSNumber = css.css_values.number.CSSNumber; +const LengthPercentageOrAuto = css.css_values.length.LengthPercentageOrAuto; +const Size2D = css.css_values.size.Size2D; +const DashedIdent = css.css_values.ident.DashedIdent; +const Image = css.css_values.image.Image; +const CssColor = css.css_values.color.CssColor; +const Ratio = css.css_values.ratio.Ratio; +const Length = css.css_values.length.Length; + +/// A value for the [border-top](https://www.w3.org/TR/css-backgrounds-3/#propdef-border-top) shorthand property. +pub const BorderTop = GenericBorder(LineStyle, 0); +/// A value for the [border-right](https://www.w3.org/TR/css-backgrounds-3/#propdef-border-right) shorthand property. +pub const BorderRight = GenericBorder(LineStyle, 1); +/// A value for the [border-bottom](https://www.w3.org/TR/css-backgrounds-3/#propdef-border-bottom) shorthand property. +pub const BorderBottom = GenericBorder(LineStyle, 2); +/// A value for the [border-left](https://www.w3.org/TR/css-backgrounds-3/#propdef-border-left) shorthand property. +pub const BorderLeft = GenericBorder(LineStyle, 3); +/// A value for the [border-block-start](https://drafts.csswg.org/css-logical/#propdef-border-block-start) shorthand property. +pub const BorderBlockStart = GenericBorder(LineStyle, 4); +/// A value for the [border-block-end](https://drafts.csswg.org/css-logical/#propdef-border-block-end) shorthand property. +pub const BorderBlockEnd = GenericBorder(LineStyle, 5); +/// A value for the [border-inline-start](https://drafts.csswg.org/css-logical/#propdef-border-inline-start) shorthand property. +pub const BorderInlineStart = GenericBorder(LineStyle, 6); +/// A value for the [border-inline-end](https://drafts.csswg.org/css-logical/#propdef-border-inline-end) shorthand property. +pub const BorderInlineEnd = GenericBorder(LineStyle, 7); +/// A value for the [border-block](https://drafts.csswg.org/css-logical/#propdef-border-block) shorthand property. +pub const BorderBlock = GenericBorder(LineStyle, 8); +/// A value for the [border-inline](https://drafts.csswg.org/css-logical/#propdef-border-inline) shorthand property. 
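+/// Sets the width, style, and color of the inline-start and inline-end borders at once,
+/// e.g. `border-inline: 1px solid red`.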
+pub const BorderInline = GenericBorder(LineStyle, 9);
+/// A value for the [border](https://www.w3.org/TR/css-backgrounds-3/#propdef-border) shorthand property.
+pub const Border = GenericBorder(LineStyle, 10);
+
+/// A generic type that represents the `border` and `outline` shorthand properties.
+pub fn GenericBorder(comptime S: type, comptime P: u8) type {
+    _ = P; // autofix
+    return struct {
+        /// The width of the border.
+        width: BorderSideWidth,
+        /// The border style.
+        style: S,
+        /// The border color.
+        color: CssColor,
+
+        const This = @This();
+
+        pub fn parse(input: *css.Parser) css.Result(@This()) {
+            // Order doesn't matter
+            var color: ?CssColor = null;
+            var style: ?S = null;
+            var width: ?BorderSideWidth = null;
+            var any = false;
+
+            while (true) {
+                if (width == null) {
+                    if (input.tryParse(BorderSideWidth.parse, .{}).asValue()) |value| {
+                        width = value;
+                        any = true;
+                    }
+                }
+
+                if (style == null) {
+                    if (input.tryParse(S.parse, .{}).asValue()) |value| {
+                        style = value;
+                        any = true;
+                        continue;
+                    }
+                }
+
+                if (color == null) {
+                    if (input.tryParse(CssColor.parse, .{}).asValue()) |value| {
+                        color = value;
+                        any = true;
+                        continue;
+                    }
+                }
+                break;
+            }
+
+            if (any) {
+                return .{
+                    .result = This{
+                        .width = width orelse BorderSideWidth.medium,
+                        .style = style orelse S.default(),
+                        .color = color orelse CssColor.current_color,
+                    },
+                };
+            }
+
+            return .{ .err = input.newCustomError(css.ParserError.invalid_declaration) };
+        }
+
+        pub fn toCss(this: *const This, W: anytype, dest: *Printer(W)) PrintErr!void {
+            if (this.eql(&This.default())) {
+                try this.style.toCss(W, dest);
+                return;
+            }
+
+            var needs_space = false;
+            if (!this.width.eql(&BorderSideWidth.default())) {
+                try this.width.toCss(W, dest);
+                needs_space = true;
+            }
+            if (!this.style.eql(&S.default())) {
+                if (needs_space) {
+                    try dest.writeStr(" ");
+                }
+                try this.style.toCss(W, dest);
+                needs_space = true;
+            }
+            if (!this.color.eql(&CssColor{ .current_color = {} })) {
+                if (needs_space) {
+                    try dest.writeStr(" ");
+                }
+                try this.color.toCss(W, dest);
+                needs_space = true;
+            }
+            return;
+        }
+
+        pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() {
+            return css.implementDeepClone(@This(), this, allocator);
+        }
+
+        pub fn eql(this: *const This, other: *const This) bool {
+            return this.width.eql(&other.width) and this.style.eql(&other.style) and this.color.eql(&other.color);
+        }
+
+        pub inline fn default() This {
+            return This{
+                .width = .medium,
+                .style = S.default(),
+                .color = CssColor.current_color,
+            };
+        }
+    };
+}
+/// A [`<line-style>`](https://drafts.csswg.org/css-backgrounds/#typedef-line-style) value, used in the `border-style` property.
+pub const LineStyle = enum {
+    /// No border.
+    none,
+    /// Similar to `none` but with different rules for tables.
+    hidden,
+    /// Looks as if the content on the inside of the border is sunken into the canvas.
+    inset,
+    /// Looks as if it were carved in the canvas.
+    groove,
+    /// Looks as if the content on the inside of the border is coming out of the canvas.
+    outset,
+    /// Looks as if it were coming out of the canvas.
+    ridge,
+    /// A series of round dots.
+    dotted,
+    /// A series of square-ended dashes.
+    dashed,
+    /// A single line segment.
+    solid,
+    /// Two parallel solid lines with some space between them.
+ double, + + pub usingnamespace css.DefineEnumProperty(@This()); + + pub fn default() LineStyle { + return .none; + } +}; + +/// A value for the [border-width](https://www.w3.org/TR/css-backgrounds-3/#border-width) property. +pub const BorderSideWidth = union(enum) { + /// A UA defined `thin` value. + thin, + /// A UA defined `medium` value. + medium, + /// A UA defined `thick` value. + thick, + /// An explicit width. + length: Length, + + pub usingnamespace css.DeriveParse(@This()); + pub usingnamespace css.DeriveToCss(@This()); + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn default() BorderSideWidth { + return .medium; + } + + pub fn eql(this: *const @This(), other: *const @This()) bool { + return switch (this.*) { + .thin => switch (other.*) { + .thin => true, + else => false, + }, + .medium => switch (other.*) { + .medium => true, + else => false, + }, + .thick => switch (other.*) { + .thick => true, + else => false, + }, + .length => switch (other.*) { + .length => this.length.eql(&other.length), + else => false, + }, + }; + } +}; + +// TODO: fallbacks +/// A value for the [border-color](https://drafts.csswg.org/css-backgrounds/#propdef-border-color) shorthand property. +pub const BorderColor = struct { + top: CssColor, + right: CssColor, + bottom: CssColor, + left: CssColor, + + // TODO: bring this back + // pub usingnamespace css.DefineShorthand(@This(), css.PropertyIdTag.@"border-color"); + pub usingnamespace css.DefineRectShorthand(@This(), CssColor); + + pub const PropertyFieldMap = .{ + .top = css.PropertyIdTag.@"border-top-color", + .right = css.PropertyIdTag.@"border-right-color", + .bottom = css.PropertyIdTag.@"border-bottom-color", + .left = css.PropertyIdTag.@"border-left-color", + }; + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } +}; + +/// A value for the [border-style](https://drafts.csswg.org/css-backgrounds/#propdef-border-style) shorthand property. +pub const BorderStyle = struct { + top: LineStyle, + right: LineStyle, + bottom: LineStyle, + left: LineStyle, + + // TODO: bring this back + // pub usingnamespace css.DefineShorthand(@This(), css.PropertyIdTag.@"border-style"); + pub usingnamespace css.DefineRectShorthand(@This(), LineStyle); + + pub const PropertyFieldMap = .{ + .top = css.PropertyIdTag.@"border-top-style", + .right = css.PropertyIdTag.@"border-right-style", + .bottom = css.PropertyIdTag.@"border-bottom-style", + .left = css.PropertyIdTag.@"border-left-style", + }; + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } +}; + +/// A value for the [border-width](https://drafts.csswg.org/css-backgrounds/#propdef-border-width) shorthand property. 
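+/// For example, `border-width: 1px 2px` sets the top and bottom widths to `1px` and the left and right widths to `2px`.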
+pub const BorderWidth = struct { + top: BorderSideWidth, + right: BorderSideWidth, + bottom: BorderSideWidth, + left: BorderSideWidth, + + // TODO: bring this back + // pub usingnamespace css.DefineShorthand(@This(), css.PropertyIdTag.@"border-width"); + pub usingnamespace css.DefineRectShorthand(@This(), BorderSideWidth); + + pub const PropertyFieldMap = .{ + .top = css.PropertyIdTag.@"border-top-width", + .right = css.PropertyIdTag.@"border-right-width", + .bottom = css.PropertyIdTag.@"border-bottom-width", + .left = css.PropertyIdTag.@"border-left-width", + }; + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } +}; + +// TODO: fallbacks +/// A value for the [border-block-color](https://drafts.csswg.org/css-logical/#propdef-border-block-color) shorthand property. +pub const BorderBlockColor = struct { + /// The block start value. + start: CssColor, + /// The block end value. + end: CssColor, + + // TODO: bring this back + // pub usingnamespace css.DefineShorthand(@This(), css.PropertyIdTag.@"border-block-color"); + pub usingnamespace css.DefineSizeShorthand(@This(), CssColor); + + pub const PropertyFieldMap = .{ + .start = css.PropertyIdTag.@"border-block-start-color", + .end = css.PropertyIdTag.@"border-block-end-color", + }; + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } +}; + +/// A value for the [border-block-style](https://drafts.csswg.org/css-logical/#propdef-border-block-style) shorthand property. +pub const BorderBlockStyle = struct { + /// The block start value. + start: LineStyle, + /// The block end value. + end: LineStyle, + + // TODO: bring this back + // pub usingnamespace css.DefineShorthand(@This(), css.PropertyIdTag.@"border-block-style"); + pub usingnamespace css.DefineSizeShorthand(@This(), LineStyle); + + pub const PropertyFieldMap = .{ + .start = css.PropertyIdTag.@"border-block-start-style", + .end = css.PropertyIdTag.@"border-block-end-style", + }; + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } +}; + +/// A value for the [border-block-width](https://drafts.csswg.org/css-logical/#propdef-border-block-width) shorthand property. +pub const BorderBlockWidth = struct { + /// The block start value. + start: BorderSideWidth, + /// The block end value. 
+ end: BorderSideWidth, + + // TODO: bring this back + // pub usingnamespace css.DefineShorthand(@This(), css.PropertyIdTag.@"border-block-width"); + pub usingnamespace css.DefineSizeShorthand(@This(), BorderSideWidth); + + pub const PropertyFieldMap = .{ + .start = css.PropertyIdTag.@"border-block-start-width", + .end = css.PropertyIdTag.@"border-block-end-width", + }; + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } +}; + +// TODO: fallbacks +/// A value for the [border-inline-color](https://drafts.csswg.org/css-logical/#propdef-border-inline-color) shorthand property. +pub const BorderInlineColor = struct { + /// The inline start value. + start: CssColor, + /// The inline end value. + end: CssColor, + + // TODO: bring this back + // pub usingnamespace css.DefineShorthand(@This(), css.PropertyIdTag.@"border-inline-color"); + pub usingnamespace css.DefineSizeShorthand(@This(), CssColor); + + pub const PropertyFieldMap = .{ + .start = css.PropertyIdTag.@"border-inline-start-color", + .end = css.PropertyIdTag.@"border-inline-end-color", + }; + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } +}; + +/// A value for the [border-inline-style](https://drafts.csswg.org/css-logical/#propdef-border-inline-style) shorthand property. +pub const BorderInlineStyle = struct { + /// The inline start value. + start: LineStyle, + /// The inline end value. + end: LineStyle, + + // TODO: bring this back + // pub usingnamespace css.DefineShorthand(@This(), css.PropertyIdTag.@"border-inline-style"); + pub usingnamespace css.DefineSizeShorthand(@This(), LineStyle); + + pub const PropertyFieldMap = .{ + .start = css.PropertyIdTag.@"border-inline-start-style", + .end = css.PropertyIdTag.@"border-inline-end-style", + }; + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } +}; + +/// A value for the [border-inline-width](https://drafts.csswg.org/css-logical/#propdef-border-inline-width) shorthand property. +pub const BorderInlineWidth = struct { + /// The inline start value. + start: BorderSideWidth, + /// The inline end value. 
+ end: BorderSideWidth, + + // TODO: bring this back + // pub usingnamespace css.DefineShorthand(@This(), css.PropertyIdTag.@"border-inline-width"); + pub usingnamespace css.DefineSizeShorthand(@This(), BorderSideWidth); + + pub const PropertyFieldMap = .{ + .start = css.PropertyIdTag.@"border-inline-start-width", + .end = css.PropertyIdTag.@"border-inline-end-width", + }; + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } +}; diff --git a/src/css/properties/border_image.zig b/src/css/properties/border_image.zig new file mode 100644 index 0000000000..bde899c8ee --- /dev/null +++ b/src/css/properties/border_image.zig @@ -0,0 +1,347 @@ +const std = @import("std"); +const bun = @import("root").bun; +const Allocator = std.mem.Allocator; +const ArrayList = std.ArrayListUnmanaged; + +pub const css = @import("../css_parser.zig"); + +const SmallList = css.SmallList; +const Printer = css.Printer; +const PrintErr = css.PrintErr; + +const LengthPercentage = css.css_values.length.LengthPercentage; +const CustomIdent = css.css_values.ident.CustomIdent; +const CSSString = css.css_values.string.CSSString; +const CSSNumber = css.css_values.number.CSSNumber; +const LengthPercentageOrAuto = css.css_values.length.LengthPercentageOrAuto; +const LengthOrNumber = css.css_values.length.LengthOrNumber; +const Size2D = css.css_values.size.Size2D; +const DashedIdent = css.css_values.ident.DashedIdent; +const Image = css.css_values.image.Image; +const CssColor = css.css_values.color.CssColor; +const Ratio = css.css_values.ratio.Ratio; +const Length = css.css_values.length.LengthValue; +const Rect = css.css_values.rect.Rect; +const NumberOrPercentage = css.css_values.percentage.NumberOrPercentage; +const Percentage = css.css_values.percentage.Percentage; + +/// A value for the [border-image](https://www.w3.org/TR/css-backgrounds-3/#border-image) shorthand property. +pub const BorderImage = struct { + /// The border image. + source: Image, + /// The offsets that define where the image is sliced. + slice: BorderImageSlice, + /// The width of the border image. + width: Rect(BorderImageSideWidth), + /// The amount that the image extends beyond the border box. + outset: Rect(css.css_values.length.LengthOrNumber), + /// How the border image is scaled and tiled. 
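+    /// The initial value is `stretch` in both directions.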
+ repeat: BorderImageRepeat, + + pub usingnamespace css.DefineShorthand(@This(), css.PropertyIdTag.@"border-image"); + + pub const PropertyFieldMap = .{ + .source = css.PropertyIdTag.@"border-image-source", + .slice = css.PropertyIdTag.@"border-image-slice", + .width = css.PropertyIdTag.@"border-image-width", + .outset = css.PropertyIdTag.@"border-image-outset", + .repeat = css.PropertyIdTag.@"border-image-repeat", + }; + + pub const VendorPrefixMap = .{ + .source = true, + .slice = true, + .width = true, + .outset = true, + .repeat = true, + }; + + pub fn parse(input: *css.Parser) css.Result(BorderImage) { + return parseWithCallback(input, {}, struct { + pub fn cb(_: void, _: *css.Parser) bool { + return false; + } + }.cb); + } + + pub fn parseWithCallback(input: *css.Parser, ctx: anytype, comptime callback: anytype) css.Result(BorderImage) { + var source: ?Image = null; + var slice: ?BorderImageSlice = null; + var width: ?Rect(BorderImageSideWidth) = null; + var outset: ?Rect(LengthOrNumber) = null; + var repeat: ?BorderImageRepeat = null; + + while (true) { + if (slice == null) { + if (input.tryParse(BorderImageSlice.parse, .{}).asValue()) |value| { + slice = value; + // Parse border image width and outset, if applicable. + const maybe_width_outset = input.tryParse(struct { + pub fn parse(i: *css.Parser) css.Result(struct { ?Rect(BorderImageSideWidth), ?Rect(LengthOrNumber) }) { + if (i.expectDelim('/').asErr()) |e| return .{ .err = e }; + + const w = i.tryParse(Rect(BorderImageSideWidth).parse, .{}).asValue(); + + const o = i.tryParse(struct { + pub fn parseFn(in: *css.Parser) css.Result(Rect(LengthOrNumber)) { + if (in.expectDelim('/').asErr()) |e| return .{ .err = e }; + return Rect(LengthOrNumber).parse(in); + } + }.parseFn, .{}).asValue(); + + if (w == null and o == null) return .{ .err = i.newCustomError(css.ParserError.invalid_declaration) }; + return .{ .result = .{ w, o } }; + } + }.parse, .{}); + + if (maybe_width_outset.asValue()) |val| { + width = val[0]; + outset = val[1]; + } + continue; + } + } + + if (source == null) { + if (input.tryParse(Image.parse, .{}).asValue()) |value| { + source = value; + continue; + } + } + + if (repeat == null) { + if (input.tryParse(BorderImageRepeat.parse, .{}).asValue()) |value| { + repeat = value; + continue; + } + } + + if (@call(.auto, callback, .{ ctx, input })) { + continue; + } + + break; + } + + if (source != null or slice != null or width != null or outset != null or repeat != null) { + return .{ + .result = BorderImage{ + .source = source orelse Image.default(), + .slice = slice orelse BorderImageSlice.default(), + .width = width orelse Rect(BorderImageSideWidth).all(BorderImageSideWidth.default()), + .outset = outset orelse Rect(LengthOrNumber).all(LengthOrNumber.default()), + .repeat = repeat orelse BorderImageRepeat.default(), + }, + }; + } + return .{ .err = input.newCustomError(css.ParserError.invalid_declaration) }; + } + + pub fn toCss(this: *const BorderImage, comptime W: type, dest: *css.Printer(W)) PrintErr!void { + return toCssInternal(&this.source, &this.slice, &this.width, &this.outset, &this.repeat, W, dest); + } + + pub fn toCssInternal( + source: *const Image, + slice: *const BorderImageSlice, + width: *const Rect(BorderImageSideWidth), + outset: *const Rect(LengthOrNumber), + repeat: *const BorderImageRepeat, + comptime W: type, + dest: *css.Printer(W), + ) PrintErr!void { + if (!css.generic.eql(Image, source, &Image.default())) { + try source.toCss(W, dest); + } + const has_slice = 
!css.generic.eql(BorderImageSlice, slice, &BorderImageSlice.default()); + const has_width = !css.generic.eql(Rect(BorderImageSideWidth), width, &Rect(BorderImageSideWidth).all(BorderImageSideWidth.default())); + const has_outset = !css.generic.eql(Rect(LengthOrNumber), outset, &Rect(LengthOrNumber).all(LengthOrNumber{ .number = 0.0 })); + if (has_slice or has_width or has_outset) { + try dest.writeStr(" "); + try slice.toCss(W, dest); + if (has_width or has_outset) { + try dest.delim('/', true); + } + if (has_width) { + try width.toCss(W, dest); + } + + if (has_outset) { + try dest.delim('/', true); + try outset.toCss(W, dest); + } + } + + if (!css.generic.eql(BorderImageRepeat, repeat, &BorderImageRepeat.default())) { + try dest.writeStr(" "); + return repeat.toCss(W, dest); + } + + return; + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(this: *const BorderImage, other: *const BorderImage) bool { + return this.source.eql(&other.source) and + this.slice.eql(&other.slice) and + this.width.eql(&other.width) and + this.outset.eql(&other.outset) and + this.repeat.eql(&other.repeat); + } + + pub fn default() BorderImage { + return BorderImage{ + .source = Image.default(), + .slice = BorderImageSlice.default(), + .width = Rect(BorderImageSideWidth).all(BorderImageSideWidth.default()), + .outset = Rect(LengthOrNumber).all(LengthOrNumber.default()), + .repeat = BorderImageRepeat.default(), + }; + } +}; + +/// A value for the [border-image-repeat](https://www.w3.org/TR/css-backgrounds-3/#border-image-repeat) property. +pub const BorderImageRepeat = struct { + /// The horizontal repeat value. + horizontal: BorderImageRepeatKeyword, + /// The vertical repeat value. + vertical: BorderImageRepeatKeyword, + + pub fn parse(input: *css.Parser) css.Result(BorderImageRepeat) { + const horizontal = switch (BorderImageRepeatKeyword.parse(input)) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + const vertical = input.tryParse(BorderImageRepeatKeyword.parse, .{}).asValue(); + return .{ .result = BorderImageRepeat{ + .horizontal = horizontal, + .vertical = vertical orelse horizontal, + } }; + } + + pub fn toCss(this: *const BorderImageRepeat, comptime W: type, dest: *Printer(W)) PrintErr!void { + try this.horizontal.toCss(W, dest); + if (this.horizontal != this.vertical) { + try dest.writeStr(" "); + try this.vertical.toCss(W, dest); + } + } + + pub fn default() BorderImageRepeat { + return BorderImageRepeat{ + .horizontal = BorderImageRepeatKeyword.stretch, + .vertical = BorderImageRepeatKeyword.stretch, + }; + } + + pub fn eql(this: *const BorderImageRepeat, other: *const BorderImageRepeat) bool { + return this.horizontal.eql(&other.horizontal) and this.vertical.eql(&other.vertical); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } +}; + +/// A value for the [border-image-width](https://www.w3.org/TR/css-backgrounds-3/#border-image-width) property. +pub const BorderImageSideWidth = union(enum) { + /// A number representing a multiple of the border width. + number: CSSNumber, + /// An explicit length or percentage. + length_percentage: LengthPercentage, + /// The `auto` keyword, representing the natural width of the image slice. 
+ auto: void, + + pub usingnamespace css.DeriveParse(@This()); + pub usingnamespace css.DeriveToCss(@This()); + + pub fn default() BorderImageSideWidth { + return .{ .number = 1.0 }; + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(this: *const BorderImageSideWidth, other: *const BorderImageSideWidth) bool { + return switch (this.*) { + .number => |*a| switch (other.*) { + .number => |*b| a.* == b.*, + else => false, + }, + .length_percentage => |*a| switch (other.*) { + .length_percentage => css.generic.eql(LengthPercentage, a, &other.length_percentage), + else => false, + }, + .auto => switch (other.*) { + .auto => true, + else => false, + }, + }; + } +}; + +/// A single [border-image-repeat](https://www.w3.org/TR/css-backgrounds-3/#border-image-repeat) keyword. +pub const BorderImageRepeatKeyword = enum { + /// The image is stretched to fill the area. + stretch, + /// The image is tiled (repeated) to fill the area. + repeat, + /// The image is scaled so that it repeats an even number of times. + round, + /// The image is repeated so that it fits, and then spaced apart evenly. + space, + + pub usingnamespace css.DefineEnumProperty(@This()); +}; + +/// A value for the [border-image-slice](https://www.w3.org/TR/css-backgrounds-3/#border-image-slice) property. +pub const BorderImageSlice = struct { + /// The offsets from the edges of the image. + offsets: Rect(NumberOrPercentage), + /// Whether the middle of the border image should be preserved. + fill: bool, + + pub fn parse(input: *css.Parser) css.Result(BorderImageSlice) { + var fill = switch (input.expectIdentMatching("fill")) { + .err => false, + .result => true, + }; + const offsets = switch (Rect(NumberOrPercentage).parse(input)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + if (!fill) { + fill = switch (input.expectIdentMatching("fill")) { + .err => false, + .result => true, + }; + } + return .{ .result = BorderImageSlice{ .offsets = offsets, .fill = fill } }; + } + + pub fn toCss(this: *const BorderImageSlice, comptime W: type, dest: *Printer(W)) PrintErr!void { + try this.offsets.toCss(W, dest); + if (this.fill) { + try dest.writeStr(" fill"); + } + } + + pub fn eql(this: *const BorderImageSlice, other: *const BorderImageSlice) bool { + return this.offsets.eql(&other.offsets) and this.fill == other.fill; + } + + pub fn default() BorderImageSlice { + return BorderImageSlice{ + .offsets = Rect(NumberOrPercentage).all(NumberOrPercentage{ .percentage = Percentage{ .v = 1.0 } }), + .fill = false, + }; + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } +}; diff --git a/src/css/properties/border_radius.zig b/src/css/properties/border_radius.zig new file mode 100644 index 0000000000..befd591f75 --- /dev/null +++ b/src/css/properties/border_radius.zig @@ -0,0 +1,109 @@ +const std = @import("std"); +const bun = @import("root").bun; +const Allocator = std.mem.Allocator; +const ArrayList = std.ArrayListUnmanaged; + +pub const css = @import("../css_parser.zig"); + +const SmallList = css.SmallList; +const Printer = css.Printer; +const PrintErr = css.PrintErr; + +const LengthPercentage = css.css_values.length.LengthPercentage; +const CustomIdent = css.css_values.ident.CustomIdent; +const CSSString = css.css_values.string.CSSString; +const CSSNumber = css.css_values.number.CSSNumber; +const LengthPercentageOrAuto = 
css.css_values.length.LengthPercentageOrAuto; +const Size2D = css.css_values.size.Size2D; +const DashedIdent = css.css_values.ident.DashedIdent; +const Image = css.css_values.image.Image; +const CssColor = css.css_values.color.CssColor; +const Ratio = css.css_values.ratio.Ratio; +const Length = css.css_values.length.LengthValue; +const Rect = css.css_values.rect.Rect; +const NumberOrPercentage = css.css_values.percentage.NumberOrPercentage; + +/// A value for the [border-radius](https://www.w3.org/TR/css-backgrounds-3/#border-radius) property. +pub const BorderRadius = struct { + /// The x and y radius values for the top left corner. + top_left: Size2D(LengthPercentage), + /// The x and y radius values for the top right corner. + top_right: Size2D(LengthPercentage), + /// The x and y radius values for the bottom right corner. + bottom_right: Size2D(LengthPercentage), + /// The x and y radius values for the bottom left corner. + bottom_left: Size2D(LengthPercentage), + + pub usingnamespace css.DefineShorthand(@This(), css.PropertyIdTag.@"border-radius"); + + pub const PropertyFieldMap = .{ + .top_left = "border-top-left-radius", + .top_right = "border-top-right-radius", + .bottom_right = "border-bottom-right-radius", + .bottom_left = "border-bottom-left-radius", + }; + + pub const VendorPrefixMap = .{ + .top_left = true, + .top_right = true, + .bottom_right = true, + .bottom_left = true, + }; + + pub fn parse(input: *css.Parser) css.Result(BorderRadius) { + const widths = switch (Rect(LengthPercentage).parse(input)) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + const heights = if (input.tryParse(css.Parser.expectDelim, .{'/'}).isOk()) + switch (Rect(LengthPercentage).parse(input)) { + .result => |v| v, + .err => |e| { + widths.deinit(input.allocator()); + return .{ .err = e }; + }, + } + else + widths.deepClone(input.allocator()); + + return .{ + .result = BorderRadius{ + .top_left = Size2D(LengthPercentage){ .a = widths.top, .b = heights.top }, + .top_right = Size2D(LengthPercentage){ .a = widths.right, .b = heights.right }, + .bottom_right = Size2D(LengthPercentage){ .a = widths.bottom, .b = heights.bottom }, + .bottom_left = Size2D(LengthPercentage){ .a = widths.left, .b = heights.left }, + }, + }; + } + + pub fn toCss(this: *const BorderRadius, comptime W: type, dest: *Printer(W)) PrintErr!void { + const widths = Rect(*const LengthPercentage){ + .top = &this.top_left.a, + .right = &this.top_right.a, + .bottom = &this.bottom_right.a, + .left = &this.bottom_left.a, + }; + + const heights = Rect(*const LengthPercentage){ + .top = &this.top_left.b, + .right = &this.top_right.b, + .bottom = &this.bottom_right.b, + .left = &this.bottom_left.b, + }; + + try widths.toCss(W, dest); + + if (!widths.eql(&heights)) { + try dest.delim('/', true); + try heights.toCss(W, dest); + } + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } +}; diff --git a/src/css/properties/box_shadow.zig b/src/css/properties/box_shadow.zig new file mode 100644 index 0000000000..dea6c1bf53 --- /dev/null +++ b/src/css/properties/box_shadow.zig @@ -0,0 +1,131 @@ +const std = @import("std"); +const bun = @import("root").bun; +const Allocator = std.mem.Allocator; +const ArrayList = std.ArrayListUnmanaged; + +pub const css = @import("../css_parser.zig"); + +const SmallList = css.SmallList; +const 
Printer = css.Printer; +const PrintErr = css.PrintErr; + +const LengthPercentage = css.css_values.length.LengthPercentage; +const CustomIdent = css.css_values.ident.CustomIdent; +const CSSString = css.css_values.string.CSSString; +const CSSNumber = css.css_values.number.CSSNumber; +const LengthPercentageOrAuto = css.css_values.length.LengthPercentageOrAuto; +const Size2D = css.css_values.size.Size2D; +const DashedIdent = css.css_values.ident.DashedIdent; +const Image = css.css_values.image.Image; +const CssColor = css.css_values.color.CssColor; +const Ratio = css.css_values.ratio.Ratio; +const Length = css.css_values.length.LengthValue; +const Rect = css.css_values.rect.Rect; +const NumberOrPercentage = css.css_values.percentage.NumberOrPercentage; + +/// A value for the [box-shadow](https://drafts.csswg.org/css-backgrounds/#box-shadow) property. +pub const BoxShadow = struct { + /// The color of the box shadow. + color: CssColor, + /// The x offset of the shadow. + x_offset: Length, + /// The y offset of the shadow. + y_offset: Length, + /// The blur radius of the shadow. + blur: Length, + /// The spread distance of the shadow. + spread: Length, + /// Whether the shadow is inset within the box. + inset: bool, + + pub fn parse(input: *css.Parser) css.Result(@This()) { + var color: ?CssColor = null; + const Lengths = struct { x: Length, y: Length, blur: Length, spread: Length }; + var lengths: ?Lengths = null; + var inset = false; + + while (true) { + if (!inset) { + if (input.tryParse(css.Parser.expectIdentMatching, .{"inset"}).isOk()) { + inset = true; + continue; + } + } + + if (lengths == null) { + const value = input.tryParse(struct { + fn parse(p: *css.Parser) css.Result(Lengths) { + const horizontal = switch (Length.parse(p)) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + const vertical = switch (Length.parse(p)) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + const blur = p.tryParse(Length.parse, .{}).asValue() orelse Length.zero(); + const spread = p.tryParse(Length.parse, .{}).asValue() orelse Length.zero(); + return .{ .result = .{ .x = horizontal, .y = vertical, .blur = blur, .spread = spread } }; + } + }.parse, .{}); + + if (value.isOk()) { + lengths = value.result; + continue; + } + } + + if (color == null) { + if (input.tryParse(CssColor.parse, .{}).asValue()) |c| { + color = c; + continue; + } + } + + break; + } + + const final_lengths = lengths orelse return .{ .err = input.newError(.qualified_rule_invalid) }; + return .{ .result = BoxShadow{ + .color = color orelse CssColor{ .current_color = {} }, + .x_offset = final_lengths.x, + .y_offset = final_lengths.y, + .blur = final_lengths.blur, + .spread = final_lengths.spread, + .inset = inset, + } }; + } + + pub fn toCss(this: *const @This(), comptime W: type, dest: *css.Printer(W)) css.PrintErr!void { + if (this.inset) { + try dest.writeStr("inset "); + } + + try this.x_offset.toCss(W, dest); + try dest.writeChar(' '); + try this.y_offset.toCss(W, dest); + + if (!this.blur.eql(&Length.zero()) or !this.spread.eql(&Length.zero())) { + try dest.writeChar(' '); + try this.blur.toCss(W, dest); + + if (!this.spread.eql(&Length.zero())) { + try dest.writeChar(' '); + try this.spread.toCss(W, dest); + } + } + + if (!this.color.eql(&CssColor{ .current_color = {} })) { + try dest.writeChar(' '); + try this.color.toCss(W, dest); + } + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(lhs: 
*const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } +}; diff --git a/src/css/properties/contain.zig b/src/css/properties/contain.zig new file mode 100644 index 0000000000..a095c5228c --- /dev/null +++ b/src/css/properties/contain.zig @@ -0,0 +1,43 @@ +const std = @import("std"); +const bun = @import("root").bun; +const Allocator = std.mem.Allocator; +const ArrayList = std.ArrayListUnmanaged; + +pub const css = @import("../css_parser.zig"); + +const SmallList = css.SmallList; +const Printer = css.Printer; +const PrintErr = css.PrintErr; + +const ContainerName = css.css_rules.container.ContainerName; + +const LengthPercentage = css.css_values.length.LengthPercentage; +const CustomIdent = css.css_values.ident.CustomIdent; +const CSSString = css.css_values.string.CSSString; +const CSSNumber = css.css_values.number.CSSNumber; +const LengthPercentageOrAuto = css.css_values.length.LengthPercentageOrAuto; +const Size2D = css.css_values.size.Size2D; +const DashedIdent = css.css_values.ident.DashedIdent; +const Image = css.css_values.image.Image; +const CssColor = css.css_values.color.CssColor; +const Ratio = css.css_values.ratio.Ratio; +const Length = css.css_values.length.LengthValue; +const Rect = css.css_values.rect.Rect; +const NumberOrPercentage = css.css_values.percentage.NumberOrPercentage; + +const ContainerIdent = ContainerName; + +/// A value for the [container-type](https://drafts.csswg.org/css-contain-3/#container-type) property. +/// Establishes the element as a query container for the purpose of container queries. +pub const ContainerType = css.DefineEnumProperty(@compileError(css.todo_stuff.depth)); + +/// A value for the [container-name](https://drafts.csswg.org/css-contain-3/#container-name) property. +pub const ContainerNameList = union(enum) { + /// The `none` keyword. + none, + /// A list of container names. + names: SmallList(ContainerIdent, 1), +}; + +/// A value for the [container](https://drafts.csswg.org/css-contain-3/#container-shorthand) shorthand property. 
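+/// For example, `container: sidebar / inline-size` combines `container-name` and `container-type`.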
+pub const Container = css.DefineEnumProperty(@compileError(css.todo_stuff.depth)); diff --git a/src/css/properties/css_modules.zig b/src/css/properties/css_modules.zig new file mode 100644 index 0000000000..fa087a3866 --- /dev/null +++ b/src/css/properties/css_modules.zig @@ -0,0 +1,111 @@ +const std = @import("std"); +const bun = @import("root").bun; +const Allocator = std.mem.Allocator; +const ArrayList = std.ArrayListUnmanaged; + +pub const css = @import("../css_parser.zig"); + +const SmallList = css.SmallList; +const Printer = css.Printer; +const PrintErr = css.PrintErr; +const Error = css.Error; + +const ContainerName = css.css_rules.container.ContainerName; + +const LengthPercentage = css.css_values.length.LengthPercentage; +const CustomIdent = css.css_values.ident.CustomIdent; +const CSSString = css.css_values.string.CSSString; +const CSSNumber = css.css_values.number.CSSNumber; +const LengthPercentageOrAuto = css.css_values.length.LengthPercentageOrAuto; +const Size2D = css.css_values.size.Size2D; +const DashedIdent = css.css_values.ident.DashedIdent; +const Image = css.css_values.image.Image; +const CssColor = css.css_values.color.CssColor; +const Ratio = css.css_values.ratio.Ratio; +const Length = css.css_values.length.LengthValue; +const Rect = css.css_values.rect.Rect; +const NumberOrPercentage = css.css_values.percentage.NumberOrPercentage; +const CustomIdentList = css.css_values.ident.CustomIdentList; +const CustomIdentFns = css.css_values.ident.CustomIdentFns; + +const Location = css.dependencies.Location; + +/// A value for the [composes](https://github.com/css-modules/css-modules/#dependencies) property from CSS modules. +pub const Composes = struct { + /// A list of class names to compose. + names: CustomIdentList, + /// Where the class names are composed from. + from: ?Specifier, + /// The source location of the `composes` property. + loc: Location, + + pub fn parse(input: *css.Parser) css.Result(Composes) { + _ = input; // autofix + @panic(css.todo_stuff.depth); + } + + pub fn toCss(this: *const @This(), comptime W: type, dest: *Printer(W)) PrintErr!void { + var first = true; + for (this.names.slice()) |name| { + if (first) { + first = false; + } else { + try dest.writeChar(' '); + } + try CustomIdentFns.toCss(&name, W, dest); + } + + if (this.from) |*from| { + try dest.writeStr(" from "); + try from.toCss(W, dest); + } + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } +}; + +/// Defines where the class names referenced in the `composes` property are located. +/// +/// See [Composes](Composes). +pub const Specifier = union(enum) { + /// The referenced name is global. + global, + /// The referenced name comes from the specified file. + file: []const u8, + /// The referenced name comes from a source index (used during bundling). 
+ source_index: u32, + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn parse(input: *css.Parser) css.Result(Specifier) { + if (input.tryParse(css.Parser.expectString, .{}).asValue()) |file| { + return .{ .result = .{ .file = file } }; + } + if (input.expectIdentMatching("global").asErr()) |e| return .{ .err = e }; + return .{ .result = .global }; + } + + pub fn toCss(this: *const @This(), comptime W: type, dest: *Printer(W)) PrintErr!void { + return switch (this.*) { + .global => dest.writeStr("global"), + .file => |file| css.serializer.serializeString(file, dest) catch return dest.addFmtError(), + .source_index => {}, + }; + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn hash(this: *const @This(), hasher: *std.hash.Wyhash) void { + return css.implementHash(@This(), this, hasher); + } +}; diff --git a/src/css/properties/custom.zig b/src/css/properties/custom.zig new file mode 100644 index 0000000000..eaa7ad2f89 --- /dev/null +++ b/src/css/properties/custom.zig @@ -0,0 +1,1545 @@ +const std = @import("std"); +const Allocator = std.mem.Allocator; +const bun = @import("root").bun; +const logger = bun.logger; +const Log = logger.Log; + +pub const css = @import("../css_parser.zig"); +pub const css_values = @import("../values/values.zig"); +pub const Printer = css.Printer; +pub const PrintErr = css.PrintErr; +const DashedIdent = css_values.ident.DashedIdent; +const DashedIdentFns = css_values.ident.DashedIdentFns; +const Ident = css_values.ident.Ident; +const IdentFns = css_values.ident.IdentFns; +pub const Result = css.Result; + +pub const CssColor = css.css_values.color.CssColor; +pub const RGBA = css.css_values.color.RGBA; +pub const SRGB = css.css_values.color.SRGB; +pub const HSL = css.css_values.color.HSL; +pub const CSSInteger = css.css_values.number.CSSInteger; +pub const CSSIntegerFns = css.css_values.number.CSSIntegerFns; +pub const CSSNumberFns = css.css_values.number.CSSNumberFns; +pub const Percentage = css.css_values.percentage.Percentage; +pub const Url = css.css_values.url.Url; +pub const DashedIdentReference = css.css_values.ident.DashedIdentReference; +pub const CustomIdent = css.css_values.ident.CustomIdent; +pub const CustomIdentFns = css.css_values.ident.CustomIdentFns; +pub const LengthValue = css.css_values.length.LengthValue; +pub const Angle = css.css_values.angle.Angle; +pub const Time = css.css_values.time.Time; +pub const Resolution = css.css_values.resolution.Resolution; +pub const AnimationName = css.css_properties.animation.AnimationName; +const ComponentParser = css.css_values.color.ComponentParser; + +const SupportsCondition = css.SupportsCondition; +const ColorFallbackKind = css.ColorFallbackKind; + +const ArrayList = std.ArrayListUnmanaged; + +/// PERF: nullable optimization +pub const TokenList = struct { + v: std.ArrayListUnmanaged(TokenOrValue) = .{}, + + const This = @This(); + + pub fn deinit(this: *TokenList, allocator: Allocator) void { + for (this.v.items) |*token_or_value| { + token_or_value.deinit(allocator); + } + this.v.deinit(allocator); + } + + pub fn toCss( + this: *const This, + comptime W: type, + dest: *Printer(W), + is_custom_property: bool, + ) PrintErr!void { + if (!dest.minify and this.v.items.len == 1 and this.v.items[0].isWhitespace()) { + return; + } + + var has_whitespace = false; + for (this.v.items, 0..) 
|*token_or_value, i| { + switch (token_or_value.*) { + .color => |color| { + try color.toCss(W, dest); + has_whitespace = false; + }, + .unresolved_color => |color| { + try color.toCss(W, dest, is_custom_property); + has_whitespace = false; + }, + .url => |url| { + if (dest.dependencies != null and is_custom_property and !url.isAbsolute(try dest.getImportRecords())) { + return dest.newError(css.PrinterErrorKind{ + .ambiguous_url_in_custom_property = .{ .url = (try dest.getImportRecords()).at(url.import_record_idx).path.pretty }, + }, url.loc); + } + try url.toCss(W, dest); + has_whitespace = false; + }, + .@"var" => |@"var"| { + try @"var".toCss(W, dest, is_custom_property); + has_whitespace = try this.writeWhitespaceIfNeeded(i, W, dest); + }, + .env => |env| { + try env.toCss(W, dest, is_custom_property); + has_whitespace = try this.writeWhitespaceIfNeeded(i, W, dest); + }, + .function => |f| { + try f.toCss(W, dest, is_custom_property); + has_whitespace = try this.writeWhitespaceIfNeeded(i, W, dest); + }, + .length => |v| { + // Do not serialize unitless zero lengths in custom properties as it may break calc(). + const value, const unit = v.toUnitValue(); + try css.serializer.serializeDimension(value, unit, W, dest); + has_whitespace = false; + }, + .angle => |v| { + try v.toCss(W, dest); + has_whitespace = false; + }, + .time => |v| { + try v.toCss(W, dest); + has_whitespace = false; + }, + .resolution => |v| { + try v.toCss(W, dest); + has_whitespace = false; + }, + .dashed_ident => |v| { + try DashedIdentFns.toCss(&v, W, dest); + has_whitespace = false; + }, + .animation_name => |v| { + try v.toCss(W, dest); + has_whitespace = false; + }, + .token => |token| switch (token) { + .delim => |d| { + if (d == '+' or d == '-') { + try dest.writeChar(' '); + bun.assert(d <= 0x7F); + try dest.writeChar(@intCast(d)); + try dest.writeChar(' '); + } else { + const ws_before = !has_whitespace and (d == '/' or d == '*'); + bun.assert(d <= 0x7F); + try dest.delim(@intCast(d), ws_before); + } + has_whitespace = true; + }, + .comma => { + try dest.delim(',', false); + has_whitespace = true; + }, + .close_paren, .close_square, .close_curly => { + try token.toCss(W, dest); + has_whitespace = try this.writeWhitespaceIfNeeded(i, W, dest); + }, + .dimension => { + try css.serializer.serializeDimension(token.dimension.num.value, token.dimension.unit, W, dest); + has_whitespace = false; + }, + .number => |v| { + try css.css_values.number.CSSNumberFns.toCss(&v.value, W, dest); + has_whitespace = false; + }, + else => { + try token.toCss(W, dest); + has_whitespace = token == .whitespace; + }, + }, + } + } + } + + pub fn toCssRaw(this: *const TokenList, comptime W: type, dest: *Printer(W)) PrintErr!void { + for (this.v.items) |*token_or_value| { + if (token_or_value.* == .token) { + try token_or_value.token.toCss(W, dest); + } else { + return dest.addFmtError(); + } + } + } + + pub fn writeWhitespaceIfNeeded( + this: *const This, + i: usize, + comptime W: type, + dest: *Printer(W), + ) PrintErr!bool { + if (!dest.minify and + i != this.v.items.len - 1 and + !(this.v.items[i + 1] == .token and switch (this.v.items[i + 1].token) { + .comma, .close_paren => true, + else => false, + })) { + // Whitespace is removed during parsing, so add it back if we aren't minifying. 
+ try dest.writeChar(' '); + return true; + } else return false; + } + + pub fn parse(input: *css.Parser, options: *const css.ParserOptions, depth: usize) Result(TokenList) { + var tokens = ArrayList(TokenOrValue){}; // PERF: deinit on error + if (TokenListFns.parseInto(input, &tokens, options, depth).asErr()) |e| return .{ .err = e }; + + // Slice off leading and trailing whitespace if there are at least two tokens. + // If there is only one token, we must preserve it. e.g. `--foo: ;` is valid. + // PERF(alloc): this feels like a common codepath, idk how I feel about reallocating a new array just to slice off whitespace. + if (tokens.items.len >= 2) { + var slice = tokens.items[0..]; + if (tokens.items.len > 0 and tokens.items[0].isWhitespace()) { + slice = slice[1..]; + } + if (tokens.items.len > 0 and tokens.items[tokens.items.len - 1].isWhitespace()) { + slice = slice[0 .. slice.len - 1]; + } + var newlist = ArrayList(TokenOrValue){}; + newlist.insertSlice(input.allocator(), 0, slice) catch unreachable; + tokens.deinit(input.allocator()); + return .{ .result = TokenList{ .v = newlist } }; + } + + return .{ .result = .{ .v = tokens } }; + } + + pub fn parseWithOptions(input: *css.Parser, options: *const css.ParserOptions) Result(TokenList) { + return parse(input, options, 0); + } + + pub fn parseRaw( + input: *css.Parser, + tokens: *ArrayList(TokenOrValue), + options: *const css.ParserOptions, + depth: usize, + ) Result(void) { + if (depth > 500) { + return .{ .err = input.newCustomError(css.ParserError.maximum_nesting_depth) }; + } + + while (true) { + const state = input.state(); + const token = switch (input.nextIncludingWhitespace()) { + .result => |vv| vv, + .err => break, + }; + switch (token.*) { + .open_paren, .open_square, .open_curly => { + tokens.append( + input.allocator(), + .{ .token = token.* }, + ) catch unreachable; + const closing_delimiter: css.Token = switch (token.*) { + .open_paren => .close_paren, + .open_square => .close_square, + .open_curly => .close_curly, + else => unreachable, + }; + const Closure = struct { + options: *const css.ParserOptions, + depth: usize, + tokens: *ArrayList(TokenOrValue), + pub fn parsefn(this: *@This(), input2: *css.Parser) Result(void) { + return TokenListFns.parseRaw( + input2, + this.tokens, + this.options, + this.depth + 1, + ); + } + }; + var closure = Closure{ + .options = options, + .depth = depth, + .tokens = tokens, + }; + if (input.parseNestedBlock(void, &closure, Closure.parsefn).asErr()) |e| return .{ .err = e }; + tokens.append( + input.allocator(), + .{ .token = closing_delimiter }, + ) catch unreachable; + }, + .function => { + tokens.append( + input.allocator(), + .{ .token = token.* }, + ) catch unreachable; + const Closure = struct { + options: *const css.ParserOptions, + depth: usize, + tokens: *ArrayList(TokenOrValue), + pub fn parsefn(this: *@This(), input2: *css.Parser) Result(void) { + return TokenListFns.parseRaw( + input2, + this.tokens, + this.options, + this.depth + 1, + ); + } + }; + var closure = Closure{ + .options = options, + .depth = depth, + .tokens = tokens, + }; + if (input.parseNestedBlock(void, &closure, Closure.parsefn).asErr()) |e| return .{ .err = e }; + tokens.append( + input.allocator(), + .{ .token = .close_paren }, + ) catch unreachable; + }, + else => { + if (token.isParseError()) { + return .{ + .err = css.ParseError(css.ParserError){ + .kind = .{ .basic = .{ .unexpected_token = token.* } }, + .location = state.sourceLocation(), + }, + }; + } + tokens.append( + input.allocator(), + .{ 
.token = token.* }, + ) catch unreachable; + }, + } + } + + return .{ .result = {} }; + } + + pub fn parseInto( + input: *css.Parser, + tokens: *ArrayList(TokenOrValue), + options: *const css.ParserOptions, + depth: usize, + ) Result(void) { + if (depth > 500) { + return .{ .err = input.newCustomError(css.ParserError.maximum_nesting_depth) }; + } + + var last_is_delim = false; + var last_is_whitespace = false; + + while (true) { + const state = input.state(); + const tok = switch (input.nextIncludingWhitespace()) { + .result => |vv| vv, + .err => break, + }; + switch (tok.*) { + .whitespace, .comment => { + // Skip whitespace if the last token was a delimiter. + // Otherwise, replace all whitespace and comments with a single space character. + if (!last_is_delim) { + tokens.append( + input.allocator(), + .{ .token = .{ .whitespace = " " } }, + ) catch unreachable; + last_is_whitespace = true; + } + continue; + }, + .function => |f| { + // Attempt to parse embedded color values into hex tokens. + if (tryParseColorToken(f, &state, input)) |color| { + tokens.append( + input.allocator(), + .{ .color = color }, + ) catch unreachable; + last_is_delim = false; + last_is_whitespace = true; + } else if (input.tryParse(UnresolvedColor.parse, .{ f, options }).asValue()) |color| { + tokens.append( + input.allocator(), + .{ .unresolved_color = color }, + ) catch unreachable; + last_is_delim = false; + last_is_whitespace = true; + } else if (bun.strings.eql(f, "url")) { + input.reset(&state); + tokens.append( + input.allocator(), + .{ .url = switch (Url.parse(input)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + } }, + ) catch unreachable; + last_is_delim = false; + last_is_whitespace = false; + } else if (bun.strings.eql(f, "var")) { + const Closure = struct { + options: *const css.ParserOptions, + depth: usize, + tokens: *ArrayList(TokenOrValue), + pub fn parsefn(this: *@This(), input2: *css.Parser) Result(TokenOrValue) { + const thevar = switch (Variable.parse(input2, this.options, this.depth + 1)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + return .{ .result = TokenOrValue{ .@"var" = thevar } }; + } + }; + var closure = Closure{ + .options = options, + .depth = depth, + .tokens = tokens, + }; + const @"var" = switch (input.parseNestedBlock(TokenOrValue, &closure, Closure.parsefn)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + tokens.append( + input.allocator(), + @"var", + ) catch unreachable; + last_is_delim = true; + last_is_whitespace = false; + } else if (bun.strings.eql(f, "env")) { + const Closure = struct { + options: *const css.ParserOptions, + depth: usize, + pub fn parsefn(this: *@This(), input2: *css.Parser) Result(TokenOrValue) { + const env = switch (EnvironmentVariable.parseNested(input2, this.options, this.depth + 1)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + return .{ .result = TokenOrValue{ .env = env } }; + } + }; + var closure = Closure{ + .options = options, + .depth = depth, + }; + const env = switch (input.parseNestedBlock(TokenOrValue, &closure, Closure.parsefn)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + tokens.append( + input.allocator(), + env, + ) catch unreachable; + last_is_delim = true; + last_is_whitespace = false; + } else { + const Closure = struct { + options: *const css.ParserOptions, + depth: usize, + pub fn parsefn(this: *@This(), input2: *css.Parser) Result(TokenList) { + const args = switch (TokenListFns.parse(input2, this.options, this.depth + 1)) { + 
.result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + return .{ .result = args }; + } + }; + var closure = Closure{ + .options = options, + .depth = depth, + }; + const arguments = switch (input.parseNestedBlock(TokenList, &closure, Closure.parsefn)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + tokens.append( + input.allocator(), + .{ + .function = .{ + .name = .{ .v = f }, + .arguments = arguments, + }, + }, + ) catch unreachable; + last_is_delim = true; // Whitespace is not required after any of these chars. + last_is_whitespace = false; + } + continue; + }, + .hash, .idhash => { + const h = switch (tok.*) { + .hash => |h| h, + .idhash => |h| h, + else => unreachable, + }; + brk: { + const r, const g, const b, const a = css.color.parseHashColor(h) orelse { + tokens.append( + input.allocator(), + .{ .token = .{ .hash = h } }, + ) catch unreachable; + break :brk; + }; + tokens.append( + input.allocator(), + .{ + .color = CssColor{ .rgba = RGBA.new(r, g, b, a) }, + }, + ) catch unreachable; + } + last_is_delim = false; + last_is_whitespace = false; + continue; + }, + .unquoted_url => { + input.reset(&state); + tokens.append( + input.allocator(), + .{ .url = switch (Url.parse(input)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + } }, + ) catch unreachable; + last_is_delim = false; + last_is_whitespace = false; + continue; + }, + .ident => |name| { + if (bun.strings.startsWith(name, "--")) { + tokens.append(input.allocator(), .{ .dashed_ident = .{ .v = name } }) catch unreachable; + last_is_delim = false; + last_is_whitespace = false; + continue; + } + }, + .open_paren, .open_square, .open_curly => { + tokens.append( + input.allocator(), + .{ .token = tok.* }, + ) catch unreachable; + const closing_delimiter: css.Token = switch (tok.*) { + .open_paren => .close_paren, + .open_square => .close_square, + .open_curly => .close_curly, + else => unreachable, + }; + const Closure = struct { + options: *const css.ParserOptions, + depth: usize, + tokens: *ArrayList(TokenOrValue), + pub fn parsefn(this: *@This(), input2: *css.Parser) Result(void) { + return TokenListFns.parseInto( + input2, + this.tokens, + this.options, + this.depth + 1, + ); + } + }; + var closure = Closure{ + .options = options, + .depth = depth, + .tokens = tokens, + }; + if (input.parseNestedBlock(void, &closure, Closure.parsefn).asErr()) |e| return .{ .err = e }; + tokens.append( + input.allocator(), + .{ .token = closing_delimiter }, + ) catch unreachable; + last_is_delim = true; // Whitespace is not required after any of these chars. 
+ last_is_whitespace = false; + continue; + }, + .dimension => { + const value = if (LengthValue.tryFromToken(tok).asValue()) |length| + TokenOrValue{ .length = length } + else if (Angle.tryFromToken(tok).asValue()) |angle| + TokenOrValue{ .angle = angle } + else if (Time.tryFromToken(tok).asValue()) |time| + TokenOrValue{ .time = time } + else if (Resolution.tryFromToken(tok).asValue()) |resolution| + TokenOrValue{ .resolution = resolution } + else + TokenOrValue{ .token = tok.* }; + + tokens.append( + input.allocator(), + value, + ) catch unreachable; + + last_is_delim = false; + last_is_whitespace = false; + continue; + }, + else => {}, + } + + if (tok.isParseError()) { + return .{ + .err = .{ + .kind = .{ .basic = .{ .unexpected_token = tok.* } }, + .location = state.sourceLocation(), + }, + }; + } + last_is_delim = switch (tok.*) { + .delim, .comma => true, + else => false, + }; + + // If this is a delimiter, and the last token was whitespace, + // replace the whitespace with the delimiter since both are not required. + if (last_is_delim and last_is_whitespace) { + const last = &tokens.items[tokens.items.len - 1]; + last.* = .{ .token = tok.* }; + } else { + tokens.append( + input.allocator(), + .{ .token = tok.* }, + ) catch unreachable; + } + + last_is_whitespace = false; + } + + return .{ .result = {} }; + } + + pub fn getFallback(this: *const TokenList, allocator: Allocator, kind: ColorFallbackKind) @This() { + var tokens = TokenList{}; + tokens.v.ensureTotalCapacity(allocator, this.v.items.len) catch bun.outOfMemory(); + for (this.v.items, tokens.v.items[0..this.v.items.len]) |*old, *new| { + new.* = switch (old.*) { + .color => |*color| TokenOrValue{ .color = color.getFallback(allocator, kind) }, + .function => |*f| TokenOrValue{ .function = f.getFallback(allocator, kind) }, + .@"var" => |*v| TokenOrValue{ .@"var" = v.getFallback(allocator, kind) }, + .env => |*e| TokenOrValue{ .env = e.getFallback(allocator, kind) }, + else => old.deepClone(allocator), + }; + } + tokens.v.items.len = this.v.items.len; + return tokens; + } + + pub const Fallbacks = struct { SupportsCondition, TokenList }; + pub fn getFallbacks(this: *const TokenList, allocator: Allocator, targets: css.targets.Targets) css.SmallList(Fallbacks, 2) { + // Get the full list of possible fallbacks, and remove the lowest one, which will replace + // the original declaration. The remaining fallbacks need to be added as @supports rules. 
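+        // For example, a `lab()` color may yield P3 and/or LAB `@supports` fallbacks here, while the lowest fallback (e.g. plain `rgb()`) replaces the original value below.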
+ var fallbacks = this.getNecessaryFallbacks(targets); + const lowest_fallback = fallbacks.lowest(); + fallbacks.remove(lowest_fallback); + + var res = css.SmallList(Fallbacks, 2){}; + if (fallbacks.contains(ColorFallbackKind.P3)) { + res.appendAssumeCapacity(.{ + ColorFallbackKind.P3.supportsCondition(), + this.getFallback(allocator, ColorFallbackKind.P3), + }); + } + + if (fallbacks.contains(ColorFallbackKind.LAB)) { + res.appendAssumeCapacity(.{ + ColorFallbackKind.LAB.supportsCondition(), + this.getFallback(allocator, ColorFallbackKind.LAB), + }); + } + + if (!lowest_fallback.isEmpty()) { + for (this.v.items) |*token_or_value| { + switch (token_or_value.*) { + .color => |*color| { + color.* = color.getFallback(allocator, lowest_fallback); + }, + .function => |*f| { + f.* = f.getFallback(allocator, lowest_fallback); + }, + .@"var" => |*v| { + if (v.fallback) |*fallback| { + fallback.* = fallback.getFallback(allocator, lowest_fallback); + } + }, + .env => |*v| { + if (v.fallback) |*fallback| { + fallback.* = fallback.getFallback(allocator, lowest_fallback); + } + }, + else => {}, + } + } + } + + return res; + } + + pub fn getNecessaryFallbacks(this: *const TokenList, targets: css.targets.Targets) ColorFallbackKind { + var fallbacks = ColorFallbackKind.empty(); + for (this.v.items) |*token_or_value| { + switch (token_or_value.*) { + .color => |*color| { + fallbacks.insert(color.getPossibleFallbacks(targets)); + }, + .function => |*f| { + fallbacks.insert(f.arguments.getNecessaryFallbacks(targets)); + }, + .@"var" => |*v| { + if (v.fallback) |*fallback| { + fallbacks.insert(fallback.getNecessaryFallbacks(targets)); + } + }, + .env => |*v| { + if (v.fallback) |*fallback| { + fallbacks.insert(fallback.getNecessaryFallbacks(targets)); + } + }, + else => {}, + } + } + + return fallbacks; + } + + pub fn eql(lhs: *const TokenList, rhs: *const TokenList) bool { + return css.generic.eqlList(TokenOrValue, &lhs.v, &rhs.v); + } + + pub fn hash(this: *const @This(), hasher: *std.hash.Wyhash) void { + return css.implementHash(@This(), this, hasher); + } + + pub fn deepClone(this: *const TokenList, allocator: Allocator) TokenList { + return .{ + .v = css.deepClone(TokenOrValue, allocator, &this.v), + }; + } +}; +pub const TokenListFns = TokenList; + +/// A color value with an unresolved alpha value (e.g. a variable). +/// These can be converted from the modern slash syntax to older comma syntax. +/// This can only be done when the only unresolved component is the alpha +/// since variables can resolve to multiple tokens. +pub const UnresolvedColor = union(enum) { + /// An rgb() color. + RGB: struct { + /// The red component. + r: f32, + /// The green component. + g: f32, + /// The blue component. + b: f32, + /// The unresolved alpha component. + alpha: TokenList, + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + pub fn __generateHash() void {} + }, + /// An hsl() color. + HSL: struct { + /// The hue component. + h: f32, + /// The saturation component. + s: f32, + /// The lightness component. + l: f32, + /// The unresolved alpha component. + alpha: TokenList, + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + pub fn __generateHash() void {} + }, + /// The light-dark() function. + light_dark: struct { + /// The light value. + light: TokenList, + /// The dark value. 
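+        /// For example, the second argument in `light-dark(white, black)`.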
+ dark: TokenList, + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn __generateHash() void {} + }, + const This = @This(); + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn hash(this: *const @This(), hasher: *std.hash.Wyhash) void { + return css.implementHash(@This(), this, hasher); + } + + pub fn deepClone(this: *const This, allocator: Allocator) This { + return switch (this.*) { + .RGB => |*rgb| .{ .RGB = .{ .r = rgb.r, .g = rgb.g, .b = rgb.b, .alpha = rgb.alpha.deepClone(allocator) } }, + .HSL => |*hsl| .{ .HSL = .{ .h = hsl.h, .s = hsl.s, .l = hsl.l, .alpha = hsl.alpha.deepClone(allocator) } }, + .light_dark => |*light_dark| .{ + .light_dark = .{ + .light = light_dark.light.deepClone(allocator), + .dark = light_dark.dark.deepClone(allocator), + }, + }, + }; + } + + pub fn deinit(this: *This, allocator: Allocator) void { + return switch (this.*) { + .RGB => |*rgb| rgb.alpha.deinit(allocator), + .HSL => |*hsl| hsl.alpha.deinit(allocator), + .light_dark => |*light_dark| { + light_dark.light.deinit(allocator); + light_dark.dark.deinit(allocator); + }, + }; + } + + pub fn toCss( + this: *const This, + comptime W: type, + dest: *Printer(W), + is_custom_property: bool, + ) PrintErr!void { + const Helper = struct { + pub fn conv(c: f32) i32 { + return @intFromFloat(bun.clamp(@round(c * 255.0), 0.0, 255.0)); + } + }; + + switch (this.*) { + .RGB => |rgb| { + if (dest.targets.shouldCompileSame(.space_separated_color_notation)) { + try dest.writeStr("rgba("); + try css.to_css.integer(i32, Helper.conv(rgb.r), W, dest); + try dest.delim(',', false); + try css.to_css.integer(i32, Helper.conv(rgb.g), W, dest); + try dest.delim(',', false); + try css.to_css.integer(i32, Helper.conv(rgb.b), W, dest); + try rgb.alpha.toCss(W, dest, is_custom_property); + try dest.writeChar(')'); + return; + } + + try dest.writeStr("rgb("); + try css.to_css.integer(i32, Helper.conv(rgb.r), W, dest); + try dest.writeChar(' '); + try css.to_css.integer(i32, Helper.conv(rgb.g), W, dest); + try dest.writeChar(' '); + try css.to_css.integer(i32, Helper.conv(rgb.b), W, dest); + try dest.delim('/', true); + try rgb.alpha.toCss(W, dest, is_custom_property); + try dest.writeChar(')'); + }, + .HSL => |hsl| { + if (dest.targets.shouldCompileSame(.space_separated_color_notation)) { + try dest.writeStr("hsla("); + try CSSNumberFns.toCss(&hsl.h, W, dest); + try dest.delim(',', false); + try (Percentage{ .v = hsl.s }).toCss(W, dest); + try dest.delim(',', false); + try (Percentage{ .v = hsl.l }).toCss(W, dest); + try dest.delim(',', false); + try hsl.alpha.toCss(W, dest, is_custom_property); + try dest.writeChar(')'); + return; + } + + try dest.writeStr("hsl("); + try CSSNumberFns.toCss(&hsl.h, W, dest); + try dest.writeChar(' '); + try (Percentage{ .v = hsl.s }).toCss(W, dest); + try dest.writeChar(' '); + try (Percentage{ .v = hsl.l }).toCss(W, dest); + try dest.delim('/', true); + try hsl.alpha.toCss(W, dest, is_custom_property); + try dest.writeChar(')'); + return; + }, + .light_dark => |*ld| { + const light: *const TokenList = &ld.light; + const dark: *const TokenList = &ld.dark; + + if (!dest.targets.isCompatible(.light_dark)) { + // TODO(zack): lightningcss -> buncss + try dest.writeStr("var(--lightningcss-light)"); + try dest.delim(',', false); + try light.toCss(W, dest, is_custom_property); + try dest.writeChar(')'); + try dest.whitespace(); + try 
dest.writeStr("var(--lightningcss-dark"); + try dest.delim(',', false); + try dark.toCss(W, dest, is_custom_property); + return dest.writeChar(')'); + } + + try dest.writeStr("light-dark("); + try light.toCss(W, dest, is_custom_property); + try dest.delim(',', false); + try dark.toCss(W, dest, is_custom_property); + try dest.writeChar(')'); + }, + } + } + + pub fn parse( + input: *css.Parser, + f: []const u8, + options: *const css.ParserOptions, + ) Result(UnresolvedColor) { + var parser = ComponentParser.new(false); + // css.todo_stuff.match_ignore_ascii_case + if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(f, "rgb")) { + const Closure = struct { + options: *const css.ParserOptions, + parser: *ComponentParser, + pub fn parsefn(this: *@This(), input2: *css.Parser) Result(UnresolvedColor) { + return this.parser.parseRelative(input2, SRGB, UnresolvedColor, @This().innerParseFn, .{this.options}); + } + pub fn innerParseFn(i: *css.Parser, p: *ComponentParser, opts: *const css.ParserOptions) Result(UnresolvedColor) { + const r, const g, const b, const is_legacy = switch (css.css_values.color.parseRGBComponents(i, p)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + if (is_legacy) { + return .{ .err = i.newCustomError(css.ParserError.invalid_value) }; + } + if (i.expectDelim('/').asErr()) |e| return .{ .err = e }; + const alpha = switch (TokenListFns.parse(i, opts, 0)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + return .{ .result = UnresolvedColor{ + .RGB = .{ + .r = r, + .g = g, + .b = b, + .alpha = alpha, + }, + } }; + } + }; + var closure = Closure{ + .options = options, + .parser = &parser, + }; + return input.parseNestedBlock(UnresolvedColor, &closure, Closure.parsefn); + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(f, "hsl")) { + const Closure = struct { + options: *const css.ParserOptions, + parser: *ComponentParser, + pub fn parsefn(this: *@This(), input2: *css.Parser) Result(UnresolvedColor) { + return this.parser.parseRelative(input2, HSL, UnresolvedColor, @This().innerParseFn, .{this.options}); + } + pub fn innerParseFn(i: *css.Parser, p: *ComponentParser, opts: *const css.ParserOptions) Result(UnresolvedColor) { + const h, const s, const l, const is_legacy = switch (css.css_values.color.parseHSLHWBComponents(HSL, i, p, false)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + if (is_legacy) { + return .{ .err = i.newCustomError(css.ParserError.invalid_value) }; + } + if (i.expectDelim('/').asErr()) |e| return .{ .err = e }; + const alpha = switch (TokenListFns.parse(i, opts, 0)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + return .{ .result = UnresolvedColor{ + .HSL = .{ + .h = h, + .s = s, + .l = l, + .alpha = alpha, + }, + } }; + } + }; + var closure = Closure{ + .options = options, + .parser = &parser, + }; + return input.parseNestedBlock(UnresolvedColor, &closure, Closure.parsefn); + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(f, "light-dark")) { + const Closure = struct { + options: *const css.ParserOptions, + parser: *ComponentParser, + pub fn parsefn(this: *@This(), input2: *css.Parser) Result(UnresolvedColor) { + const light = switch (input2.parseUntilBefore(css.Delimiters{ .comma = true }, TokenList, this, @This().parsefn2)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + // TODO: fix this + errdefer light.deinit(); + if (input2.expectComma().asErr()) |e| return .{ .err = e }; + const dark = switch (TokenListFns.parse(input2, this.options, 0)) 
{ + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + // TODO: fix this + errdefer dark.deinit(); + return .{ .result = UnresolvedColor{ + .light_dark = .{ + .light = light, + .dark = dark, + }, + } }; + } + + pub fn parsefn2(this: *@This(), input2: *css.Parser) Result(TokenList) { + return TokenListFns.parse(input2, this.options, 1); + } + }; + var closure = Closure{ + .options = options, + .parser = &parser, + }; + return input.parseNestedBlock(UnresolvedColor, &closure, Closure.parsefn); + } else { + return .{ .err = input.newCustomError(css.ParserError.invalid_value) }; + } + } + + pub fn lightDarkOwned(allocator: Allocator, light: UnresolvedColor, dark: UnresolvedColor) UnresolvedColor { + var lightlist = ArrayList(TokenOrValue).initCapacity(allocator, 1) catch bun.outOfMemory(); + lightlist.append(allocator, TokenOrValue{ .unresolved_color = light }) catch bun.outOfMemory(); + var darklist = ArrayList(TokenOrValue).initCapacity(allocator, 1) catch bun.outOfMemory(); + darklist.append(allocator, TokenOrValue{ .unresolved_color = dark }) catch bun.outOfMemory(); + return UnresolvedColor{ + .light_dark = .{ + .light = css.TokenList{ .v = lightlist }, + .dark = css.TokenList{ .v = darklist }, + }, + }; + } +}; + +/// A CSS variable reference. +pub const Variable = struct { + /// The variable name. + name: DashedIdentReference, + /// A fallback value in case the variable is not defined. + fallback: ?TokenList, + + const This = @This(); + + pub fn deinit(this: *Variable, allocator: Allocator) void { + if (this.fallback) |*fallback| { + fallback.deinit(allocator); + } + } + + pub fn parse( + input: *css.Parser, + options: *const css.ParserOptions, + depth: usize, + ) Result(This) { + const name = switch (DashedIdentReference.parseWithOptions(input, options)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + + const fallback = if (input.tryParse(css.Parser.expectComma, .{}).isOk()) + switch (TokenList.parse(input, options, depth)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + } + else + null; + + return .{ .result = Variable{ .name = name, .fallback = fallback } }; + } + + pub fn toCss( + this: *const This, + comptime W: type, + dest: *Printer(W), + is_custom_property: bool, + ) PrintErr!void { + try dest.writeStr("var("); + try this.name.toCss(W, dest); + if (this.fallback) |*fallback| { + try dest.delim(',', false); + try fallback.toCss(W, dest, is_custom_property); + } + return try dest.writeChar(')'); + } + + pub fn getFallback(this: *const Variable, allocator: Allocator, kind: ColorFallbackKind) @This() { + return Variable{ + .name = this.name, + .fallback = if (this.fallback) |*fallback| fallback.getFallback(allocator, kind) else null, + }; + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn hash(this: *const @This(), hasher: *std.hash.Wyhash) void { + return css.implementHash(@This(), this, hasher); + } + + pub fn deepClone(this: *const Variable, allocator: Allocator) Variable { + return .{ + .name = this.name, + .fallback = if (this.fallback) |*fallback| fallback.deepClone(allocator) else null, + }; + } +}; + +/// A CSS environment variable reference. +pub const EnvironmentVariable = struct { + /// The environment variable name. + name: EnvironmentVariableName, + /// Optional indices into the dimensions of the environment variable. + /// TODO(zack): this could totally be a smallvec, why isn't it? 
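+ /// Illustrative example, using a UA name from the enum below: `env(viewport-segment-width 0 0, 300px)`
+ /// parses with `indices = { 0, 0 }` and a fallback token list containing `300px`.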
+ indices: ArrayList(CSSInteger) = ArrayList(CSSInteger){}, + /// A fallback value in case the variable is not defined. + fallback: ?TokenList, + + pub fn deinit(this: *EnvironmentVariable, allocator: Allocator) void { + this.indices.deinit(allocator); + if (this.fallback) |*fallback| { + fallback.deinit(allocator); + } + } + + pub fn parse(input: *css.Parser, options: *const css.ParserOptions, depth: usize) Result(EnvironmentVariable) { + if (input.expectFunctionMatching("env").asErr()) |e| return .{ .err = e }; + const Closure = struct { + options: *const css.ParserOptions, + depth: usize, + pub fn parsefn(this: *@This(), i: *css.Parser) Result(EnvironmentVariable) { + return EnvironmentVariable.parseNested(i, this.options, this.depth); + } + }; + var closure = Closure{ + .options = options, + .depth = depth, + }; + return input.parseNestedBlock(EnvironmentVariable, &closure, Closure.parsefn); + } + + pub fn parseNested(input: *css.Parser, options: *const css.ParserOptions, depth: usize) Result(EnvironmentVariable) { + const name = switch (EnvironmentVariableName.parse(input)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + var indices = ArrayList(i32){}; + while (switch (input.tryParse(CSSIntegerFns.parse, .{})) { + .result => |v| v, + .err => null, + }) |idx| { + indices.append( + input.allocator(), + idx, + ) catch unreachable; + } + + const fallback = if (input.tryParse(css.Parser.expectComma, .{}).isOk()) + switch (TokenListFns.parse(input, options, depth + 1)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + } + else + null; + + return .{ .result = EnvironmentVariable{ + .name = name, + .indices = indices, + .fallback = fallback, + } }; + } + + pub fn toCss( + this: *const EnvironmentVariable, + comptime W: type, + dest: *Printer(W), + is_custom_property: bool, + ) PrintErr!void { + try dest.writeStr("env("); + try this.name.toCss(W, dest); + + for (this.indices.items) |index| { + try dest.writeChar(' '); + try css.to_css.integer(i32, index, W, dest); + } + + if (this.fallback) |*fallback| { + try dest.delim(',', false); + try fallback.toCss(W, dest, is_custom_property); + } + + return try dest.writeChar(')'); + } + + pub fn getFallback(this: *const EnvironmentVariable, allocator: Allocator, kind: ColorFallbackKind) @This() { + return EnvironmentVariable{ + .name = this.name, + .indices = this.indices.clone(allocator) catch bun.outOfMemory(), + .fallback = if (this.fallback) |*fallback| fallback.getFallback(allocator, kind) else null, + }; + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn hash(this: *const @This(), hasher: *std.hash.Wyhash) void { + return css.implementHash(@This(), this, hasher); + } + + pub fn deepClone(this: *const EnvironmentVariable, allocator: Allocator) EnvironmentVariable { + return .{ + .name = this.name, + .indices = this.indices.clone(allocator) catch bun.outOfMemory(), + .fallback = if (this.fallback) |*fallback| fallback.deepClone(allocator) else null, + }; + } +}; + +/// A CSS environment variable name. +pub const EnvironmentVariableName = union(enum) { + /// A UA-defined environment variable. + ua: UAEnvironmentVariable, + /// A custom author-defined environment variable. + custom: DashedIdentReference, + /// An unknown environment variable. 
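+ /// (Anything that is neither a UA-defined name from the enum below nor a `--`-prefixed
+ /// ident ends up here, e.g. the name in `env(titlebar-area-height, 0px)`.)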
+ unknown: CustomIdent, + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + pub fn hash(this: *const @This(), hasher: *std.hash.Wyhash) void { + return css.implementHash(@This(), this, hasher); + } + + pub fn parse(input: *css.Parser) Result(EnvironmentVariableName) { + if (input.tryParse(UAEnvironmentVariable.parse, .{}).asValue()) |ua| { + return .{ .result = .{ .ua = ua } }; + } + + if (input.tryParse(DashedIdentReference.parseWithOptions, .{ + &css.ParserOptions.default( + input.allocator(), + null, + ), + }).asValue()) |dashed| { + return .{ .result = .{ .custom = dashed } }; + } + + const ident = switch (CustomIdentFns.parse(input)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + return .{ .result = .{ .unknown = ident } }; + } + + pub fn toCss(this: *const @This(), comptime W: type, dest: *Printer(W)) PrintErr!void { + return switch (this.*) { + .ua => |ua| ua.toCss(W, dest), + .custom => |custom| custom.toCss(W, dest), + .unknown => |unknown| CustomIdentFns.toCss(&unknown, W, dest), + }; + } +}; + +/// A UA-defined environment variable name. +pub const UAEnvironmentVariable = enum { + /// The safe area inset from the top of the viewport. + @"safe-area-inset-top", + /// The safe area inset from the right of the viewport. + @"safe-area-inset-right", + /// The safe area inset from the bottom of the viewport. + @"safe-area-inset-bottom", + /// The safe area inset from the left of the viewport. + @"safe-area-inset-left", + /// The viewport segment width. + @"viewport-segment-width", + /// The viewport segment height. + @"viewport-segment-height", + /// The viewport segment top position. + @"viewport-segment-top", + /// The viewport segment left position. + @"viewport-segment-left", + /// The viewport segment bottom position. + @"viewport-segment-bottom", + /// The viewport segment right position. + @"viewport-segment-right", + + pub usingnamespace css.DefineEnumProperty(@This()); + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } +}; + +/// A custom CSS function. +pub const Function = struct { + /// The function name. + name: Ident, + /// The function arguments. + arguments: TokenList, + + const This = @This(); + + pub fn deinit(this: *Function, allocator: Allocator) void { + this.arguments.deinit(allocator); + } + + pub fn toCss( + this: *const This, + comptime W: type, + dest: *Printer(W), + is_custom_property: bool, + ) PrintErr!void { + try IdentFns.toCss(&this.name, W, dest); + try dest.writeChar('('); + try this.arguments.toCss(W, dest, is_custom_property); + return try dest.writeChar(')'); + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn hash(this: *const @This(), hasher: *std.hash.Wyhash) void { + return css.implementHash(@This(), this, hasher); + } + + pub fn deepClone(this: *const Function, allocator: Allocator) Function { + return .{ + .name = this.name, + .arguments = this.arguments.deepClone(allocator), + }; + } + + pub fn getFallback(this: *const Function, allocator: Allocator, kind: ColorFallbackKind) @This() { + return Function{ + .name = this.name.deepClone(allocator), + .arguments = this.arguments.getFallback(allocator, kind), + }; + } +}; + +/// A raw CSS token, or a parsed value. +pub const TokenOrValue = union(enum) { + /// A token. + token: css.Token, + /// A parsed CSS color. + color: CssColor, + /// A color with unresolved components. 
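+ /// For example (illustrative): `rgb(255 0 0 / var(--alpha))` cannot be fully resolved, so it
+ /// is stored as an `UnresolvedColor` with the alpha kept as a raw token list.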
+ unresolved_color: UnresolvedColor, + /// A parsed CSS url. + url: Url, + /// A CSS variable reference. + @"var": Variable, + /// A CSS environment variable reference. + env: EnvironmentVariable, + /// A custom CSS function. + function: Function, + /// A length. + length: LengthValue, + /// An angle. + angle: Angle, + /// A time. + time: Time, + /// A resolution. + resolution: Resolution, + /// A dashed ident. + dashed_ident: DashedIdent, + /// An animation name. + animation_name: AnimationName, + + pub fn eql(lhs: *const TokenOrValue, rhs: *const TokenOrValue) bool { + return css.implementEql(TokenOrValue, lhs, rhs); + } + + pub fn hash(this: *const @This(), hasher: *std.hash.Wyhash) void { + return css.implementHash(@This(), this, hasher); + } + + pub fn deepClone(this: *const TokenOrValue, allocator: Allocator) TokenOrValue { + return switch (this.*) { + .token => this.*, + .color => |*color| .{ .color = color.deepClone(allocator) }, + .unresolved_color => |*color| .{ .unresolved_color = color.deepClone(allocator) }, + .url => this.*, + .@"var" => |*@"var"| .{ .@"var" = @"var".deepClone(allocator) }, + .env => |*env| .{ .env = env.deepClone(allocator) }, + .function => |*f| .{ .function = f.deepClone(allocator) }, + .length => this.*, + .angle => this.*, + .time => this.*, + .resolution => this.*, + .dashed_ident => this.*, + .animation_name => this.*, + }; + } + + pub fn deinit(this: *TokenOrValue, allocator: Allocator) void { + return switch (this.*) { + .token => {}, + .color => |*color| color.deinit(allocator), + .unresolved_color => |*color| color.deinit(allocator), + .url => {}, + .@"var" => |*@"var"| @"var".deinit(allocator), + .env => |*env| env.deinit(allocator), + .function => |*f| f.deinit(allocator), + .length => {}, + .angle => {}, + .time => {}, + .resolution => {}, + .dashed_ident => {}, + .animation_name => {}, + }; + } + + pub fn isWhitespace(self: *const TokenOrValue) bool { + switch (self.*) { + .token => |tok| return tok == .whitespace, + else => return false, + } + } +}; + +/// A known property with an unparsed value. +/// +/// This type is used when the value of a known property could not +/// be parsed, e.g. in the case css `var()` references are encountered. +/// In this case, the raw tokens are stored instead. +pub const UnparsedProperty = struct { + /// The id of the property. + property_id: css.PropertyId, + /// The property value, stored as a raw token list. + value: TokenList, + + pub fn parse(property_id: css.PropertyId, input: *css.Parser, options: *const css.ParserOptions) Result(UnparsedProperty) { + const Closure = struct { options: *const css.ParserOptions }; + const value = switch (input.parseUntilBefore(css.Delimiters{ .bang = true, .semicolon = true }, css.TokenList, &Closure{ .options = options }, struct { + pub fn parseFn(self: *const Closure, i: *css.Parser) Result(TokenList) { + return TokenList.parse(i, self.options, 0); + } + }.parseFn)) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + + return .{ .result = .{ .property_id = property_id, .value = value } }; + } + + /// Returns a new UnparsedProperty with the same value and the given property id. 
+ pub fn withPropertyId(this: *const @This(), allocator: Allocator, property_id: css.PropertyId) UnparsedProperty { + return UnparsedProperty{ .property_id = property_id, .value = this.value.deepClone(allocator) }; + } + + pub fn deepClone(this: *const @This(), allocator: Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } +}; + +/// A CSS custom property, representing any unknown property. +pub const CustomProperty = struct { + /// The name of the property. + name: CustomPropertyName, + /// The property value, stored as a raw token list. + value: TokenList, + + pub fn parse(name: CustomPropertyName, input: *css.Parser, options: *const css.ParserOptions) Result(CustomProperty) { + const Closure = struct { + options: *const css.ParserOptions, + + pub fn parsefn(this: *@This(), input2: *css.Parser) Result(TokenList) { + return TokenListFns.parse(input2, this.options, 0); + } + }; + + var closure = Closure{ + .options = options, + }; + + const value = switch (input.parseUntilBefore( + css.Delimiters{ + .bang = true, + .semicolon = true, + }, + TokenList, + &closure, + Closure.parsefn, + )) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + + return .{ .result = CustomProperty{ + .name = name, + .value = value, + } }; + } + + pub fn deepClone(this: *const @This(), allocator: Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } +}; + +/// A CSS custom property name. +pub const CustomPropertyName = union(enum) { + /// An author-defined CSS custom property. + custom: DashedIdent, + /// An unknown CSS property. + unknown: Ident, + + pub fn toCss(this: *const CustomPropertyName, comptime W: type, dest: *Printer(W)) PrintErr!void { + return switch (this.*) { + .custom => |custom| try custom.toCss(W, dest), + .unknown => |unknown| css.serializer.serializeIdentifier(unknown.v, dest) catch return dest.addFmtError(), + }; + } + + pub fn fromStr(name: []const u8) CustomPropertyName { + if (bun.strings.startsWith(name, "--")) return .{ .custom = .{ .v = name } }; + return .{ .unknown = .{ .v = name } }; + } + + pub fn asStr(self: *const CustomPropertyName) []const u8 { + switch (self.*) { + .custom => |custom| return custom.v, + .unknown => |unknown| return unknown.v, + } + } + + pub fn deepClone(this: *const @This(), allocator: Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } +}; + +pub fn tryParseColorToken(f: []const u8, state: *const css.ParserState, input: *css.Parser) ?CssColor { + // css.todo_stuff.match_ignore_ascii_case + if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(f, "rgb") or + bun.strings.eqlCaseInsensitiveASCIIICheckLength(f, "rgba") or + bun.strings.eqlCaseInsensitiveASCIIICheckLength(f, "hsl") or + bun.strings.eqlCaseInsensitiveASCIIICheckLength(f, "hsla") or + bun.strings.eqlCaseInsensitiveASCIIICheckLength(f, "hwb") or + bun.strings.eqlCaseInsensitiveASCIIICheckLength(f, "lab") or + bun.strings.eqlCaseInsensitiveASCIIICheckLength(f, "lch") or + bun.strings.eqlCaseInsensitiveASCIIICheckLength(f, "oklab") or + bun.strings.eqlCaseInsensitiveASCIIICheckLength(f, "oklch") or + bun.strings.eqlCaseInsensitiveASCIIICheckLength(f, "color") or + bun.strings.eqlCaseInsensitiveASCIIICheckLength(f, "color-mix") or + 
bun.strings.eqlCaseInsensitiveASCIIICheckLength(f, "light-dark")) + { + const s = input.state(); + input.reset(state); + if (CssColor.parse(input).asValue()) |color| { + return color; + } + input.reset(&s); + } + + return null; +} diff --git a/src/css/properties/display.zig b/src/css/properties/display.zig new file mode 100644 index 0000000000..eba2fee7cd --- /dev/null +++ b/src/css/properties/display.zig @@ -0,0 +1,292 @@ +const std = @import("std"); +const bun = @import("root").bun; +const Allocator = std.mem.Allocator; +const ArrayList = std.ArrayListUnmanaged; + +pub const css = @import("../css_parser.zig"); + +const SmallList = css.SmallList; +const Printer = css.Printer; +const PrintErr = css.PrintErr; +const Error = css.Error; + +const ContainerName = css.css_rules.container.ContainerName; + +const LengthPercentage = css.css_values.length.LengthPercentage; +const CustomIdent = css.css_values.ident.CustomIdent; +const CSSString = css.css_values.string.CSSString; +const CSSNumber = css.css_values.number.CSSNumber; +const LengthPercentageOrAuto = css.css_values.length.LengthPercentageOrAuto; +const Size2D = css.css_values.size.Size2D; +const DashedIdent = css.css_values.ident.DashedIdent; +const Image = css.css_values.image.Image; +const CssColor = css.css_values.color.CssColor; +const Ratio = css.css_values.ratio.Ratio; +const Length = css.css_values.length.LengthValue; +const Rect = css.css_values.rect.Rect; +const NumberOrPercentage = css.css_values.percentage.NumberOrPercentage; +const CustomIdentList = css.css_values.ident.CustomIdentList; + +/// A value for the [display](https://drafts.csswg.org/css-display-3/#the-display-properties) property. +pub const Display = union(enum) { + /// A display keyword. + keyword: DisplayKeyword, + /// The inside and outside display values. + pair: DisplayPair, + + pub usingnamespace css.DeriveParse(@This()); + pub usingnamespace css.DeriveToCss(@This()); + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn hash(this: *const @This(), hasher: *std.hash.Wyhash) void { + return css.implementHash(@This(), this, hasher); + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } +}; + +/// A value for the [visibility](https://drafts.csswg.org/css-display-3/#visibility) property. +pub const Visibility = enum { + /// The element is visible. + visible, + /// The element is hidden. + hidden, + /// The element is collapsed. + collapse, + + pub usingnamespace css.DefineEnumProperty(@This()); +}; + +/// A `display` keyword. +/// +/// See [Display](Display). +pub const DisplayKeyword = enum { + none, + contents, + @"table-row-group", + @"table-header-group", + @"table-footer-group", + @"table-row", + @"table-cell", + @"table-column-group", + @"table-column", + @"table-caption", + @"ruby-base", + @"ruby-text", + @"ruby-base-container", + @"ruby-text-container", + + pub usingnamespace css.DefineEnumProperty(@This()); +}; + +/// A pair of inside and outside display values, as used in the `display` property. +/// +/// See [Display](Display). +pub const DisplayPair = struct { + /// The outside display value. + outside: DisplayOutside, + /// The inside display value. + inside: DisplayInside, + /// Whether this is a list item. 
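+ /// Illustrative mappings, following the `parse`/`toCss` logic below: `display: inline-block`
+ /// and `display: inline flow-root` both become
+ /// `.{ .outside = .@"inline", .inside = .flow_root, .is_list_item = false }`, while
+ /// `display: list-item` becomes `.{ .outside = .block, .inside = .flow, .is_list_item = true }`.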
+ is_list_item: bool, + + pub fn parse(input: *css.Parser) css.Result(@This()) { + var list_item = false; + var outside: ?DisplayOutside = null; + var inside: ?DisplayInside = null; + + while (true) { + if (input.tryParse(css.Parser.expectIdentMatching, .{"list-item"}).isOk()) { + list_item = true; + continue; + } + + if (outside == null) { + if (input.tryParse(DisplayOutside.parse, .{}).asValue()) |o| { + outside = o; + continue; + } + } + + if (inside == null) { + if (input.tryParse(DisplayInside.parse, .{}).asValue()) |i| { + inside = i; + continue; + } + } + + break; + } + + if (list_item or inside != null or outside != null) { + const final_inside: DisplayInside = inside orelse DisplayInside.flow; + const final_outside: DisplayOutside = outside orelse switch (final_inside) { + // "If is omitted, the element’s outside display type + // defaults to block — except for ruby, which defaults to inline." + // https://drafts.csswg.org/css-display/#inside-model + .ruby => .@"inline", + else => .block, + }; + + if (list_item and !(final_inside == .flow or final_inside == .flow_root)) { + return .{ .err = input.newCustomError(.invalid_declaration) }; + } + + return .{ .result = .{ + .outside = final_outside, + .inside = final_inside, + .is_list_item = list_item, + } }; + } + + const location = input.currentSourceLocation(); + const ident = switch (input.expectIdent()) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + + const displayIdentMap = bun.ComptimeStringMap(DisplayPair, .{ + .{ "inline-block", DisplayPair{ .outside = .@"inline", .inside = .flow_root, .is_list_item = false } }, + .{ "inline-table", DisplayPair{ .outside = .@"inline", .inside = .table, .is_list_item = false } }, + .{ "inline-flex", DisplayPair{ .outside = .@"inline", .inside = .{ .flex = css.VendorPrefix{ .none = true } }, .is_list_item = false } }, + .{ "-webkit-inline-flex", DisplayPair{ .outside = .@"inline", .inside = .{ .flex = css.VendorPrefix{ .webkit = true } }, .is_list_item = false } }, + .{ "-ms-inline-flexbox", DisplayPair{ .outside = .@"inline", .inside = .{ .flex = css.VendorPrefix{ .ms = true } }, .is_list_item = false } }, + .{ "-webkit-inline-box", DisplayPair{ .outside = .@"inline", .inside = .{ .box = css.VendorPrefix{ .webkit = true } }, .is_list_item = false } }, + .{ "-moz-inline-box", DisplayPair{ .outside = .@"inline", .inside = .{ .box = css.VendorPrefix{ .moz = true } }, .is_list_item = false } }, + .{ "inline-grid", DisplayPair{ .outside = .@"inline", .inside = .grid, .is_list_item = false } }, + }); + if (displayIdentMap.getASCIIICaseInsensitive(ident)) |pair| { + return .{ .result = pair }; + } + + return .{ .err = location.newUnexpectedTokenError(.{ .ident = ident }) }; + } + + pub fn toCss(this: *const DisplayPair, comptime W: type, dest: *css.Printer(W)) css.PrintErr!void { + if (this.outside == .@"inline" and this.inside == .flow_root and !this.is_list_item) { + return dest.writeStr("inline-block"); + } else if (this.outside == .@"inline" and this.inside == .table and !this.is_list_item) { + return dest.writeStr("inline-table"); + } else if (this.outside == .@"inline" and this.inside == .flex and !this.is_list_item) { + try this.inside.flex.toCss(W, dest); + if (this.inside.flex.eql(css.VendorPrefix{ .ms = true })) { + return dest.writeStr("inline-flexbox"); + } else { + return dest.writeStr("inline-flex"); + } + } else if (this.outside == .@"inline" and this.inside == .box and !this.is_list_item) { + try this.inside.box.toCss(W, dest); + return dest.writeStr("inline-box"); + 
} else if (this.outside == .@"inline" and this.inside == .grid and !this.is_list_item) { + return dest.writeStr("inline-grid"); + } else { + const default_outside: DisplayOutside = switch (this.inside) { + .ruby => .@"inline", + else => .block, + }; + + var needs_space = false; + if (!this.outside.eql(&default_outside) or (this.inside.eql(&DisplayInside{ .flow = {} }) and !this.is_list_item)) { + try this.outside.toCss(W, dest); + needs_space = true; + } + + if (!this.inside.eql(&DisplayInside{ .flow = {} })) { + if (needs_space) { + try dest.writeChar(' '); + } + try this.inside.toCss(W, dest); + needs_space = true; + } + + if (this.is_list_item) { + if (needs_space) { + try dest.writeChar(' '); + } + try dest.writeStr("list-item"); + } + } + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } +}; + +/// A [``](https://drafts.csswg.org/css-display-3/#typedef-display-outside) value. +pub const DisplayOutside = enum { + block, + @"inline", + @"run-in", + + pub usingnamespace css.DefineEnumProperty(@This()); +}; + +/// A [``](https://drafts.csswg.org/css-display-3/#typedef-display-inside) value. +pub const DisplayInside = union(enum) { + flow, + flow_root, + table, + flex: css.VendorPrefix, + box: css.VendorPrefix, + grid, + ruby, + + pub fn parse(input: *css.Parser) css.Result(@This()) { + const displayInsideMap = bun.ComptimeStringMap(DisplayInside, .{ + .{ "flow", DisplayInside.flow }, + .{ "flow-root", DisplayInside.flow_root }, + .{ "table", .table }, + .{ "flex", .{ .flex = css.VendorPrefix{ .none = true } } }, + .{ "-webkit-flex", .{ .flex = css.VendorPrefix{ .webkit = true } } }, + .{ "-ms-flexbox", .{ .flex = css.VendorPrefix{ .ms = true } } }, + .{ "-webkit-box", .{ .box = css.VendorPrefix{ .webkit = true } } }, + .{ "-moz-box", .{ .box = css.VendorPrefix{ .moz = true } } }, + .{ "grid", .grid }, + .{ "ruby", .ruby }, + }); + + const location = input.currentSourceLocation(); + const ident = switch (input.expectIdent()) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + + if (displayInsideMap.getASCIIICaseInsensitive(ident)) |value| { + return .{ .result = value }; + } + + return .{ .err = location.newUnexpectedTokenError(.{ .ident = ident }) }; + } + + pub fn toCss(this: *const DisplayInside, comptime W: type, dest: *css.Printer(W)) css.PrintErr!void { + switch (this.*) { + .flow => try dest.writeStr("flow"), + .flow_root => try dest.writeStr("flow-root"), + .table => try dest.writeStr("table"), + .flex => |prefix| { + try prefix.toCss(W, dest); + if (prefix.eql(css.VendorPrefix{ .ms = true })) { + try dest.writeStr("flexbox"); + } else { + try dest.writeStr("flex"); + } + }, + .box => |prefix| { + try prefix.toCss(W, dest); + try dest.writeStr("box"); + }, + .grid => try dest.writeStr("grid"), + .ruby => try dest.writeStr("ruby"), + } + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } +}; diff --git a/src/css/properties/effects.zig b/src/css/properties/effects.zig new file mode 100644 index 0000000000..34da8b7759 --- /dev/null +++ b/src/css/properties/effects.zig @@ -0,0 +1,77 @@ +const std = @import("std"); +const bun = @import("root").bun; +const Allocator = std.mem.Allocator; +const ArrayList = std.ArrayListUnmanaged; + +pub const css = @import("../css_parser.zig"); + +const SmallList = css.SmallList; +const Printer = css.Printer; +const PrintErr = css.PrintErr; +const Error = css.Error; + +const ContainerName = 
css.css_rules.container.ContainerName; + +const LengthPercentage = css.css_values.length.LengthPercentage; +const CustomIdent = css.css_values.ident.CustomIdent; +const CSSString = css.css_values.string.CSSString; +const CSSNumber = css.css_values.number.CSSNumber; +const LengthPercentageOrAuto = css.css_values.length.LengthPercentageOrAuto; +const Size2D = css.css_values.size.Size2D; +const DashedIdent = css.css_values.ident.DashedIdent; +const Image = css.css_values.image.Image; +const CssColor = css.css_values.color.CssColor; +const Ratio = css.css_values.ratio.Ratio; +const Length = css.css_values.length.LengthValue; +const Rect = css.css_values.rect.Rect; +const NumberOrPercentage = css.css_values.percentage.NumberOrPercentage; +const CustomIdentList = css.css_values.ident.CustomIdentList; +const Angle = css.css_values.angle.Angle; +const Url = css.css_values.url.Url; + +/// A value for the [filter](https://drafts.fxtf.org/filter-effects-1/#FilterProperty) and +/// [backdrop-filter](https://drafts.fxtf.org/filter-effects-2/#BackdropFilterProperty) properties. +pub const FilterList = union(enum) { + /// The `none` keyword. + none, + /// A list of filter functions. + filters: SmallList(Filter, 1), +}; + +/// A [filter](https://drafts.fxtf.org/filter-effects-1/#filter-functions) function. +pub const Filter = union(enum) { + /// A `blur()` filter. + blur: Length, + /// A `brightness()` filter. + brightness: NumberOrPercentage, + /// A `contrast()` filter. + contrast: NumberOrPercentage, + /// A `grayscale()` filter. + grayscale: NumberOrPercentage, + /// A `hue-rotate()` filter. + hue_rotate: Angle, + /// An `invert()` filter. + invert: NumberOrPercentage, + /// An `opacity()` filter. + opacity: NumberOrPercentage, + /// A `saturate()` filter. + saturate: NumberOrPercentage, + /// A `sepia()` filter. + sepia: NumberOrPercentage, + /// A `drop-shadow()` filter. + drop_shadow: DropShadow, + /// A `url()` reference to an SVG filter. + url: Url, +}; + +/// A [`drop-shadow()`](https://drafts.fxtf.org/filter-effects-1/#funcdef-filter-drop-shadow) filter function. +pub const DropShadow = struct { + /// The color of the drop shadow. + color: CssColor, + /// The x offset of the drop shadow. + x_offset: Length, + /// The y offset of the drop shadow. + y_offset: Length, + /// The blur radius of the drop shadow. 
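+ /// For example (illustrative): `filter: drop-shadow(2px 4px 6px black)` corresponds to
+ /// `x_offset = 2px`, `y_offset = 4px`, `blur = 6px`, and `color = black`.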
+ blur: Length, +}; diff --git a/src/css/properties/flex.zig b/src/css/properties/flex.zig new file mode 100644 index 0000000000..c94bfeb637 --- /dev/null +++ b/src/css/properties/flex.zig @@ -0,0 +1,395 @@ +const std = @import("std"); +const bun = @import("root").bun; +const Allocator = std.mem.Allocator; +const ArrayList = std.ArrayListUnmanaged; + +pub const css = @import("../css_parser.zig"); + +const SmallList = css.SmallList; +const Printer = css.Printer; +const PrintErr = css.PrintErr; +const Error = css.Error; + +const ContainerName = css.css_rules.container.ContainerName; + +const CSSNumberFns = css.css_values.number.CSSNumberFns; +const LengthPercentage = css.css_values.length.LengthPercentage; +const CustomIdent = css.css_values.ident.CustomIdent; +const CSSString = css.css_values.string.CSSString; +const CSSNumber = css.css_values.number.CSSNumber; +const LengthPercentageOrAuto = css.css_values.length.LengthPercentageOrAuto; +const Size2D = css.css_values.size.Size2D; +const DashedIdent = css.css_values.ident.DashedIdent; +const Image = css.css_values.image.Image; +const CssColor = css.css_values.color.CssColor; +const Ratio = css.css_values.ratio.Ratio; +const Length = css.css_values.length.LengthValue; +const Rect = css.css_values.rect.Rect; +const NumberOrPercentage = css.css_values.percentage.NumberOrPercentage; +const CustomIdentList = css.css_values.ident.CustomIdentList; +const Angle = css.css_values.angle.Angle; +const Url = css.css_values.url.Url; + +/// A value for the [flex-direction](https://www.w3.org/TR/2018/CR-css-flexbox-1-20181119/#propdef-flex-direction) property. +/// A value for the [flex-direction](https://www.w3.org/TR/2018/CR-css-flexbox-1-20181119/#propdef-flex-direction) property. +pub const FlexDirection = enum { + /// Flex items are laid out in a row. + row, + /// Flex items are laid out in a row, and reversed. + @"row-reverse", + /// Flex items are laid out in a column. + column, + /// Flex items are laid out in a column, and reversed. + @"column-reverse", + + pub usingnamespace css.DefineEnumProperty(@This()); + + pub fn default() FlexDirection { + return .row; + } +}; + +/// A value for the [flex-wrap](https://www.w3.org/TR/2018/CR-css-flexbox-1-20181119/#flex-wrap-property) property. +/// A value for the [flex-wrap](https://www.w3.org/TR/2018/CR-css-flexbox-1-20181119/#flex-wrap-property) property. +pub const FlexWrap = enum { + /// The flex items do not wrap. + nowrap, + /// The flex items wrap. + wrap, + /// The flex items wrap, in reverse. + @"wrap-reverse", + + pub usingnamespace css.DefineEnumProperty(@This()); + + pub fn default() FlexWrap { + return .nowrap; + } +}; + +/// A value for the [flex-flow](https://www.w3.org/TR/2018/CR-css-flexbox-1-20181119/#flex-flow-property) shorthand property. +/// A value for the [flex-flow](https://www.w3.org/TR/2018/CR-css-flexbox-1-20181119/#flex-flow-property) shorthand property. +pub const FlexFlow = struct { + /// The direction that flex items flow. + direction: FlexDirection, + /// How the flex items wrap. 
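+ /// Illustrative example, per the `parse` logic below: `flex-flow: column wrap` becomes
+ /// `.{ .direction = .column, .wrap = .wrap }`; omitted components fall back to `row` and `nowrap`.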
+ wrap: FlexWrap, + + pub usingnamespace css.DefineShorthand(@This(), css.PropertyIdTag.@"flex-flow"); + + pub const PropertyFieldMap = .{ + .direction = css.PropertyIdTag.@"flex-direction", + .wrap = css.PropertyIdTag.@"flex-wrap", + }; + + pub const VendorPrefixMap = .{ + .direction = true, + .wrap = true, + }; + + pub fn parse(input: *css.Parser) css.Result(@This()) { + var direction: ?FlexDirection = null; + var wrap: ?FlexWrap = null; + + while (true) { + if (direction == null) { + if (input.tryParse(FlexDirection.parse, .{}).asValue()) |value| { + direction = value; + continue; + } + } + if (wrap == null) { + if (input.tryParse(FlexWrap.parse, .{}).asValue()) |value| { + wrap = value; + continue; + } + } + break; + } + + return .{ + .result = FlexFlow{ + .direction = direction orelse FlexDirection.row, + .wrap = wrap orelse FlexWrap.nowrap, + }, + }; + } + + pub fn toCss(this: *const FlexFlow, comptime W: type, dest: *css.Printer(W)) css.PrintErr!void { + var needs_space = false; + if (!this.direction.eql(&FlexDirection.default()) or this.wrap.eql(&FlexWrap.default())) { + try this.direction.toCss(W, dest); + needs_space = true; + } + + if (!this.wrap.eql(&FlexWrap.default())) { + if (needs_space) { + try dest.writeStr(" "); + } + try this.wrap.toCss(W, dest); + } + + return; + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } +}; + +/// A value for the [flex](https://www.w3.org/TR/2018/CR-css-flexbox-1-20181119/#flex-property) shorthand property. +/// A value for the [flex](https://www.w3.org/TR/2018/CR-css-flexbox-1-20181119/#flex-property) shorthand property. +pub const Flex = struct { + /// The flex grow factor. + grow: CSSNumber, + /// The flex shrink factor. + shrink: CSSNumber, + /// The flex basis. 
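+ /// Illustrative examples, per the `parse` logic below: `flex: none` becomes
+ /// `grow = 0, shrink = 0, basis = auto`, and `flex: 1` becomes `grow = 1, shrink = 1, basis = 0%`.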
+ basis: LengthPercentageOrAuto, + + pub usingnamespace css.DefineShorthand(@This(), css.PropertyIdTag.flex); + + pub const PropertyFieldMap = .{ + .grow = css.PropertyIdTag.@"flex-grow", + .shrink = css.PropertyIdTag.@"flex-shrink", + .basis = css.PropertyIdTag.@"flex-basis", + }; + + pub const VendorPrefixMap = .{ + .grow = true, + .shrink = true, + .basis = true, + }; + + pub fn parse(input: *css.Parser) css.Result(@This()) { + if (input.tryParse(css.Parser.expectIdentMatching, .{"none"}).isOk()) { + return .{ + .result = .{ + .grow = 0.0, + .shrink = 0.0, + .basis = LengthPercentageOrAuto.auto, + }, + }; + } + + var grow: ?CSSNumber = null; + var shrink: ?CSSNumber = null; + var basis: ?LengthPercentageOrAuto = null; + + while (true) { + if (grow == null) { + if (input.tryParse(CSSNumberFns.parse, .{}).asValue()) |value| { + grow = value; + shrink = input.tryParse(CSSNumberFns.parse, .{}).asValue(); + continue; + } + } + + if (basis == null) { + if (input.tryParse(LengthPercentageOrAuto.parse, .{}).asValue()) |value| { + basis = value; + continue; + } + } + + break; + } + + return .{ + .result = .{ + .grow = grow orelse 1.0, + .shrink = shrink orelse 1.0, + .basis = basis orelse LengthPercentageOrAuto{ .length = LengthPercentage{ .percentage = .{ .v = 0.0 } } }, + }, + }; + } + + pub fn toCss(this: *const Flex, comptime W: type, dest: *css.Printer(W)) css.PrintErr!void { + if (this.grow == 0.0 and this.shrink == 0.0 and this.basis == .auto) { + try dest.writeStr("none"); + return; + } + + const ZeroKind = enum { + NonZero, + Length, + Percentage, + }; + + // If the basis is unitless 0, we must write all three components to disambiguate. + // If the basis is 0%, we can omit the basis. + const basis_kind = switch (this.basis) { + .length => |lp| brk: { + if (lp == .dimension and lp.dimension.isZero()) break :brk ZeroKind.Length; + if (lp == .percentage and lp.percentage.isZero()) break :brk ZeroKind.Percentage; + break :brk ZeroKind.NonZero; + }, + else => ZeroKind.NonZero, + }; + + if (this.grow != 1.0 or this.shrink != 1.0 or basis_kind != .NonZero) { + try CSSNumberFns.toCss(&this.grow, W, dest); + if (this.shrink != 1.0 or basis_kind == .Length) { + try dest.writeStr(" "); + try CSSNumberFns.toCss(&this.shrink, W, dest); + } + } + + if (basis_kind != .Percentage) { + if (this.grow != 1.0 or this.shrink != 1.0 or basis_kind == .Length) { + try dest.writeStr(" "); + } + try this.basis.toCss(W, dest); + } + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } +}; + +/// A value for the legacy (prefixed) [box-orient](https://www.w3.org/TR/2009/WD-css3-flexbox-20090723/#orientation) property. +/// Partially equivalent to `flex-direction` in the standard syntax. +/// A value for the legacy (prefixed) [box-orient](https://www.w3.org/TR/2009/WD-css3-flexbox-20090723/#orientation) property. +/// Partially equivalent to `flex-direction` in the standard syntax. +pub const BoxOrient = enum { + /// Items are laid out horizontally. + horizontal, + /// Items are laid out vertically. + vertical, + /// Items are laid out along the inline axis, according to the writing direction. + @"inline-axis", + /// Items are laid out along the block axis, according to the writing direction. 
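+ /// (For illustration: in a horizontal writing mode, `inline-axis` behaves like `horizontal`
+ /// and `block-axis` like `vertical`; the legacy property only partially maps to `flex-direction`.)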
+ @"block-axis", + + pub usingnamespace css.DefineEnumProperty(@This()); +}; + +/// A value for the legacy (prefixed) [box-direction](https://www.w3.org/TR/2009/WD-css3-flexbox-20090723/#displayorder) property. +/// Partially equivalent to the `flex-direction` property in the standard syntax. +pub const BoxDirection = enum { + /// Items flow in the natural direction. + normal, + /// Items flow in the reverse direction. + reverse, + + pub usingnamespace css.DefineEnumProperty(@This()); +}; + +/// A value for the legacy (prefixed) [box-align](https://www.w3.org/TR/2009/WD-css3-flexbox-20090723/#alignment) property. +/// Equivalent to the `align-items` property in the standard syntax. +/// A value for the legacy (prefixed) [box-align](https://www.w3.org/TR/2009/WD-css3-flexbox-20090723/#alignment) property. +/// Equivalent to the `align-items` property in the standard syntax. +pub const BoxAlign = enum { + /// Items are aligned to the start. + start, + /// Items are aligned to the end. + end, + /// Items are centered. + center, + /// Items are aligned to the baseline. + baseline, + /// Items are stretched. + stretch, + + pub usingnamespace css.DefineEnumProperty(@This()); +}; + +/// A value for the legacy (prefixed) [box-pack](https://www.w3.org/TR/2009/WD-css3-flexbox-20090723/#packing) property. +/// Equivalent to the `justify-content` property in the standard syntax. +/// A value for the legacy (prefixed) [box-pack](https://www.w3.org/TR/2009/WD-css3-flexbox-20090723/#packing) property. +/// Equivalent to the `justify-content` property in the standard syntax. +pub const BoxPack = enum { + /// Items are justified to the start. + start, + /// Items are justified to the end. + end, + /// Items are centered. + center, + /// Items are justified to the start and end. + justify, + + pub usingnamespace css.DefineEnumProperty(@This()); +}; + +/// A value for the legacy (prefixed) [box-lines](https://www.w3.org/TR/2009/WD-css3-flexbox-20090723/#multiple) property. +/// Equivalent to the `flex-wrap` property in the standard syntax. +/// A value for the legacy (prefixed) [box-lines](https://www.w3.org/TR/2009/WD-css3-flexbox-20090723/#multiple) property. +/// Equivalent to the `flex-wrap` property in the standard syntax. +pub const BoxLines = enum { + /// Items are laid out in a single line. + single, + /// Items may wrap into multiple lines. + multiple, + + pub usingnamespace css.DefineEnumProperty(@This()); +}; + +// Old flex (2012): https://www.w3.org/TR/2012/WD-css3-flexbox-20120322/ +/// A value for the legacy (prefixed) [flex-pack](https://www.w3.org/TR/2012/WD-css3-flexbox-20120322/#flex-pack) property. +/// Equivalent to the `justify-content` property in the standard syntax. +/// A value for the legacy (prefixed) [flex-pack](https://www.w3.org/TR/2012/WD-css3-flexbox-20120322/#flex-pack) property. +/// Equivalent to the `justify-content` property in the standard syntax. +pub const FlexPack = enum { + /// Items are justified to the start. + start, + /// Items are justified to the end. + end, + /// Items are centered. + center, + /// Items are justified to the start and end. + justify, + /// Items are distributed evenly, with half size spaces on either end. + distribute, + + pub usingnamespace css.DefineEnumProperty(@This()); +}; + +/// A value for the legacy (prefixed) [flex-item-align](https://www.w3.org/TR/2012/WD-css3-flexbox-20120322/#flex-align) property. +/// Equivalent to the `align-self` property in the standard syntax. 
+/// A value for the legacy (prefixed) [flex-item-align](https://www.w3.org/TR/2012/WD-css3-flexbox-20120322/#flex-align) property. +/// Equivalent to the `align-self` property in the standard syntax. +pub const FlexItemAlign = enum { + /// Equivalent to the value of `flex-align`. + auto, + /// The item is aligned to the start. + start, + /// The item is aligned to the end. + end, + /// The item is centered. + center, + /// The item is aligned to the baseline. + baseline, + /// The item is stretched. + stretch, + + pub usingnamespace css.DefineEnumProperty(@This()); +}; + +/// A value for the legacy (prefixed) [flex-line-pack](https://www.w3.org/TR/2012/WD-css3-flexbox-20120322/#flex-line-pack) property. +/// Equivalent to the `align-content` property in the standard syntax. +/// A value for the legacy (prefixed) [flex-line-pack](https://www.w3.org/TR/2012/WD-css3-flexbox-20120322/#flex-line-pack) property. +/// Equivalent to the `align-content` property in the standard syntax. +pub const FlexLinePack = enum { + /// Content is aligned to the start. + start, + /// Content is aligned to the end. + end, + /// Content is centered. + center, + /// Content is justified. + justify, + /// Content is distributed evenly, with half size spaces on either end. + distribute, + /// Content is stretched. + stretch, + + pub usingnamespace css.DefineEnumProperty(@This()); +}; diff --git a/src/css/properties/font.zig b/src/css/properties/font.zig new file mode 100644 index 0000000000..fe1980e19a --- /dev/null +++ b/src/css/properties/font.zig @@ -0,0 +1,655 @@ +const std = @import("std"); +const bun = @import("root").bun; +const Allocator = std.mem.Allocator; + +pub const css = @import("../css_parser.zig"); +const Error = css.Error; + +const ArrayList = std.ArrayListUnmanaged; +const SmallList = css.SmallList; + +const Printer = css.Printer; +const PrintErr = css.PrintErr; + +const css_values = css.css_values; +const CssColor = css.css_values.color.CssColor; +const Image = css.css_values.image.Image; +const Length = css.css_values.length.LengthValue; +const LengthPercentage = css_values.length.LengthPercentage; +const LengthPercentageOrAuto = css_values.length.LengthPercentageOrAuto; +const PropertyCategory = css.PropertyCategory; +const LogicalGroup = css.LogicalGroup; +const CSSNumber = css.css_values.number.CSSNumber; +const CSSNumberFns = css.css_values.number.CSSNumberFns; +const CSSInteger = css.css_values.number.CSSInteger; +const NumberOrPercentage = css.css_values.percentage.NumberOrPercentage; +const Percentage = css.css_values.percentage.Percentage; +const Angle = css.css_values.angle.Angle; +const DashedIdentReference = css.css_values.ident.DashedIdentReference; +const Time = css.css_values.time.Time; +const EasingFunction = css.css_values.easing.EasingFunction; +const CustomIdent = css.css_values.ident.CustomIdent; +const CSSString = css.css_values.string.CSSString; +const DashedIdent = css.css_values.ident.DashedIdent; +const Url = css.css_values.url.Url; +const CustomIdentList = css.css_values.ident.CustomIdentList; +const Location = css.Location; +const HorizontalPosition = css.css_values.position.HorizontalPosition; +const VerticalPosition = css.css_values.position.VerticalPosition; +const ContainerName = css.css_rules.container.ContainerName; + +/// A value for the [font-weight](https://www.w3.org/TR/css-fonts-4/#font-weight-prop) property. +pub const FontWeight = union(enum) { + /// An absolute font weight. + absolute: AbsoluteFontWeight, + /// The `bolder` keyword. 
+ bolder, + /// The `lighter` keyword. + lighter, + + // TODO: implement this + pub usingnamespace css.DeriveParse(@This()); + pub usingnamespace css.DeriveToCss(@This()); + + pub inline fn default() FontWeight { + return .{ .absolute = AbsoluteFontWeight.default() }; + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } +}; + +/// An [absolute font weight](https://www.w3.org/TR/css-fonts-4/#font-weight-absolute-values), +/// as used in the `font-weight` property. +/// +/// See [FontWeight](FontWeight). +pub const AbsoluteFontWeight = union(enum) { + /// An explicit weight. + weight: CSSNumber, + /// Same as `400`. + normal, + /// Same as `700`. + bold, + + pub usingnamespace css.DeriveParse(@This()); + + pub fn toCss(this: *const AbsoluteFontWeight, comptime W: type, dest: *css.Printer(W)) css.PrintErr!void { + return switch (this.*) { + .weight => |*weight| CSSNumberFns.toCss(weight, W, dest), + .normal => try dest.writeStr(if (dest.minify) "400" else "normal"), + .bold => try dest.writeStr(if (dest.minify) "700" else "bold"), + }; + } + + pub inline fn default() AbsoluteFontWeight { + return .normal; + } + + pub fn eql(lhs: *const AbsoluteFontWeight, rhs: *const AbsoluteFontWeight) bool { + return css.implementEql(@This(), lhs, rhs); + } +}; + +/// A value for the [font-size](https://www.w3.org/TR/css-fonts-4/#font-size-prop) property. +pub const FontSize = union(enum) { + /// An explicit size. + length: LengthPercentage, + /// An absolute font size keyword. + absolute: AbsoluteFontSize, + /// A relative font size keyword. + relative: RelativeFontSize, + + pub usingnamespace css.DeriveParse(@This()); + pub usingnamespace css.DeriveToCss(@This()); + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } +}; + +/// An [absolute font size](https://www.w3.org/TR/css-fonts-3/#absolute-size-value), +/// as used in the `font-size` property. +/// +/// See [FontSize](FontSize). +pub const AbsoluteFontSize = enum { + /// "xx-small" + @"xx-small", + /// "x-small" + @"x-small", + /// "small" + small, + /// "medium" + medium, + /// "large" + large, + /// "x-large" + @"x-large", + /// "xx-large" + @"xx-large", + /// "xxx-large" + @"xxx-large", + + pub usingnamespace css.DefineEnumProperty(@This()); +}; + +/// A [relative font size](https://www.w3.org/TR/css-fonts-3/#relative-size-value), +/// as used in the `font-size` property. +/// +/// See [FontSize](FontSize). +pub const RelativeFontSize = enum { + smaller, + larger, + + pub usingnamespace css.DefineEnumProperty(@This()); +}; + +/// A value for the [font-stretch](https://www.w3.org/TR/css-fonts-4/#font-stretch-prop) property. +pub const FontStretch = union(enum) { + /// A font stretch keyword. + keyword: FontStretchKeyword, + /// A percentage. 
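+ /// For example (illustrative): `font-stretch: 112.5%` is stored as a percentage and is
+ /// equivalent to the `semi-expanded` keyword listed below.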
+ percentage: Percentage, + + // TODO: implement this + // pub usingnamespace css.DeriveParse(@This()); + + pub fn parse(input: *css.Parser) css.Result(FontStretch) { + _ = input; // autofix + @panic(css.todo_stuff.depth); + } + + pub fn toCss(this: *const FontStretch, comptime W: type, dest: *css.Printer(W)) css.PrintErr!void { + _ = this; // autofix + _ = dest; // autofix + @panic(css.todo_stuff.depth); + } + + pub fn eql(lhs: *const FontStretch, rhs: *const FontStretch) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub inline fn default() FontStretch { + return .{ .keyword = FontStretchKeyword.default() }; + } +}; + +/// A [font stretch keyword](https://www.w3.org/TR/css-fonts-4/#font-stretch-prop), +/// as used in the `font-stretch` property. +/// +/// See [FontStretch](FontStretch). +pub const FontStretchKeyword = enum { + /// 100% + normal, + /// 50% + @"ultra-condensed", + /// 62.5% + @"extra-condensed", + /// 75% + condensed, + /// 87.5% + @"semi-condensed", + /// 112.5% + @"semi-expanded", + /// 125% + expanded, + /// 150% + @"extra-expanded", + /// 200% + @"ultra-expanded", + + pub usingnamespace css.DefineEnumProperty(@This()); + + pub inline fn default() FontStretchKeyword { + return .normal; + } +}; + +/// A value for the [font-family](https://www.w3.org/TR/css-fonts-4/#font-family-prop) property. +pub const FontFamily = union(enum) { + /// A generic family name. + generic: GenericFontFamily, + /// A custom family name. + family_name: []const u8, + + pub fn parse(input: *css.Parser) css.Result(@This()) { + if (input.tryParse(css.Parser.expectString, .{}).asValue()) |value| { + return .{ .result = .{ .family_name = value } }; + } + + if (input.tryParse(GenericFontFamily.parse, .{}).asValue()) |value| { + return .{ .result = .{ .generic = value } }; + } + + const stralloc = input.allocator(); + const value = switch (input.expectIdent()) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + var string: ?ArrayList(u8) = null; + while (input.tryParse(css.Parser.expectIdent, .{}).asValue()) |ident| { + if (string == null) { + string = ArrayList(u8){}; + string.?.appendSlice(stralloc, value) catch bun.outOfMemory(); + } + + if (string) |*s| { + s.append(stralloc, ' ') catch bun.outOfMemory(); + s.appendSlice(stralloc, ident) catch bun.outOfMemory(); + } + } + + const final_value = if (string) |s| s.items else value; + + return .{ .result = .{ .family_name = final_value } }; + } + + pub fn toCss(this: *const @This(), comptime W: type, dest: *Printer(W)) PrintErr!void { + switch (this.*) { + .generic => |val| { + try val.toCss(W, dest); + }, + .family_name => |val| { + // Generic family names such as sans-serif must be quoted if parsed as a string. + // CSS wide keywords, as well as "default", must also be quoted. 
+ // https://www.w3.org/TR/css-fonts-4/#family-name-syntax + + if (val.len > 0 and + !css.parse_utility.parseString( + dest.allocator, + GenericFontFamily, + val, + GenericFontFamily.parse, + ).isOk()) { + var id = ArrayList(u8){}; + defer id.deinit(dest.allocator); + var first = true; + var split_iter = std.mem.splitScalar(u8, val, ' '); + while (split_iter.next()) |slice| { + if (first) { + first = false; + } else { + id.append(dest.allocator, ' ') catch bun.outOfMemory(); + } + css.serializer.serializeIdentifier(slice, dest) catch return dest.addFmtError(); + } + if (id.items.len < val.len + 2) { + return dest.writeStr(id.items); + } + } + return css.serializer.serializeString(val, dest) catch return dest.addFmtError(); + }, + } + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } +}; + +/// A [generic font family](https://www.w3.org/TR/css-fonts-4/#generic-font-families) name, +/// as used in the `font-family` property. +/// +/// See [FontFamily](FontFamily). +pub const GenericFontFamily = enum { + serif, + @"sans-serif", + cursive, + fantasy, + monospace, + @"system-ui", + emoji, + math, + fangsong, + @"ui-serif", + @"ui-sans-serif", + @"ui-monospace", + @"ui-rounded", + + // CSS wide keywords. These must be parsed as identifiers so they + // don't get serialized as strings. + // https://www.w3.org/TR/css-values-4/#common-keywords + initial, + inherit, + unset, + // Default is also reserved by the type. + // https://www.w3.org/TR/css-values-4/#custom-idents + default, + + // CSS defaulting keywords + // https://drafts.csswg.org/css-cascade-5/#defaulting-keywords + revert, + @"revert-layer", + + pub usingnamespace css.DefineEnumProperty(@This()); +}; + +/// A value for the [font-style](https://www.w3.org/TR/css-fonts-4/#font-style-prop) property. +pub const FontStyle = union(enum) { + /// Normal font style. + normal, + /// Italic font style. + italic, + /// Oblique font style, with a custom angle. 
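+ /// For example (illustrative, per `parse` below): `font-style: oblique` falls back to the
+ /// default angle of 14deg, while `font-style: oblique 10deg` keeps the explicit 10deg angle.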
+ oblique: Angle,
+
+ pub fn default() FontStyle {
+ return .normal;
+ }
+
+ pub fn parse(input: *css.Parser) css.Result(FontStyle) {
+ const location = input.currentSourceLocation();
+ const ident = switch (input.expectIdent()) {
+ .result => |v| v,
+ .err => |e| return .{ .err = e },
+ };
+ // todo_stuff.match_ignore_ascii_case
+ if (bun.strings.eqlCaseInsensitiveASCIIICheckLength("normal", ident)) {
+ return .{ .result = .normal };
+ } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength("italic", ident)) {
+ return .{ .result = .italic };
+ } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength("oblique", ident)) {
+ const angle = input.tryParse(Angle.parse, .{}).unwrapOr(FontStyle.defaultObliqueAngle());
+ return .{ .result = .{ .oblique = angle } };
+ } else {
+ // Unknown `font-style` keyword.
+ return .{ .err = location.newUnexpectedTokenError(.{ .ident = ident }) };
+ }
+ }
+
+ pub fn toCss(this: *const FontStyle, comptime W: type, dest: *Printer(W)) PrintErr!void {
+ switch (this.*) {
+ .normal => try dest.writeStr("normal"),
+ .italic => try dest.writeStr("italic"),
+ .oblique => |angle| {
+ try dest.writeStr("oblique");
+ if (!angle.eql(&FontStyle.defaultObliqueAngle())) {
+ try dest.writeChar(' ');
+ try angle.toCss(W, dest);
+ }
+ },
+ }
+ }
+
+ pub fn defaultObliqueAngle() Angle {
+ return Angle{ .deg = 14.0 };
+ }
+
+ pub fn eql(lhs: *const @This(), rhs: *const @This()) bool {
+ return css.implementEql(@This(), lhs, rhs);
+ }
+
+ pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() {
+ return css.implementDeepClone(@This(), this, allocator);
+ }
+};
+
+/// A value for the [font-variant-caps](https://www.w3.org/TR/css-fonts-4/#font-variant-caps-prop) property.
+pub const FontVariantCaps = enum {
+ /// No special capitalization features are applied.
+ normal,
+ /// The small capitals feature is used for lower case letters.
+ @"small-caps",
+ /// Small capitals are used for both upper and lower case letters.
+ @"all-small-caps",
+ /// Petite capitals are used.
+ @"petite-caps",
+ /// Petite capitals are used for both upper and lower case letters.
+ @"all-petite-caps",
+ /// Enables display of mixture of small capitals for uppercase letters with normal lowercase letters.
+ unicase,
+ /// Uses titling capitals.
+ @"titling-caps",
+
+ pub usingnamespace css.DefineEnumProperty(@This());
+
+ pub fn default() FontVariantCaps {
+ return .normal;
+ }
+
+ fn isCss2(this: *const FontVariantCaps) bool {
+ return switch (this.*) {
+ .normal, .@"small-caps" => true,
+ else => false,
+ };
+ }
+
+ pub fn parseCss2(input: *css.Parser) css.Result(FontVariantCaps) {
+ const value = switch (FontVariantCaps.parse(input)) {
+ .result => |v| v,
+ .err => |e| return .{ .err = e },
+ };
+ if (!value.isCss2()) {
+ return .{ .err = input.newCustomError(css.ParserError.invalid_value) };
+ }
+ return .{ .result = value };
+ }
+};
+
+/// A value for the [line-height](https://www.w3.org/TR/2020/WD-css-inline-3-20200827/#propdef-line-height) property.
+pub const LineHeight = union(enum) {
+ /// The UA sets the line height based on the font.
+ normal,
+ /// A multiple of the element's font size.
+ number: CSSNumber,
+ /// An explicit height.
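+ /// For example, `line-height: 24px` or `line-height: 120%`.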
+ length: LengthPercentage, + + pub usingnamespace @call(.auto, css.DeriveParse, .{@This()}); + pub usingnamespace @call(.auto, css.DeriveToCss, .{@This()}); + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn default() LineHeight { + return .normal; + } +}; + +/// A value for the [font](https://www.w3.org/TR/css-fonts-4/#font-prop) shorthand property. +pub const Font = struct { + /// The font family. + family: bun.BabyList(FontFamily), + /// The font size. + size: FontSize, + /// The font style. + style: FontStyle, + /// The font weight. + weight: FontWeight, + /// The font stretch. + stretch: FontStretch, + /// The line height. + line_height: LineHeight, + /// How the text should be capitalized. Only CSS 2.1 values are supported. + variant_caps: FontVariantCaps, + + pub usingnamespace css.DefineShorthand(@This(), css.PropertyIdTag.font); + + pub const PropertyFieldMap = .{ + .family = css.PropertyIdTag.@"font-family", + .size = css.PropertyIdTag.@"font-size", + .style = css.PropertyIdTag.@"font-style", + .weight = css.PropertyIdTag.@"font-weight", + .stretch = css.PropertyIdTag.@"font-stretch", + .line_height = css.PropertyIdTag.@"line-height", + .variant_caps = css.PropertyIdTag.@"font-variant-caps", + }; + + pub fn parse(input: *css.Parser) css.Result(Font) { + var style: ?FontStyle = null; + var weight: ?FontWeight = null; + var stretch: ?FontStretch = null; + var size: ?FontSize = null; + var variant_caps: ?FontVariantCaps = null; + var count: i32 = 0; + + while (true) { + // Skip "normal" since it is valid for several properties, but we don't know which ones it will be used for yet. 
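+ // For example, in `font: normal normal 16px serif`, each `normal` could belong to
+ // font-style, font-weight, font-variant-caps, or font-stretch, so we only count it here.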
+ if (input.tryParse(css.Parser.expectIdentMatching, .{"normal"}).isOk()) {
+ count += 1;
+ continue;
+ }
+
+ if (style == null) {
+ if (input.tryParse(FontStyle.parse, .{}).asValue()) |value| {
+ style = value;
+ count += 1;
+ continue;
+ }
+ }
+
+ if (weight == null) {
+ if (input.tryParse(FontWeight.parse, .{}).asValue()) |value| {
+ weight = value;
+ count += 1;
+ continue;
+ }
+ }
+
+ if (variant_caps == null) {
+ if (input.tryParse(FontVariantCaps.parseCss2, .{}).asValue()) |value| {
+ variant_caps = value;
+ count += 1;
+ continue;
+ }
+ }
+
+ if (stretch == null) {
+ if (input.tryParse(FontStretchKeyword.parse, .{}).asValue()) |value| {
+ stretch = .{ .keyword = value };
+ count += 1;
+ continue;
+ }
+ }
+
+ size = switch (@call(.auto, @field(FontSize, "parse"), .{input})) {
+ .result => |v| v,
+ .err => |e| return .{ .err = e },
+ };
+ break;
+ }
+
+ if (count > 4) return .{ .err = input.newCustomError(css.ParserError.invalid_declaration) };
+
+ const final_size = size orelse return .{ .err = input.newCustomError(css.ParserError.invalid_declaration) };
+
+ const line_height = if (input.tryParse(css.Parser.expectDelim, .{'/'}).isOk()) switch (LineHeight.parse(input)) {
+ .result => |v| v,
+ .err => |e| return .{ .err = e },
+ } else null;
+
+ const family = switch (bun.BabyList(FontFamily).parse(input)) {
+ .result => |v| v,
+ .err => |e| return .{ .err = e },
+ };
+
+ return .{ .result = Font{
+ .family = family,
+ .size = final_size,
+ .style = style orelse FontStyle.default(),
+ .weight = weight orelse FontWeight.default(),
+ .stretch = stretch orelse FontStretch.default(),
+ .line_height = line_height orelse LineHeight.default(),
+ .variant_caps = variant_caps orelse FontVariantCaps.default(),
+ } };
+ }
+
+ pub fn toCss(this: *const Font, comptime W: type, dest: *Printer(W)) PrintErr!void {
+ if (!this.style.eql(&FontStyle.default())) {
+ try this.style.toCss(W, dest);
+ try dest.writeChar(' ');
+ }
+
+ if (!this.variant_caps.eql(&FontVariantCaps.default())) {
+ try this.variant_caps.toCss(W, dest);
+ try dest.writeChar(' ');
+ }
+
+ if (!this.weight.eql(&FontWeight.default())) {
+ try this.weight.toCss(W, dest);
+ try dest.writeChar(' ');
+ }
+
+ if (!this.stretch.eql(&FontStretch.default())) {
+ try this.stretch.toCss(W, dest);
+ try dest.writeChar(' ');
+ }
+
+ try this.size.toCss(W, dest);
+
+ if (!this.line_height.eql(&LineHeight.default())) {
+ try dest.delim('/', true);
+ try this.line_height.toCss(W, dest);
+ }
+
+ try dest.writeChar(' ');
+
+ const len = this.family.len;
+ for (this.family.sliceConst(), 0..) |*val, idx| {
+ try val.toCss(W, dest);
+ if (idx < len - 1) {
+ try dest.delim(',', false);
+ }
+ }
+ }
+
+ pub fn eql(lhs: *const @This(), rhs: *const @This()) bool {
+ return css.implementEql(@This(), lhs, rhs);
+ }
+
+ pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() {
+ return css.implementDeepClone(@This(), this, allocator);
+ }
+};
+
+/// A value for the [vertical align](https://drafts.csswg.org/css2/#propdef-vertical-align) property.
+// TODO: there is a more extensive spec in CSS3 but it doesn't seem any browser implements it? https://www.w3.org/TR/css-inline-3/#transverse-alignment
+pub const VerticalAlign = union(enum) {
+ /// A vertical align keyword.
+ keyword: VerticalAlignKeyword,
+ /// An explicit length.
+ length: LengthPercentage,
+};
+
+/// A keyword for the [vertical align](https://drafts.csswg.org/css2/#propdef-vertical-align) property.
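+///
+/// See [VerticalAlign](VerticalAlign).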
+pub const VerticalAlignKeyword = enum { + /// Align the baseline of the box with the baseline of the parent box. + baseline, + /// Lower the baseline of the box to the proper position for subscripts of the parent’s box. + sub, + /// Raise the baseline of the box to the proper position for superscripts of the parent’s box. + super, + /// Align the top of the aligned subtree with the top of the line box. + top, + /// Align the top of the box with the top of the parent’s content area. + @"text-top", + /// Align the vertical midpoint of the box with the baseline of the parent box plus half the x-height of the parent. + middle, + /// Align the bottom of the aligned subtree with the bottom of the line box. + bottom, + /// Align the bottom of the box with the bottom of the parent’s content area. + @"text-bottom", + + pub usingnamespace css.DefineEnumProperty(@This()); +}; diff --git a/src/css/properties/generate_properties.ts b/src/css/properties/generate_properties.ts new file mode 100644 index 0000000000..e7f79c1e2e --- /dev/null +++ b/src/css/properties/generate_properties.ts @@ -0,0 +1,1993 @@ +type VendorPrefixes = "none" | "webkit" | "moz" | "ms" | "o"; + +type LogicalGroup = + | "border_color" + | "border_style" + | "border_width" + | "border_radius" + | "margin" + | "scroll_margin" + | "padding" + | "scroll_padding" + | "inset" + | "size" + | "min_size" + | "max_size"; + +type PropertyCategory = "logical" | "physical"; + +type PropertyDef = { + ty: string; + shorthand?: boolean; + valid_prefixes?: VendorPrefixes[]; + logical_group?: { + ty: LogicalGroup; + category: PropertyCategory; + }; + /// By default true + unprefixed?: boolean; + conditional?: { + css_modules: boolean; + }; + eval_branch_quota?: number; +}; + +const OUTPUT_FILE = "src/css/properties/properties_generated.zig"; + +async function generateCode(property_defs: Record) { + const EMIT_COMPLETED_MD_FILE = true; + if (EMIT_COMPLETED_MD_FILE) { + const completed = Object.entries(property_defs) + .map(([name, meta]) => `- [x] \`${name}\``) + .join("\n"); + await Bun.$`echo ${completed} > completed.md`; + } + await Bun.$`echo ${prelude()} > ${OUTPUT_FILE}`; + await Bun.$`echo ${generateProperty(property_defs)} >> ${OUTPUT_FILE}`; + await Bun.$`echo ${generatePropertyId(property_defs)} >> ${OUTPUT_FILE}`; + await Bun.$`echo ${generatePropertyIdTag(property_defs)} >> ${OUTPUT_FILE}`; + await Bun.$`vendor/zig/zig.exe fmt ${OUTPUT_FILE}`; +} + +function generatePropertyIdTag(property_defs: Record): string { + return `pub const PropertyIdTag = enum(u16) { + ${Object.keys(property_defs) + .map(key => `${escapeIdent(key)},`) + .join("\n")} + all, + unparsed, + custom, + + /// Helper function used in comptime code to know whether to access the underlying value + /// with tuple indexing syntax because it may have a VendorPrefix associated with it. + pub fn hasVendorPrefix(this: PropertyIdTag) bool { + return switch (this) { + ${Object.entries(property_defs) + .map(([name, meta]) => `.${escapeIdent(name)} => ${meta.valid_prefixes === undefined ? "false" : "true"},`) + .join("\n")} + .unparsed => false, + .custom => false, + .all => false, + }; + } + + /// Helper function used in comptime code to know whether to access the underlying value + /// with tuple indexing syntax because it may have a VendorPrefix associated with it. 
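+ ///
+ /// (Note: unlike `hasVendorPrefix` above, this returns the Zig type of the value stored for each property tag.)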
+ pub fn valueType(this: PropertyIdTag) type { + return switch (this) { + ${Object.entries(property_defs) + .map(([name, meta]) => `.${escapeIdent(name)} => ${meta.ty},`) + .join("\n")} + .all => CSSWideKeyword, + .unparsed => UnparsedProperty, + .custom => CustomProperty, + }; + } +};`; +} + +function generateProperty(property_defs: Record): string { + return `pub const Property = union(PropertyIdTag) { +${Object.entries(property_defs) + .map(([name, meta]) => generatePropertyField(name, meta)) + .join("\n")} + all: CSSWideKeyword, + unparsed: UnparsedProperty, + custom: CustomProperty, + + ${generatePropertyImpl(property_defs)} +};`; +} + +function generatePropertyImpl(property_defs: Record): string { + const required_functions = ["deepClone", "parse", "toCss", "eql"]; + + return ` + pub usingnamespace PropertyImpl(); + + // Sanity check to make sure all types have the following functions: + // - deepClone() + // - eql() + // - parse() + // - toCss() + // + // We do this string concatenation thing so we get all the errors at once, + // instead of relying on Zig semantic analysis which usualy stops at the first error. + comptime { + const compile_error: []const u8 = compile_error: { + var compile_error: []const u8 = ""; + ${Object.entries(property_defs) + .map(([name, meta]) => { + if (meta.ty != "void" && meta.ty != "CSSNumber" && meta.ty != "CSSInteger") { + return required_functions + .map( + fn => ` + if (!@hasDecl(${meta.ty}, "${fn}")) { + compile_error = compile_error ++ @typeName(${meta.ty}) ++ ": does not have a ${fn}() function.\\n"; + } + `, + ) + .join("\n"); + } + return ""; + }) + .join("\n")} + const final_compile_error = compile_error; + break :compile_error final_compile_error; + }; + if (compile_error.len > 0) { + @compileError(compile_error); + } + } + + /// Parses a CSS property by name. + pub fn parse(property_id: PropertyId, input: *css.Parser, options: *const css.ParserOptions) Result(Property) { + const state = input.state(); + + switch (property_id) { + ${generatePropertyImplParseCases(property_defs)} + .all => return .{ .result = .{ .all = switch (CSSWideKeyword.parse(input)) { + .result => |v| v, + .err => |e| return .{ .err = e }, + } } }, + .custom => |name| return .{ .result = .{ .custom = switch (CustomProperty.parse(name, input, options)) { + .result => |v| v, + .err => |e| return .{ .err = e }, + } } }, + else => {}, + } + + // If a value was unable to be parsed, treat as an unparsed property. + // This is different from a custom property, handled below, in that the property name is known + // and stored as an enum rather than a string. This lets property handlers more easily deal with it. + // Ideally we'd only do this if var() or env() references were seen, but err on the safe side for now. 
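+ // Rewind the parser to the state saved above so the raw tokens can be re-parsed as an UnparsedProperty.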
+ input.reset(&state); + return .{ .result = .{ .unparsed = switch (UnparsedProperty.parse(property_id, input, options)) { + .result => |v| v, + .err => |e| return .{ .err = e }, + } } }; + } + + pub fn propertyId(this: *const Property) PropertyId { + return switch (this.*) { + ${Object.entries(property_defs) + .map(([name, meta]) => { + if (meta.valid_prefixes !== undefined) { + return `.${escapeIdent(name)} => |*v| PropertyId{ .${escapeIdent(name)} = v[1] },`; + } + return `.${escapeIdent(name)} => .${escapeIdent(name)},`; + }) + .join("\n")} + .all => PropertyId.all, + .unparsed => |unparsed| unparsed.property_id, + .custom => |c| .{ .custom = c.name }, + }; + } + + pub fn deepClone(this: *const Property, allocator: std.mem.Allocator) Property { + return switch (this.*) { + ${Object.entries(property_defs) + .map(([name, meta]) => { + if (meta.valid_prefixes !== undefined) { + const clone_expr = + meta.ty === "CSSNumber" || meta.ty === "CSSInteger" ? "v[0]" : "v[0].deepClone(allocator)"; + return `.${escapeIdent(name)} => |*v| .{ .${escapeIdent(name)} = .{ ${clone_expr}, v[1] } },`; + } + const clone_expr = + meta.ty === "CSSNumber" || meta.ty === "CSSInteger" + ? "v.*" + : meta.ty.includes("BabyList(") + ? `css.generic.deepClone(${meta.ty}, v, allocator)` + : "v.deepClone(allocator)"; + return `.${escapeIdent(name)} => |*v| .{ .${escapeIdent(name)} = ${clone_expr} },`; + }) + .join("\n")} + .all => |*a| return .{ .all = a.deepClone(allocator) }, + .unparsed => |*u| return .{ .unparsed = u.deepClone(allocator) }, + .custom => |*c| return .{ .custom = c.deepClone(allocator) }, + }; + } + + /// We're going to have this empty for now since not every property has a deinit function. + /// It's not strictly necessary since all allocations are into an arena. + /// It's mostly intended as a performance optimization in the case where mimalloc arena is used, + /// since it can reclaim the memory and use it for subsequent allocations. + /// I haven't benchmarked that though, so I don't actually know how much faster it would actually make it. + pub fn deinit(this: *@This(), allocator: std.mem.Allocator) void { + _ = this; + _ = allocator; + } + + pub inline fn __toCssHelper(this: *const Property) struct{[]const u8, VendorPrefix} { + return switch (this.*) { + ${generatePropertyImplToCssHelper(property_defs)} + .all => .{ "all", VendorPrefix{ .none = true } }, + .unparsed => |*unparsed| brk: { + var prefix = unparsed.property_id.prefix(); + if (prefix.isEmpty()) { + prefix = VendorPrefix{ .none = true }; + } + break :brk .{ unparsed.property_id.name(), prefix }; + }, + .custom => unreachable, + }; + } + + /// Serializes the value of a CSS property without its name or \`!important\` flag. + pub fn valueToCss(this: *const Property, comptime W: type, dest: *css.Printer(W)) PrintErr!void { + return switch(this.*) { + ${Object.entries(property_defs) + .map(([name, meta]) => { + const value = meta.valid_prefixes === undefined ? "value" : "value[0]"; + const to_css = + meta.ty === "CSSNumber" + ? `CSSNumberFns.toCss(&${value}, W, dest)` + : meta.ty === "CSSInteger" + ? `CSSIntegerFns.toCss(&${value}, W, dest)` + : meta.ty.includes("ArrayList") + ? 
`css.generic.toCss(${meta.ty}, ${value}, W, dest)`
+ : `${value}.toCss(W, dest)`;
+ return `.${escapeIdent(name)} => |*value| ${to_css},`;
+ })
+ .join("\n")}
+ .all => |*keyword| keyword.toCss(W, dest),
+ .unparsed => |*unparsed| unparsed.value.toCss(W, dest, false),
+ .custom => |*c| c.value.toCss(W, dest, c.name == .custom),
+ };
+ }
+
+ /// Returns the given longhand property for a shorthand.
+ pub fn longhand(this: *const Property, property_id: *const PropertyId) ?Property {
+ switch (this.*) {
+ ${Object.entries(property_defs)
+ .filter(([_, meta]) => meta.shorthand)
+ .map(([name, meta]) => {
+ if (meta.valid_prefixes !== undefined) {
+ return `.${escapeIdent(name)} => |*v| {
+ if (!v[1].eq(property_id.prefix())) return null;
+ return v[0].longhand(property_id);
+ },`;
+ }
+
+ return `.${escapeIdent(name)} => |*v| return v.longhand(property_id),`;
+ })
+ .join("\n")}
+ else => {},
+ }
+ return null;
+ }
+
+ pub fn eql(lhs: *const Property, rhs: *const Property) bool {
+ if (@intFromEnum(lhs.*) != @intFromEnum(rhs.*)) return false;
+ return switch (lhs.*) {
+ ${Object.entries(property_defs)
+ .map(([name, meta]) => {
+ if (meta.valid_prefixes !== undefined)
+ return `.${escapeIdent(name)} => |*v| css.generic.eql(${meta.ty}, &v[0], &rhs.${escapeIdent(name)}[0]) and v[1].eq(rhs.${escapeIdent(name)}[1]),`;
+ return `.${escapeIdent(name)} => |*v| css.generic.eql(${meta.ty}, v, &rhs.${escapeIdent(name)}),`;
+ })
+ .join("\n")}
+ .all, .unparsed => true,
+ .custom => |*c| c.eql(&rhs.custom),
+ };
+ }
+`;
+}
+
+function generatePropertyImplToCssHelper(property_defs: Record<string, PropertyDef>): string {
+ return Object.entries(property_defs)
+ .map(([name, meta]) => {
+ const capture = meta.valid_prefixes === undefined ? "" : "|*x|";
+ const prefix = meta.valid_prefixes === undefined ? "VendorPrefix{ .none = true }" : 'x.@"1"';
+ return `.${escapeIdent(name)} => ${capture} .{"${name}", ${prefix}},`;
+ })
+ .join("\n");
+}
+
+function generatePropertyImplParseCases(property_defs: Record<string, PropertyDef>): string {
+ return Object.entries(property_defs)
+ .map(([name, meta]) => {
+ const capture = meta.valid_prefixes === undefined ? "" : "|pre|";
+ const ret =
+ meta.valid_prefixes === undefined
+ ? `.{ .${escapeIdent(name)} = c }`
+ : `.{ .${escapeIdent(name)} = .{ c, pre } }`;
+ return `.${escapeIdent(name)} => ${capture} {
+ ${meta.eval_branch_quota !== undefined ? 
`@setEvalBranchQuota(${meta.eval_branch_quota});` : ""} + if (css.generic.parseWithOptions(${meta.ty}, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = ${ret} }; + } + } +},`; + }) + .join("\n"); +} + +function generatePropertyField(name: string, meta: PropertyDef): string { + if (meta.valid_prefixes !== undefined) { + return ` ${escapeIdent(name)}: struct{ ${meta.ty}, VendorPrefix },`; + } + return ` ${escapeIdent(name)}: ${meta.ty},`; +} + +function generatePropertyId(property_defs: Record): string { + return `pub const PropertyId = union(PropertyIdTag) { +${Object.entries(property_defs) + .map(([name, meta]) => generatePropertyIdField(name, meta)) + .join("\n")} + all, + unparsed, + custom: CustomPropertyName, + +pub usingnamespace PropertyIdImpl(); + +${generatePropertyIdImpl(property_defs)} +};`; +} + +function generatePropertyIdField(name: string, meta: PropertyDef): string { + if (meta.valid_prefixes !== undefined) { + return ` ${escapeIdent(name)}: VendorPrefix,`; + } + return ` ${escapeIdent(name)},`; +} + +function generatePropertyIdImpl(property_defs: Record): string { + return ` + /// Returns the property name, without any vendor prefixes. + pub inline fn name(this: *const PropertyId) []const u8 { + return @tagName(this.*); + } + + /// Returns the vendor prefix for this property id. + pub fn prefix(this: *const PropertyId) VendorPrefix { + return switch (this.*) { + ${generatePropertyIdImplPrefix(property_defs)} + .all, .custom, .unparsed => VendorPrefix.empty(), + }; + } + + pub fn fromNameAndPrefix(name1: []const u8, pre: VendorPrefix) ?PropertyId { + const Enum = enum { ${Object.entries(property_defs) + .map( + ([prop_name, def], i) => `${escapeIdent(prop_name)}${i === Object.keys(property_defs).length - 1 ? "" : ", "}`, + ) + .join("")} }; + const Map = comptime bun.ComptimeEnumMap(Enum); + if (Map.getASCIIICaseInsensitive(name1)) |prop| { + switch (prop) { + ${Object.entries(property_defs).map(([name, meta]) => { + return `.${escapeIdent(name)} => { + const allowed_prefixes = ${constructVendorPrefix(meta.valid_prefixes)}; + if (allowed_prefixes.contains(pre)) return ${meta.valid_prefixes === undefined ? 
`.${escapeIdent(name)}` : `.{ .${escapeIdent(name)} = pre }`}; + }`; + })} + } + } + + return null; + } + + pub fn withPrefix(this: *const PropertyId, pre: VendorPrefix) PropertyId { + return switch (this.*) { + ${Object.entries(property_defs) + .map(([prop_name, def]) => { + if (def.valid_prefixes === undefined) return `.${escapeIdent(prop_name)} => .${escapeIdent(prop_name)},`; + return `.${escapeIdent(prop_name)} => .{ .${escapeIdent(prop_name)} = pre },`; + }) + .join("\n")} + else => this.*, + }; + } + + pub fn addPrefix(this: *PropertyId, pre: VendorPrefix) void { + return switch (this.*) { + ${Object.entries(property_defs) + .map(([prop_name, def]) => { + if (def.valid_prefixes === undefined) return `.${escapeIdent(prop_name)} => {},`; + return `.${escapeIdent(prop_name)} => |*p| { p.insert(pre); },`; + }) + .join("\n")} + else => {}, + }; + } + + pub inline fn deepClone(this: *const PropertyId, _: std.mem.Allocator) PropertyId { + return this.*; + } + + pub fn eql(lhs: *const PropertyId, rhs: *const PropertyId) bool { + if (@intFromEnum(lhs.*) != @intFromEnum(rhs.*)) return false; + inline for (bun.meta.EnumFields(PropertyId), std.meta.fields(PropertyId)) |enum_field, union_field| { + if (enum_field.value == @intFromEnum(lhs.*)) { + if (comptime union_field.type == css.VendorPrefix) { + return @field(lhs, union_field.name).eql(@field(rhs, union_field.name)); + } else { + return true; + } + } + } + unreachable; + } + + pub fn hash(this: *const PropertyId, hasher: *std.hash.Wyhash) void { + const tag = @intFromEnum(this.*); + hasher.update(std.mem.asBytes(&tag)); + } +`; +} + +function generatePropertyIdImplPrefix(property_defs: Record): string { + return Object.entries(property_defs) + .map(([name, meta]) => { + if (meta.valid_prefixes === undefined) return `.${escapeIdent(name)} => VendorPrefix.empty(),`; + return `.${escapeIdent(name)} => |p| p,`; + }) + .join("\n"); +} + +// TODO: todo_stuff.match_ignore_ascii_case +function generatePropertyIdImplFromNameAndPrefix(property_defs: Record): string { + return Object.entries(property_defs) + .map(([name, meta]) => { + if (name === "unparsed") return ""; + return `if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name1, "${name}")) { + const allowed_prefixes = ${constructVendorPrefix(meta.valid_prefixes)}; + if (allowed_prefixes.contains(pre)) return ${meta.valid_prefixes === undefined ? 
`.${escapeIdent(name)}` : `.{ .${escapeIdent(name)} = pre }`}; +} else `; + }) + .join("\n"); +} + +function constructVendorPrefix(prefixes: VendorPrefixes[] | undefined): string { + if (prefixes === undefined) return `VendorPrefix{ .none = true }`; + return `VendorPrefix{ ${[`.none = true`, ...prefixes.map(prefix => `.${prefix} = true`)].join(", ")} }`; +} + +function needsEscaping(name: string): boolean { + switch (name) { + case "align": + return true; + case "var": + default: { + return ["-", "(", ")", " ", ":", ";", ","].some(c => name.includes(c)); + } + } +} + +function escapeIdent(name: string): string { + if (needsEscaping(name)) { + return `@"${name}"`; + } + return name; +} + +generateCode({ + "background-color": { + ty: "CssColor", + }, + "background-image": { + ty: "SmallList(Image, 1)", + }, + "background-position-x": { + ty: "SmallList(css_values.position.HorizontalPosition, 1)", + }, + "background-position-y": { + ty: "SmallList(css_values.position.VerticalPosition, 1)", + }, + "background-position": { + ty: "SmallList(background.BackgroundPosition, 1)", + shorthand: true, + }, + "background-size": { + ty: "SmallList(background.BackgroundSize, 1)", + }, + "background-repeat": { + ty: "SmallList(background.BackgroundRepeat, 1)", + }, + "background-attachment": { + ty: "SmallList(background.BackgroundAttachment, 1)", + }, + "background-clip": { + ty: "SmallList(background.BackgroundClip, 1)", + valid_prefixes: ["webkit", "moz"], + }, + "background-origin": { + ty: "SmallList(background.BackgroundOrigin, 1)", + }, + background: { + ty: "SmallList(background.Background, 1)", + }, + "box-shadow": { + ty: "SmallList(box_shadow.BoxShadow, 1)", + valid_prefixes: ["webkit", "moz"], + }, + opacity: { + ty: "css.css_values.alpha.AlphaValue", + }, + color: { + ty: "CssColor", + }, + display: { + ty: "display.Display", + }, + visibility: { + ty: "display.Visibility", + }, + width: { + ty: "size.Size", + logical_group: { ty: "size", category: "physical" }, + }, + height: { + ty: "size.Size", + logical_group: { ty: "size", category: "physical" }, + }, + "min-width": { + ty: "size.Size", + logical_group: { ty: "min_size", category: "physical" }, + }, + "min-height": { + ty: "size.Size", + logical_group: { ty: "min_size", category: "physical" }, + }, + "max-width": { + ty: "size.MaxSize", + logical_group: { ty: "max_size", category: "physical" }, + }, + "max-height": { + ty: "size.MaxSize", + logical_group: { ty: "max_size", category: "physical" }, + }, + "block-size": { + ty: "size.Size", + logical_group: { ty: "size", category: "logical" }, + }, + "inline-size": { + ty: "size.Size", + logical_group: { ty: "size", category: "logical" }, + }, + "min-block-size": { + ty: "size.Size", + logical_group: { ty: "min_size", category: "logical" }, + }, + "min-inline-size": { + ty: "size.Size", + logical_group: { ty: "min_size", category: "logical" }, + }, + "max-block-size": { + ty: "size.MaxSize", + logical_group: { ty: "max_size", category: "logical" }, + }, + "max-inline-size": { + ty: "size.MaxSize", + logical_group: { ty: "max_size", category: "logical" }, + }, + "box-sizing": { + ty: "size.BoxSizing", + valid_prefixes: ["webkit", "moz"], + }, + "aspect-ratio": { + ty: "size.AspectRatio", + }, + overflow: { + ty: "overflow.Overflow", + shorthand: true, + }, + "overflow-x": { + ty: "overflow.OverflowKeyword", + }, + "overflow-y": { + ty: "overflow.OverflowKeyword", + }, + "text-overflow": { + ty: "overflow.TextOverflow", + valid_prefixes: ["o"], + }, + position: { + ty: "position.Position", + 
}, + top: { + ty: "LengthPercentageOrAuto", + logical_group: { ty: "inset", category: "physical" }, + }, + bottom: { + ty: "LengthPercentageOrAuto", + logical_group: { ty: "inset", category: "physical" }, + }, + left: { + ty: "LengthPercentageOrAuto", + logical_group: { ty: "inset", category: "physical" }, + }, + right: { + ty: "LengthPercentageOrAuto", + logical_group: { ty: "inset", category: "physical" }, + }, + "inset-block-start": { + ty: "LengthPercentageOrAuto", + logical_group: { ty: "inset", category: "logical" }, + }, + "inset-block-end": { + ty: "LengthPercentageOrAuto", + logical_group: { ty: "inset", category: "logical" }, + }, + "inset-inline-start": { + ty: "LengthPercentageOrAuto", + logical_group: { ty: "inset", category: "logical" }, + }, + "inset-inline-end": { + ty: "LengthPercentageOrAuto", + logical_group: { ty: "inset", category: "logical" }, + }, + "inset-block": { + ty: "margin_padding.InsetBlock", + shorthand: true, + }, + "inset-inline": { + ty: "margin_padding.InsetInline", + shorthand: true, + }, + inset: { + ty: "margin_padding.Inset", + shorthand: true, + }, + "border-spacing": { + ty: "css.css_values.size.Size2D(Length)", + }, + "border-top-color": { + ty: "CssColor", + logical_group: { ty: "border_color", category: "physical" }, + }, + "border-bottom-color": { + ty: "CssColor", + logical_group: { ty: "border_color", category: "physical" }, + }, + "border-left-color": { + ty: "CssColor", + logical_group: { ty: "border_color", category: "physical" }, + }, + "border-right-color": { + ty: "CssColor", + logical_group: { ty: "border_color", category: "physical" }, + }, + "border-block-start-color": { + ty: "CssColor", + logical_group: { ty: "border_color", category: "logical" }, + }, + "border-block-end-color": { + ty: "CssColor", + logical_group: { ty: "border_color", category: "logical" }, + }, + "border-inline-start-color": { + ty: "CssColor", + logical_group: { ty: "border_color", category: "logical" }, + }, + "border-inline-end-color": { + ty: "CssColor", + logical_group: { ty: "border_color", category: "logical" }, + }, + "border-top-style": { + ty: "border.LineStyle", + logical_group: { ty: "border_style", category: "physical" }, + }, + "border-bottom-style": { + ty: "border.LineStyle", + logical_group: { ty: "border_style", category: "physical" }, + }, + "border-left-style": { + ty: "border.LineStyle", + logical_group: { ty: "border_style", category: "physical" }, + }, + "border-right-style": { + ty: "border.LineStyle", + logical_group: { ty: "border_style", category: "physical" }, + }, + "border-block-start-style": { + ty: "border.LineStyle", + logical_group: { ty: "border_style", category: "logical" }, + }, + "border-block-end-style": { + ty: "border.LineStyle", + logical_group: { ty: "border_style", category: "logical" }, + }, + "border-inline-start-style": { + ty: "border.LineStyle", + logical_group: { ty: "border_style", category: "logical" }, + }, + "border-inline-end-style": { + ty: "border.LineStyle", + logical_group: { ty: "border_style", category: "logical" }, + }, + "border-top-width": { + ty: "BorderSideWidth", + logical_group: { ty: "border_width", category: "physical" }, + }, + "border-bottom-width": { + ty: "BorderSideWidth", + logical_group: { ty: "border_width", category: "physical" }, + }, + "border-left-width": { + ty: "BorderSideWidth", + logical_group: { ty: "border_width", category: "physical" }, + }, + "border-right-width": { + ty: "BorderSideWidth", + logical_group: { ty: "border_width", category: "physical" }, + }, + 
"border-block-start-width": { + ty: "BorderSideWidth", + logical_group: { ty: "border_width", category: "logical" }, + }, + "border-block-end-width": { + ty: "BorderSideWidth", + logical_group: { ty: "border_width", category: "logical" }, + }, + "border-inline-start-width": { + ty: "BorderSideWidth", + logical_group: { ty: "border_width", category: "logical" }, + }, + "border-inline-end-width": { + ty: "BorderSideWidth", + logical_group: { ty: "border_width", category: "logical" }, + }, + "border-top-left-radius": { + ty: "Size2D(LengthPercentage)", + valid_prefixes: ["webkit", "moz"], + logical_group: { ty: "border_radius", category: "physical" }, + }, + "border-top-right-radius": { + ty: "Size2D(LengthPercentage)", + valid_prefixes: ["webkit", "moz"], + logical_group: { ty: "border_radius", category: "physical" }, + }, + "border-bottom-left-radius": { + ty: "Size2D(LengthPercentage)", + valid_prefixes: ["webkit", "moz"], + logical_group: { ty: "border_radius", category: "physical" }, + }, + "border-bottom-right-radius": { + ty: "Size2D(LengthPercentage)", + valid_prefixes: ["webkit", "moz"], + logical_group: { ty: "border_radius", category: "physical" }, + }, + "border-start-start-radius": { + ty: "Size2D(LengthPercentage)", + logical_group: { ty: "border_radius", category: "logical" }, + }, + "border-start-end-radius": { + ty: "Size2D(LengthPercentage)", + logical_group: { ty: "border_radius", category: "logical" }, + }, + "border-end-start-radius": { + ty: "Size2D(LengthPercentage)", + logical_group: { ty: "border_radius", category: "logical" }, + }, + "border-end-end-radius": { + ty: "Size2D(LengthPercentage)", + logical_group: { ty: "border_radius", category: "logical" }, + }, + "border-radius": { + ty: "BorderRadius", + valid_prefixes: ["webkit", "moz"], + shorthand: true, + }, + "border-image-source": { + ty: "Image", + }, + "border-image-outset": { + ty: "Rect(LengthOrNumber)", + }, + "border-image-repeat": { + ty: "BorderImageRepeat", + }, + "border-image-width": { + ty: "Rect(BorderImageSideWidth)", + }, + "border-image-slice": { + ty: "BorderImageSlice", + }, + "border-image": { + ty: "BorderImage", + valid_prefixes: ["webkit", "moz", "o"], + shorthand: true, + }, + "border-color": { + ty: "BorderColor", + shorthand: true, + }, + "border-style": { + ty: "BorderStyle", + shorthand: true, + }, + "border-width": { + ty: "BorderWidth", + shorthand: true, + }, + "border-block-color": { + ty: "BorderBlockColor", + shorthand: true, + }, + "border-block-style": { + ty: "BorderBlockStyle", + shorthand: true, + }, + "border-block-width": { + ty: "BorderBlockWidth", + shorthand: true, + }, + "border-inline-color": { + ty: "BorderInlineColor", + shorthand: true, + }, + "border-inline-style": { + ty: "BorderInlineStyle", + shorthand: true, + }, + "border-inline-width": { + ty: "BorderInlineWidth", + shorthand: true, + }, + border: { + ty: "Border", + shorthand: true, + }, + "border-top": { + ty: "BorderTop", + shorthand: true, + }, + "border-bottom": { + ty: "BorderBottom", + shorthand: true, + }, + "border-left": { + ty: "BorderLeft", + shorthand: true, + }, + "border-right": { + ty: "BorderRight", + shorthand: true, + }, + "border-block": { + ty: "BorderBlock", + shorthand: true, + }, + "border-block-start": { + ty: "BorderBlockStart", + shorthand: true, + }, + "border-block-end": { + ty: "BorderBlockEnd", + shorthand: true, + }, + "border-inline": { + ty: "BorderInline", + shorthand: true, + }, + "border-inline-start": { + ty: "BorderInlineStart", + shorthand: true, + }, + 
"border-inline-end": { + ty: "BorderInlineEnd", + shorthand: true, + }, + outline: { + ty: "Outline", + shorthand: true, + }, + "outline-color": { + ty: "CssColor", + }, + "outline-style": { + ty: "OutlineStyle", + }, + "outline-width": { + ty: "BorderSideWidth", + }, + "flex-direction": { + ty: "FlexDirection", + valid_prefixes: ["webkit", "ms"], + }, + "flex-wrap": { + ty: "FlexWrap", + valid_prefixes: ["webkit", "ms"], + }, + "flex-flow": { + ty: "FlexFlow", + valid_prefixes: ["webkit", "ms"], + shorthand: true, + }, + "flex-grow": { + ty: "CSSNumber", + valid_prefixes: ["webkit"], + }, + "flex-shrink": { + ty: "CSSNumber", + valid_prefixes: ["webkit"], + }, + "flex-basis": { + ty: "LengthPercentageOrAuto", + valid_prefixes: ["webkit"], + }, + flex: { + ty: "Flex", + valid_prefixes: ["webkit", "ms"], + shorthand: true, + }, + order: { + ty: "CSSInteger", + valid_prefixes: ["webkit"], + }, + "align-content": { + ty: "AlignContent", + valid_prefixes: ["webkit"], + }, + "justify-content": { + ty: "JustifyContent", + valid_prefixes: ["webkit"], + }, + "place-content": { + ty: "PlaceContent", + shorthand: true, + }, + "align-self": { + ty: "AlignSelf", + valid_prefixes: ["webkit"], + }, + "justify-self": { + ty: "JustifySelf", + }, + "place-self": { + ty: "PlaceSelf", + shorthand: true, + }, + "align-items": { + ty: "AlignItems", + valid_prefixes: ["webkit"], + }, + "justify-items": { + ty: "JustifyItems", + }, + "place-items": { + ty: "PlaceItems", + shorthand: true, + }, + "row-gap": { + ty: "GapValue", + }, + "column-gap": { + ty: "GapValue", + }, + gap: { + ty: "Gap", + shorthand: true, + }, + "box-orient": { + ty: "BoxOrient", + valid_prefixes: ["webkit", "moz"], + unprefixed: false, + }, + "box-direction": { + ty: "BoxDirection", + valid_prefixes: ["webkit", "moz"], + unprefixed: false, + }, + "box-ordinal-group": { + ty: "CSSInteger", + valid_prefixes: ["webkit", "moz"], + unprefixed: false, + }, + "box-align": { + ty: "BoxAlign", + valid_prefixes: ["webkit", "moz"], + unprefixed: false, + }, + "box-flex": { + ty: "CSSNumber", + valid_prefixes: ["webkit", "moz"], + unprefixed: false, + }, + "box-flex-group": { + ty: "CSSInteger", + valid_prefixes: ["webkit"], + unprefixed: false, + }, + "box-pack": { + ty: "BoxPack", + valid_prefixes: ["webkit", "moz"], + unprefixed: false, + }, + "box-lines": { + ty: "BoxLines", + valid_prefixes: ["webkit", "moz"], + unprefixed: false, + }, + "flex-pack": { + ty: "FlexPack", + valid_prefixes: ["ms"], + unprefixed: false, + }, + "flex-order": { + ty: "CSSInteger", + valid_prefixes: ["ms"], + unprefixed: false, + }, + "flex-align": { + ty: "BoxAlign", + valid_prefixes: ["ms"], + unprefixed: false, + }, + "flex-item-align": { + ty: "FlexItemAlign", + valid_prefixes: ["ms"], + unprefixed: false, + }, + "flex-line-pack": { + ty: "FlexLinePack", + valid_prefixes: ["ms"], + unprefixed: false, + }, + "flex-positive": { + ty: "CSSNumber", + valid_prefixes: ["ms"], + unprefixed: false, + }, + "flex-negative": { + ty: "CSSNumber", + valid_prefixes: ["ms"], + unprefixed: false, + }, + "flex-preferred-size": { + ty: "LengthPercentageOrAuto", + valid_prefixes: ["ms"], + unprefixed: false, + }, + "margin-top": { + ty: "LengthPercentageOrAuto", + logical_group: { ty: "margin", category: "physical" }, + }, + "margin-bottom": { + ty: "LengthPercentageOrAuto", + logical_group: { ty: "margin", category: "physical" }, + }, + "margin-left": { + ty: "LengthPercentageOrAuto", + logical_group: { ty: "margin", category: "physical" }, + }, + "margin-right": { + ty: 
"LengthPercentageOrAuto", + logical_group: { ty: "margin", category: "physical" }, + }, + "margin-block-start": { + ty: "LengthPercentageOrAuto", + logical_group: { ty: "margin", category: "logical" }, + eval_branch_quota: 5000, + }, + "margin-block-end": { + ty: "LengthPercentageOrAuto", + logical_group: { ty: "margin", category: "logical" }, + }, + "margin-inline-start": { + ty: "LengthPercentageOrAuto", + logical_group: { ty: "margin", category: "logical" }, + }, + "margin-inline-end": { + ty: "LengthPercentageOrAuto", + logical_group: { ty: "margin", category: "logical" }, + }, + "margin-block": { + ty: "MarginBlock", + shorthand: true, + }, + "margin-inline": { + ty: "MarginInline", + shorthand: true, + }, + margin: { + ty: "Margin", + shorthand: true, + eval_branch_quota: 5000, + }, + "padding-top": { + ty: "LengthPercentageOrAuto", + logical_group: { ty: "padding", category: "physical" }, + }, + "padding-bottom": { + ty: "LengthPercentageOrAuto", + logical_group: { ty: "padding", category: "physical" }, + }, + "padding-left": { + ty: "LengthPercentageOrAuto", + logical_group: { ty: "padding", category: "physical" }, + }, + "padding-right": { + ty: "LengthPercentageOrAuto", + logical_group: { ty: "padding", category: "physical" }, + }, + "padding-block-start": { + ty: "LengthPercentageOrAuto", + logical_group: { ty: "padding", category: "logical" }, + }, + "padding-block-end": { + ty: "LengthPercentageOrAuto", + logical_group: { ty: "padding", category: "logical" }, + }, + "padding-inline-start": { + ty: "LengthPercentageOrAuto", + logical_group: { ty: "padding", category: "logical" }, + }, + "padding-inline-end": { + ty: "LengthPercentageOrAuto", + logical_group: { ty: "padding", category: "logical" }, + }, + "padding-block": { + ty: "PaddingBlock", + shorthand: true, + }, + "padding-inline": { + ty: "PaddingInline", + shorthand: true, + }, + padding: { + ty: "Padding", + shorthand: true, + }, + "scroll-margin-top": { + ty: "LengthPercentageOrAuto", + logical_group: { ty: "scroll_margin", category: "physical" }, + }, + "scroll-margin-bottom": { + ty: "LengthPercentageOrAuto", + logical_group: { ty: "scroll_margin", category: "physical" }, + }, + "scroll-margin-left": { + ty: "LengthPercentageOrAuto", + logical_group: { ty: "scroll_margin", category: "physical" }, + }, + "scroll-margin-right": { + ty: "LengthPercentageOrAuto", + logical_group: { ty: "scroll_margin", category: "physical" }, + }, + "scroll-margin-block-start": { + ty: "LengthPercentageOrAuto", + logical_group: { ty: "scroll_margin", category: "logical" }, + }, + "scroll-margin-block-end": { + ty: "LengthPercentageOrAuto", + logical_group: { ty: "scroll_margin", category: "logical" }, + }, + "scroll-margin-inline-start": { + ty: "LengthPercentageOrAuto", + logical_group: { ty: "scroll_margin", category: "logical" }, + }, + "scroll-margin-inline-end": { + ty: "LengthPercentageOrAuto", + logical_group: { ty: "scroll_margin", category: "logical" }, + }, + "scroll-margin-block": { + ty: "ScrollMarginBlock", + shorthand: true, + }, + "scroll-margin-inline": { + ty: "ScrollMarginInline", + shorthand: true, + }, + "scroll-margin": { + ty: "ScrollMargin", + shorthand: true, + }, + "scroll-padding-top": { + ty: "LengthPercentageOrAuto", + logical_group: { ty: "scroll_padding", category: "physical" }, + }, + "scroll-padding-bottom": { + ty: "LengthPercentageOrAuto", + logical_group: { ty: "scroll_padding", category: "physical" }, + }, + "scroll-padding-left": { + ty: "LengthPercentageOrAuto", + logical_group: { ty: 
"scroll_padding", category: "physical" }, + }, + "scroll-padding-right": { + ty: "LengthPercentageOrAuto", + logical_group: { ty: "scroll_padding", category: "physical" }, + }, + "scroll-padding-block-start": { + ty: "LengthPercentageOrAuto", + logical_group: { ty: "scroll_padding", category: "logical" }, + }, + "scroll-padding-block-end": { + ty: "LengthPercentageOrAuto", + logical_group: { ty: "scroll_padding", category: "logical" }, + }, + "scroll-padding-inline-start": { + ty: "LengthPercentageOrAuto", + logical_group: { ty: "scroll_padding", category: "logical" }, + }, + "scroll-padding-inline-end": { + ty: "LengthPercentageOrAuto", + logical_group: { ty: "scroll_padding", category: "logical" }, + }, + "scroll-padding-block": { + ty: "ScrollPaddingBlock", + shorthand: true, + }, + "scroll-padding-inline": { + ty: "ScrollPaddingInline", + shorthand: true, + }, + "scroll-padding": { + ty: "ScrollPadding", + shorthand: true, + }, + "font-weight": { + ty: "FontWeight", + }, + "font-size": { + ty: "FontSize", + }, + "font-stretch": { + ty: "FontStretch", + }, + "font-family": { + ty: "BabyList(FontFamily)", + }, + "font-style": { + ty: "FontStyle", + }, + "font-variant-caps": { + ty: "FontVariantCaps", + }, + "line-height": { + ty: "LineHeight", + }, + font: { + ty: "Font", + shorthand: true, + }, + // "vertical-align": { + // ty: "VerticalAlign", + // }, + // "font-palette": { + // ty: "DashedIdentReference", + // }, + // "transition-property": { + // ty: "SmallList(PropertyId, 1)", + // valid_prefixes: ["webkit", "moz", "ms"], + // }, + // "transition-duration": { + // ty: "SmallList(Time, 1)", + // valid_prefixes: ["webkit", "moz", "ms"], + // }, + // "transition-delay": { + // ty: "SmallList(Time, 1)", + // valid_prefixes: ["webkit", "moz", "ms"], + // }, + // "transition-timing-function": { + // ty: "SmallList(EasingFunction, 1)", + // valid_prefixes: ["webkit", "moz", "ms"], + // }, + // transition: { + // ty: "SmallList(Transition, 1)", + // valid_prefixes: ["webkit", "moz", "ms"], + // shorthand: true, + // }, + // "animation-name": { + // ty: "AnimationNameList", + // valid_prefixes: ["webkit", "moz", "o"], + // }, + // "animation-duration": { + // ty: "SmallList(Time, 1)", + // valid_prefixes: ["webkit", "moz", "o"], + // }, + // "animation-timing-function": { + // ty: "SmallList(EasingFunction, 1)", + // valid_prefixes: ["webkit", "moz", "o"], + // }, + // "animation-iteration-count": { + // ty: "SmallList(AnimationIterationCount, 1)", + // valid_prefixes: ["webkit", "moz", "o"], + // }, + // "animation-direction": { + // ty: "SmallList(AnimationDirection, 1)", + // valid_prefixes: ["webkit", "moz", "o"], + // }, + // "animation-play-state": { + // ty: "SmallList(AnimationPlayState, 1)", + // valid_prefixes: ["webkit", "moz", "o"], + // }, + // "animation-delay": { + // ty: "SmallList(Time, 1)", + // valid_prefixes: ["webkit", "moz", "o"], + // }, + // "animation-fill-mode": { + // ty: "SmallList(AnimationFillMode, 1)", + // valid_prefixes: ["webkit", "moz", "o"], + // }, + // "animation-composition": { + // ty: "SmallList(AnimationComposition, 1)", + // }, + // "animation-timeline": { + // ty: "SmallList(AnimationTimeline, 1)", + // }, + // "animation-range-start": { + // ty: "SmallList(AnimationRangeStart, 1)", + // }, + // "animation-range-end": { + // ty: "SmallList(AnimationRangeEnd, 1)", + // }, + // "animation-range": { + // ty: "SmallList(AnimationRange, 1)", + // }, + // animation: { + // ty: "AnimationList", + // valid_prefixes: ["webkit", "moz", "o"], + // shorthand: 
true, + // }, + // transform: { + // ty: "TransformList", + // valid_prefixes: ["webkit", "moz", "ms", "o"], + // }, + // "transform-origin": { + // ty: "Position", + // valid_prefixes: ["webkit", "moz", "ms", "o"], + // }, + // "transform-style": { + // ty: "TransformStyle", + // valid_prefixes: ["webkit", "moz"], + // }, + // "transform-box": { + // ty: "TransformBox", + // }, + // "backface-visibility": { + // ty: "BackfaceVisibility", + // valid_prefixes: ["webkit", "moz"], + // }, + // perspective: { + // ty: "Perspective", + // valid_prefixes: ["webkit", "moz"], + // }, + // "perspective-origin": { + // ty: "Position", + // valid_prefixes: ["webkit", "moz"], + // }, + // translate: { + // ty: "Translate", + // }, + // rotate: { + // ty: "Rotate", + // }, + // scale: { + // ty: "Scale", + // }, + // "text-transform": { + // ty: "TextTransform", + // }, + // "white-space": { + // ty: "WhiteSpace", + // }, + // "tab-size": { + // ty: "LengthOrNumber", + // valid_prefixes: ["moz", "o"], + // }, + // "word-break": { + // ty: "WordBreak", + // }, + // "line-break": { + // ty: "LineBreak", + // }, + // hyphens: { + // ty: "Hyphens", + // valid_prefixes: ["webkit", "moz", "ms"], + // }, + // "overflow-wrap": { + // ty: "OverflowWrap", + // }, + // "word-wrap": { + // ty: "OverflowWrap", + // }, + // "text-align": { + // ty: "TextAlign", + // }, + // "text-align-last": { + // ty: "TextAlignLast", + // valid_prefixes: ["moz"], + // }, + // "text-justify": { + // ty: "TextJustify", + // }, + // "word-spacing": { + // ty: "Spacing", + // }, + // "letter-spacing": { + // ty: "Spacing", + // }, + // "text-indent": { + // ty: "TextIndent", + // }, + // "text-decoration-line": { + // ty: "TextDecorationLine", + // valid_prefixes: ["webkit", "moz"], + // }, + // "text-decoration-style": { + // ty: "TextDecorationStyle", + // valid_prefixes: ["webkit", "moz"], + // }, + "text-decoration-color": { + ty: "CssColor", + valid_prefixes: ["webkit", "moz"], + }, + // "text-decoration-thickness": { + // ty: "TextDecorationThickness", + // }, + // "text-decoration": { + // ty: "TextDecoration", + // valid_prefixes: ["webkit", "moz"], + // shorthand: true, + // }, + // "text-decoration-skip-ink": { + // ty: "TextDecorationSkipInk", + // valid_prefixes: ["webkit"], + // }, + // "text-emphasis-style": { + // ty: "TextEmphasisStyle", + // valid_prefixes: ["webkit"], + // }, + "text-emphasis-color": { + ty: "CssColor", + valid_prefixes: ["webkit"], + }, + // "text-emphasis": { + // ty: "TextEmphasis", + // valid_prefixes: ["webkit"], + // shorthand: true, + // }, + // "text-emphasis-position": { + // ty: "TextEmphasisPosition", + // valid_prefixes: ["webkit"], + // }, + "text-shadow": { + ty: "SmallList(TextShadow, 1)", + }, + // "text-size-adjust": { + // ty: "TextSizeAdjust", + // valid_prefixes: ["webkit", "moz", "ms"], + // }, + direction: { + ty: "Direction", + }, + // "unicode-bidi": { + // ty: "UnicodeBidi", + // }, + // "box-decoration-break": { + // ty: "BoxDecorationBreak", + // valid_prefixes: ["webkit"], + // }, + // resize: { + // ty: "Resize", + // }, + // cursor: { + // ty: "Cursor", + // }, + // TODO: Hello future Zack, if you uncomment this, remember to uncomment the corresponding value in FallbackHandler in prefix_handler.zig :) + // "caret-color": { + // ty: "ColorOrAuto", + // }, + // "caret-shape": { + // ty: "CaretShape", + // }, + // TODO: Hello future Zack, if you uncomment this, remember to uncomment the corresponding value in FallbackHandler in prefix_handler.zig :) + // caret: { + // ty: 
"Caret", + // shorthand: true, + // }, + // "user-select": { + // ty: "UserSelect", + // valid_prefixes: ["webkit", "moz", "ms"], + // }, + // "accent-color": { + // ty: "ColorOrAuto", + // }, + // appearance: { + // ty: "Appearance", + // valid_prefixes: ["webkit", "moz", "ms"], + // }, + // "list-style-type": { + // ty: "ListStyleType", + // }, + // "list-style-image": { + // ty: "Image", + // }, + // "list-style-position": { + // ty: "ListStylePosition", + // }, + // "list-style": { + // ty: "ListStyle", + // shorthand: true, + // }, + // "marker-side": { + // ty: "MarkerSide", + // }, + composes: { + ty: "Composes", + conditional: { css_modules: true }, + }, + // TODO: Hello future Zack, if you uncomment this, remember to uncomment the corresponding value in FallbackHandler in prefix_handler.zig :) + // fill: { + // ty: "SVGPaint", + // }, + // "fill-rule": { + // ty: "FillRule", + // }, + // "fill-opacity": { + // ty: "AlphaValue", + // }, + // TODO: Hello future Zack, if you uncomment this, remember to uncomment the corresponding value in FallbackHandler in prefix_handler.zig :) + // stroke: { + // ty: "SVGPaint", + // }, + // "stroke-opacity": { + // ty: "AlphaValue", + // }, + // "stroke-width": { + // ty: "LengthPercentage", + // }, + // "stroke-linecap": { + // ty: "StrokeLinecap", + // }, + // "stroke-linejoin": { + // ty: "StrokeLinejoin", + // }, + // "stroke-miterlimit": { + // ty: "CSSNumber", + // }, + // "stroke-dasharray": { + // ty: "StrokeDasharray", + // }, + // "stroke-dashoffset": { + // ty: "LengthPercentage", + // }, + // "marker-start": { + // ty: "Marker", + // }, + // "marker-mid": { + // ty: "Marker", + // }, + // "marker-end": { + // ty: "Marker", + // }, + // marker: { + // ty: "Marker", + // }, + // "color-interpolation": { + // ty: "ColorInterpolation", + // }, + // "color-interpolation-filters": { + // ty: "ColorInterpolation", + // }, + // "color-rendering": { + // ty: "ColorRendering", + // }, + // "shape-rendering": { + // ty: "ShapeRendering", + // }, + // "text-rendering": { + // ty: "TextRendering", + // }, + // "image-rendering": { + // ty: "ImageRendering", + // }, + // "clip-path": { + // ty: "ClipPath", + // valid_prefixes: ["webkit"], + // }, + // "clip-rule": { + // ty: "FillRule", + // }, + "mask-image": { + ty: "SmallList(Image, 1)", + valid_prefixes: ["webkit"], + }, + "mask-mode": { + ty: "SmallList(MaskMode, 1)", + }, + "mask-repeat": { + ty: "SmallList(BackgroundRepeat, 1)", + valid_prefixes: ["webkit"], + }, + "mask-position-x": { + ty: "SmallList(HorizontalPosition, 1)", + }, + "mask-position-y": { + ty: "SmallList(VerticalPosition, 1)", + }, + "mask-position": { + ty: "SmallList(Position, 1)", + valid_prefixes: ["webkit"], + }, + "mask-clip": { + ty: "SmallList(MaskClip, 1)", + valid_prefixes: ["webkit"], + eval_branch_quota: 5000, + }, + "mask-origin": { + ty: "SmallList(GeometryBox, 1)", + valid_prefixes: ["webkit"], + }, + "mask-size": { + ty: "SmallList(BackgroundSize, 1)", + valid_prefixes: ["webkit"], + }, + "mask-composite": { + ty: "SmallList(MaskComposite, 1)", + }, + "mask-type": { + ty: "MaskType", + }, + mask: { + ty: "SmallList(Mask, 1)", + valid_prefixes: ["webkit"], + shorthand: true, + }, + "mask-border-source": { + ty: "Image", + }, + "mask-border-mode": { + ty: "MaskBorderMode", + }, + "mask-border-slice": { + ty: "BorderImageSlice", + }, + "mask-border-width": { + ty: "Rect(BorderImageSideWidth)", + }, + "mask-border-outset": { + ty: "Rect(LengthOrNumber)", + }, + "mask-border-repeat": { + ty: "BorderImageRepeat", + 
}, + "mask-border": { + ty: "MaskBorder", + shorthand: true, + }, + "-webkit-mask-composite": { + ty: "SmallList(WebKitMaskComposite, 1)", + }, + "mask-source-type": { + ty: "SmallList(WebKitMaskSourceType, 1)", + valid_prefixes: ["webkit"], + unprefixed: false, + }, + "mask-box-image": { + ty: "BorderImage", + valid_prefixes: ["webkit"], + unprefixed: false, + }, + "mask-box-image-source": { + ty: "Image", + valid_prefixes: ["webkit"], + unprefixed: false, + }, + "mask-box-image-slice": { + ty: "BorderImageSlice", + valid_prefixes: ["webkit"], + unprefixed: false, + }, + "mask-box-image-width": { + ty: "Rect(BorderImageSideWidth)", + valid_prefixes: ["webkit"], + unprefixed: false, + }, + "mask-box-image-outset": { + ty: "Rect(LengthOrNumber)", + valid_prefixes: ["webkit"], + unprefixed: false, + }, + "mask-box-image-repeat": { + ty: "BorderImageRepeat", + valid_prefixes: ["webkit"], + unprefixed: false, + }, + // TODO: Hello future Zack, if you uncomment this, remember to uncomment the corresponding value in FallbackHandler in prefix_handler.zig :) + // filter: { + // ty: "FilterList", + // valid_prefixes: ["webkit"], + // }, + // TODO: Hello future Zack, if you uncomment this, remember to uncomment the corresponding value in FallbackHandler in prefix_handler.zig :) + // "backdrop-filter": { + // ty: "FilterList", + // valid_prefixes: ["webkit"], + // }, + // "z-index": { + // ty: "position.ZIndex", + // }, + // "container-type": { + // ty: "ContainerType", + // }, + // "container-name": { + // ty: "ContainerNameList", + // }, + // container: { + // ty: "Container", + // shorthand: true, + // }, + // "view-transition-name": { + // ty: "CustomIdent", + // }, + // "color-scheme": { + // ty: "ColorScheme", + // }, +}); + +function prelude() { + return /* zig */ `const std = @import("std"); +const bun = @import("root").bun; +const Allocator = std.mem.Allocator; + +pub const css = @import("../css_parser.zig"); + +const Printer = css.Printer; +const PrintErr = css.PrintErr; +const VendorPrefix = css.VendorPrefix; + + +const PropertyImpl = @import("./properties_impl.zig").PropertyImpl; +const PropertyIdImpl = @import("./properties_impl.zig").PropertyIdImpl; + +const CSSWideKeyword = css.css_properties.CSSWideKeyword; +const UnparsedProperty = css.css_properties.custom.UnparsedProperty; +const CustomProperty = css.css_properties.custom.CustomProperty; + +const css_values = css.css_values; +const CssColor = css.css_values.color.CssColor; +const Image = css.css_values.image.Image; +const Length = css.css_values.length.Length; +const LengthValue = css.css_values.length.LengthValue; +const LengthPercentage = css_values.length.LengthPercentage; +const LengthPercentageOrAuto = css_values.length.LengthPercentageOrAuto; +const PropertyCategory = css.PropertyCategory; +const LogicalGroup = css.LogicalGroup; +const CSSNumber = css.css_values.number.CSSNumber; +const CSSNumberFns = css.css_values.number.CSSNumberFns; +const CSSInteger = css.css_values.number.CSSInteger; +const CSSIntegerFns = css.css_values.number.CSSIntegerFns; +const NumberOrPercentage = css.css_values.percentage.NumberOrPercentage; +const Percentage = css.css_values.percentage.Percentage; +const Angle = css.css_values.angle.Angle; +const DashedIdentReference = css.css_values.ident.DashedIdentReference; +const Time = css.css_values.time.Time; +const EasingFunction = css.css_values.easing.EasingFunction; +const CustomIdent = css.css_values.ident.CustomIdent; +const CSSString = css.css_values.string.CSSString; +const DashedIdent = 
css.css_values.ident.DashedIdent; +const Url = css.css_values.url.Url; +const CustomIdentList = css.css_values.ident.CustomIdentList; +const Location = css.Location; +const HorizontalPosition = css.css_values.position.HorizontalPosition; +const VerticalPosition = css.css_values.position.VerticalPosition; +const ContainerName = css.css_rules.container.ContainerName; + +pub const font = css.css_properties.font; +const border = css.css_properties.border; +const border_radius = css.css_properties.border_radius; +const border_image = css.css_properties.border_image; +const outline = css.css_properties.outline; +const flex = css.css_properties.flex; +const @"align" = css.css_properties.@"align"; +const margin_padding = css.css_properties.margin_padding; +const transition = css.css_properties.transition; +const animation = css.css_properties.animation; +const transform = css.css_properties.transform; +const text = css.css_properties.text; +const ui = css.css_properties.ui; +const list = css.css_properties.list; +const css_modules = css.css_properties.css_modules; +const svg = css.css_properties.svg; +const shape = css.css_properties.shape; +const masking = css.css_properties.masking; +const background = css.css_properties.background; +const effects = css.css_properties.effects; +const contain = css.css_properties.contain; +const custom = css.css_properties.custom; +const position = css.css_properties.position; +const box_shadow = css.css_properties.box_shadow; +const size = css.css_properties.size; +const overflow = css.css_properties.overflow; + +const BorderSideWidth = border.BorderSideWidth; +const Size2D = css_values.size.Size2D; +const BorderRadius = border_radius.BorderRadius; +const Rect = css_values.rect.Rect; +const LengthOrNumber = css_values.length.LengthOrNumber; +const BorderImageRepeat = border_image.BorderImageRepeat; +const BorderImageSideWidth = border_image.BorderImageSideWidth; +const BorderImageSlice = border_image.BorderImageSlice; +const BorderImage = border_image.BorderImage; +const BorderColor = border.BorderColor; +const BorderStyle = border.BorderStyle; +const BorderWidth = border.BorderWidth; +const BorderBlockColor = border.BorderBlockColor; +const BorderBlockStyle = border.BorderBlockStyle; +const BorderBlockWidth = border.BorderBlockWidth; +const BorderInlineColor = border.BorderInlineColor; +const BorderInlineStyle = border.BorderInlineStyle; +const BorderInlineWidth = border.BorderInlineWidth; +const Border = border.Border; +const BorderTop = border.BorderTop; +const BorderRight = border.BorderRight; +const BorderLeft = border.BorderLeft; +const BorderBottom = border.BorderBottom; +const BorderBlockStart = border.BorderBlockStart; +const BorderBlockEnd = border.BorderBlockEnd; +const BorderInlineStart = border.BorderInlineStart; +const BorderInlineEnd = border.BorderInlineEnd; +const BorderBlock = border.BorderBlock; +const BorderInline = border.BorderInline; +const Outline = outline.Outline; +const OutlineStyle = outline.OutlineStyle; +const FlexDirection = flex.FlexDirection; +const FlexWrap = flex.FlexWrap; +const FlexFlow = flex.FlexFlow; +const Flex = flex.Flex; +const BoxOrient = flex.BoxOrient; +const BoxDirection = flex.BoxDirection; +const BoxAlign = flex.BoxAlign; +const BoxPack = flex.BoxPack; +const BoxLines = flex.BoxLines; +const FlexPack = flex.FlexPack; +const FlexItemAlign = flex.FlexItemAlign; +const FlexLinePack = flex.FlexLinePack; +const AlignContent = @"align".AlignContent; +const JustifyContent = @"align".JustifyContent; +const PlaceContent = 
@"align".PlaceContent; +const AlignSelf = @"align".AlignSelf; +const JustifySelf = @"align".JustifySelf; +const PlaceSelf = @"align".PlaceSelf; +const AlignItems = @"align".AlignItems; +const JustifyItems = @"align".JustifyItems; +const PlaceItems = @"align".PlaceItems; +const GapValue = @"align".GapValue; +const Gap = @"align".Gap; +const MarginBlock = margin_padding.MarginBlock; +const Margin = margin_padding.Margin; +const MarginInline = margin_padding.MarginInline; +const PaddingBlock = margin_padding.PaddingBlock; +const PaddingInline = margin_padding.PaddingInline; +const Padding = margin_padding.Padding; +const ScrollMarginBlock = margin_padding.ScrollMarginBlock; +const ScrollMarginInline = margin_padding.ScrollMarginInline; +const ScrollMargin = margin_padding.ScrollMargin; +const ScrollPaddingBlock = margin_padding.ScrollPaddingBlock; +const ScrollPaddingInline = margin_padding.ScrollPaddingInline; +const ScrollPadding = margin_padding.ScrollPadding; +const FontWeight = font.FontWeight; +const FontSize = font.FontSize; +const FontStretch = font.FontStretch; +const FontFamily = font.FontFamily; +const FontStyle = font.FontStyle; +const FontVariantCaps = font.FontVariantCaps; +const LineHeight = font.LineHeight; +const Font = font.Font; +// const VerticalAlign = font.VerticalAlign; +// const Transition = transition.Transition; +// const AnimationNameList = animation.AnimationNameList; +// const AnimationList = animation.AnimationList; +// const AnimationIterationCount = animation.AnimationIterationCount; +// const AnimationDirection = animation.AnimationDirection; +// const AnimationPlayState = animation.AnimationPlayState; +// const AnimationFillMode = animation.AnimationFillMode; +// const AnimationComposition = animation.AnimationComposition; +// const AnimationTimeline = animation.AnimationTimeline; +// const AnimationRangeStart = animation.AnimationRangeStart; +// const AnimationRangeEnd = animation.AnimationRangeEnd; +// const AnimationRange = animation.AnimationRange; +// const TransformList = transform.TransformList; +// const TransformStyle = transform.TransformStyle; +// const TransformBox = transform.TransformBox; +// const BackfaceVisibility = transform.BackfaceVisibility; +// const Perspective = transform.Perspective; +// const Translate = transform.Translate; +// const Rotate = transform.Rotate; +// const Scale = transform.Scale; +// const TextTransform = text.TextTransform; +// const WhiteSpace = text.WhiteSpace; +// const WordBreak = text.WordBreak; +// const LineBreak = text.LineBreak; +// const Hyphens = text.Hyphens; +// const OverflowWrap = text.OverflowWrap; +// const TextAlign = text.TextAlign; +// const TextIndent = text.TextIndent; +// const Spacing = text.Spacing; +// const TextJustify = text.TextJustify; +// const TextAlignLast = text.TextAlignLast; +// const TextDecorationLine = text.TextDecorationLine; +// const TextDecorationStyle = text.TextDecorationStyle; +// const TextDecorationThickness = text.TextDecorationThickness; +// const TextDecoration = text.TextDecoration; +// const TextDecorationSkipInk = text.TextDecorationSkipInk; +// const TextEmphasisStyle = text.TextEmphasisStyle; +// const TextEmphasis = text.TextEmphasis; +// const TextEmphasisPositionVertical = text.TextEmphasisPositionVertical; +// const TextEmphasisPositionHorizontal = text.TextEmphasisPositionHorizontal; +// const TextEmphasisPosition = text.TextEmphasisPosition; +const TextShadow = text.TextShadow; +// const TextSizeAdjust = text.TextSizeAdjust; +const Direction = 
text.Direction; +// const UnicodeBidi = text.UnicodeBidi; +// const BoxDecorationBreak = text.BoxDecorationBreak; +// const Resize = ui.Resize; +// const Cursor = ui.Cursor; +// const ColorOrAuto = ui.ColorOrAuto; +// const CaretShape = ui.CaretShape; +// const Caret = ui.Caret; +// const UserSelect = ui.UserSelect; +// const Appearance = ui.Appearance; +// const ColorScheme = ui.ColorScheme; +// const ListStyleType = list.ListStyleType; +// const ListStylePosition = list.ListStylePosition; +// const ListStyle = list.ListStyle; +// const MarkerSide = list.MarkerSide; +const Composes = css_modules.Composes; +// const SVGPaint = svg.SVGPaint; +// const FillRule = shape.FillRule; +// const AlphaValue = shape.AlphaValue; +// const StrokeLinecap = svg.StrokeLinecap; +// const StrokeLinejoin = svg.StrokeLinejoin; +// const StrokeDasharray = svg.StrokeDasharray; +// const Marker = svg.Marker; +// const ColorInterpolation = svg.ColorInterpolation; +// const ColorRendering = svg.ColorRendering; +// const ShapeRendering = svg.ShapeRendering; +// const TextRendering = svg.TextRendering; +// const ImageRendering = svg.ImageRendering; +const ClipPath = masking.ClipPath; +const MaskMode = masking.MaskMode; +const MaskClip = masking.MaskClip; +const GeometryBox = masking.GeometryBox; +const MaskComposite = masking.MaskComposite; +const MaskType = masking.MaskType; +const Mask = masking.Mask; +const MaskBorderMode = masking.MaskBorderMode; +const MaskBorder = masking.MaskBorder; +const WebKitMaskComposite = masking.WebKitMaskComposite; +const WebKitMaskSourceType = masking.WebKitMaskSourceType; +const BackgroundRepeat = background.BackgroundRepeat; +const BackgroundSize = background.BackgroundSize; +// const FilterList = effects.FilterList; +// const ContainerType = contain.ContainerType; +// const Container = contain.Container; +// const ContainerNameList = contain.ContainerNameList; +const CustomPropertyName = custom.CustomPropertyName; +const display = css.css_properties.display; + +const Position = position.Position; + +const Result = css.Result; + +const BabyList = bun.BabyList; +const ArrayList = std.ArrayListUnmanaged; +const SmallList = css.SmallList; + +`; +} diff --git a/src/css/properties/list.zig b/src/css/properties/list.zig new file mode 100644 index 0000000000..9c6e488a4f --- /dev/null +++ b/src/css/properties/list.zig @@ -0,0 +1,86 @@ +const std = @import("std"); +const bun = @import("root").bun; +const Allocator = std.mem.Allocator; +const ArrayList = std.ArrayListUnmanaged; + +pub const css = @import("../css_parser.zig"); + +const SmallList = css.SmallList; +const Printer = css.Printer; +const PrintErr = css.PrintErr; +const Error = css.Error; + +const ContainerName = css.css_rules.container.ContainerName; + +const LengthPercentage = css.css_values.length.LengthPercentage; +const CustomIdent = css.css_values.ident.CustomIdent; +const CSSString = css.css_values.string.CSSString; +const CSSNumber = css.css_values.number.CSSNumber; +const LengthPercentageOrAuto = css.css_values.length.LengthPercentageOrAuto; +const Size2D = css.css_values.size.Size2D; +const DashedIdent = css.css_values.ident.DashedIdent; +const Image = css.css_values.image.Image; +const CssColor = css.css_values.color.CssColor; +const Ratio = css.css_values.ratio.Ratio; +const Length = css.css_values.length.LengthValue; +const Rect = css.css_values.rect.Rect; +const NumberOrPercentage = css.css_values.percentage.NumberOrPercentage; +const CustomIdentList = css.css_values.ident.CustomIdentList; +const Angle = 
css.css_values.angle.Angle; +const Url = css.css_values.url.Url; + +/// A value for the [list-style-type](https://www.w3.org/TR/2020/WD-css-lists-3-20201117/#text-markers) property. +pub const ListStyleType = union(enum) { + /// No marker. + none, + /// An explicit marker string. + string: CSSString, + /// A named counter style. + counter_style: CounterStyle, +}; + +/// A [counter-style](https://www.w3.org/TR/css-counter-styles-3/#typedef-counter-style) name. +pub const CounterStyle = union(enum) { + /// A predefined counter style name. + predefined: PredefinedCounterStyle, + /// A custom counter style name. + name: CustomIdent, + /// An inline `symbols()` definition. + symbols: Symbols, + + const Symbols = struct { + /// The counter system. + system: SymbolsType, + /// The symbols. + symbols: ArrayList(Symbol), + }; +}; + +/// A single [symbol](https://www.w3.org/TR/css-counter-styles-3/#funcdef-symbols) as used in the +/// `symbols()` function. +/// +/// See [CounterStyle](CounterStyle). +const Symbol = union(enum) { + /// A string. + string: CSSString, + /// An image. + image: Image, +}; + +/// A [predefined counter](https://www.w3.org/TR/css-counter-styles-3/#predefined-counters) style. +pub const PredefinedCounterStyle = @compileError(css.todo_stuff.depth); + +/// A [``](https://www.w3.org/TR/css-counter-styles-3/#typedef-symbols-type) value, +/// as used in the `symbols()` function. +/// +/// See [CounterStyle](CounterStyle). +pub const SymbolsType = css.DefineEnumProperty(@compileError(css.todo_stuff.depth)); + +/// A value for the [list-style-position](https://www.w3.org/TR/2020/WD-css-lists-3-20201117/#list-style-position-property) property. +pub const ListStylePosition = css.DefineEnumProperty(@compileError(css.todo_stuff.depth)); + +/// A value for the [list-style](https://www.w3.org/TR/2020/WD-css-lists-3-20201117/#list-style-property) shorthand property. +pub const ListStyle = @compileError(css.todo_stuff.depth); + +/// A value for the [marker-side](https://www.w3.org/TR/2020/WD-css-lists-3-20201117/#marker-side) property. 
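+// (Example note for the list-style types above; a sketch, not taken from the source.)
+// Assuming the `@compileError(css.todo_stuff.depth)` placeholders are eventually filled
+// in, a declaration like `list-style-type: symbols(cyclic "*" "+")` would be expected
+// to map onto these types roughly as:
+//
+//   ListStyleType{ .counter_style = CounterStyle{ .symbols = .{
+//       .system = <the `cyclic` SymbolsType>,
+//       .symbols = <a list of Symbol{ .string = ... } entries>,
+//   } } }
+//
+// while `list-style-type: none` and `list-style-type: "-"` map to `.none` and
+// `.{ .string = ... }` respectively.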
+pub const MarkerSide = css.DefineEnumProperty(@compileError(css.todo_stuff.depth)); diff --git a/src/css/properties/margin_padding.zig b/src/css/properties/margin_padding.zig new file mode 100644 index 0000000000..523a674d17 --- /dev/null +++ b/src/css/properties/margin_padding.zig @@ -0,0 +1,846 @@ +const std = @import("std"); +const bun = @import("root").bun; +const Allocator = std.mem.Allocator; +const ArrayList = std.ArrayListUnmanaged; + +pub const css = @import("../css_parser.zig"); +const Property = css.Property; +const PropertyId = css.PropertyId; +const PropertyIdTag = css.PropertyIdTag; +const PropertyCategory = css.logical.PropertyCategory; + +const SmallList = css.SmallList; +const Printer = css.Printer; +const PrintErr = css.PrintErr; +const Error = css.Error; + +const ContainerName = css.css_rules.container.ContainerName; + +const LengthPercentage = css.css_values.length.LengthPercentage; +const CustomIdent = css.css_values.ident.CustomIdent; +const CSSString = css.css_values.string.CSSString; +const CSSNumber = css.css_values.number.CSSNumber; +const LengthPercentageOrAuto = css.css_values.length.LengthPercentageOrAuto; +const Size2D = css.css_values.size.Size2D; +const DashedIdent = css.css_values.ident.DashedIdent; +const Image = css.css_values.image.Image; +const CssColor = css.css_values.color.CssColor; +const Ratio = css.css_values.ratio.Ratio; +const Length = css.css_values.length.LengthValue; +const Rect = css.css_values.rect.Rect; +const NumberOrPercentage = css.css_values.percentage.NumberOrPercentage; +const CustomIdentList = css.css_values.ident.CustomIdentList; +const Angle = css.css_values.angle.Angle; +const Url = css.css_values.url.Url; + +/// A value for the [inset](https://drafts.csswg.org/css-logical/#propdef-inset) shorthand property. +pub const Inset = struct { + top: LengthPercentageOrAuto, + right: LengthPercentageOrAuto, + bottom: LengthPercentageOrAuto, + left: LengthPercentageOrAuto, + + // TODO: bring this back + // pub usingnamespace css.DefineShorthand(@This(), css.PropertyIdTag.inset); + pub usingnamespace css.DefineRectShorthand(@This(), LengthPercentageOrAuto); + + pub const PropertyFieldMap = .{ + .top = css.PropertyIdTag.top, + .right = css.PropertyIdTag.right, + .bottom = css.PropertyIdTag.bottom, + .left = css.PropertyIdTag.left, + }; + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } +}; + +/// A value for the [inset-block](https://drafts.csswg.org/css-logical/#propdef-inset-block) shorthand property. +pub const InsetBlock = struct { + /// The block start value. + block_start: LengthPercentageOrAuto, + /// The block end value. 
+ block_end: LengthPercentageOrAuto, + + // TODO: bring this back + // pub usingnamespace css.DefineShorthand(@This(), css.PropertyIdTag.@"inset-block"); + pub usingnamespace css.DefineSizeShorthand(@This(), LengthPercentageOrAuto); + + pub const PropertyFieldMap = .{ + .block_start = css.PropertyIdTag.@"inset-block-start", + .block_end = css.PropertyIdTag.@"inset-block-end", + }; + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } +}; + +/// A value for the [inset-inline](https://drafts.csswg.org/css-logical/#propdef-inset-inline) shorthand property. +pub const InsetInline = struct { + /// The inline start value. + inline_start: LengthPercentageOrAuto, + /// The inline end value. + inline_end: LengthPercentageOrAuto, + + pub const PropertyFieldMap = .{ + .inline_start = css.PropertyIdTag.@"inset-inline-start", + .inline_end = css.PropertyIdTag.@"inset-inline-end", + }; + + // TODO: bring this back + // pub usingnamespace css.DefineShorthand(@This(), css.PropertyIdTag.@"inset-inline"); + pub usingnamespace css.DefineSizeShorthand(@This(), LengthPercentageOrAuto); + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } +}; + +/// A value for the [margin-block](https://drafts.csswg.org/css-logical/#propdef-margin-block) shorthand property. +pub const MarginBlock = struct { + /// The block start value. + block_start: LengthPercentageOrAuto, + /// The block end value. + block_end: LengthPercentageOrAuto, + + // TODO: bring this back + // pub usingnamespace css.DefineShorthand(@This(), css.PropertyIdTag.@"margin-block"); + pub usingnamespace css.DefineSizeShorthand(@This(), LengthPercentageOrAuto); + + pub const PropertyFieldMap = .{ + .block_start = css.PropertyIdTag.@"margin-block-start", + .block_end = css.PropertyIdTag.@"margin-block-end", + }; + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } +}; + +/// A value for the [margin-inline](https://drafts.csswg.org/css-logical/#propdef-margin-inline) shorthand property. +pub const MarginInline = struct { + /// The inline start value. + inline_start: LengthPercentageOrAuto, + /// The inline end value. + inline_end: LengthPercentageOrAuto, + + // TODO: bring this back + // pub usingnamespace css.DefineShorthand(@This(), css.PropertyIdTag.@"margin-inline"); + pub usingnamespace css.DefineSizeShorthand(@This(), LengthPercentageOrAuto); + + pub const PropertyFieldMap = .{ + .inline_start = css.PropertyIdTag.@"margin-inline-start", + .inline_end = css.PropertyIdTag.@"margin-inline-end", + }; + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } +}; + +/// A value for the [margin](https://drafts.csswg.org/css-box-4/#propdef-margin) shorthand property. 
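+// Example note for the shorthand structs in this file (illustrative, not from the
+// source): `DefineRectShorthand` is expected to give the four-sided shorthands the
+// usual CSS 1-to-4 value expansion, so a declaration like `margin: 10px 20px` would
+// populate the struct below roughly as:
+//
+//   Margin{ .top = 10px, .right = 20px, .bottom = 10px, .left = 20px }
+//
+// and `PropertyFieldMap` ties each field back to its longhand property id
+// (`margin-top`, `margin-right`, ...) so the handlers further down can split or
+// recombine the shorthand.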
+pub const Margin = struct { + top: LengthPercentageOrAuto, + right: LengthPercentageOrAuto, + bottom: LengthPercentageOrAuto, + left: LengthPercentageOrAuto, + + // TODO: bring this back + // pub usingnamespace css.DefineShorthand(@This(), css.PropertyIdTag.margin); + pub usingnamespace css.DefineRectShorthand(@This(), LengthPercentageOrAuto); + + pub const PropertyFieldMap = .{ + .top = css.PropertyIdTag.@"margin-top", + .right = css.PropertyIdTag.@"margin-right", + .bottom = css.PropertyIdTag.@"margin-bottom", + .left = css.PropertyIdTag.@"margin-left", + }; + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } +}; + +/// A value for the [padding-block](https://drafts.csswg.org/css-logical/#propdef-padding-block) shorthand property. +pub const PaddingBlock = struct { + /// The block start value. + block_start: LengthPercentageOrAuto, + /// The block end value. + block_end: LengthPercentageOrAuto, + + // TODO: bring this back + // pub usingnamespace css.DefineShorthand(@This(), css.PropertyIdTag.@"padding-block"); + pub usingnamespace css.DefineSizeShorthand(@This(), LengthPercentageOrAuto); + + pub const PropertyFieldMap = .{ + .block_start = css.PropertyIdTag.@"padding-block-start", + .block_end = css.PropertyIdTag.@"padding-block-end", + }; + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } +}; + +/// A value for the [padding-inline](https://drafts.csswg.org/css-logical/#propdef-padding-inline) shorthand property. +pub const PaddingInline = struct { + /// The inline start value. + inline_start: LengthPercentageOrAuto, + /// The inline end value. + inline_end: LengthPercentageOrAuto, + + // TODO: bring this back + // pub usingnamespace css.DefineShorthand(@This(), css.PropertyIdTag.@"padding-inline"); + pub usingnamespace css.DefineSizeShorthand(@This(), LengthPercentageOrAuto); + + pub const PropertyFieldMap = .{ + .inline_start = css.PropertyIdTag.@"padding-inline-start", + .inline_end = css.PropertyIdTag.@"padding-inline-end", + }; + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } +}; + +/// A value for the [padding](https://drafts.csswg.org/css-box-4/#propdef-padding) shorthand property. 
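+// Illustrative note (assumption, not from the source): the two-field logical
+// shorthands above use `DefineSizeShorthand`, which pairs a start and an end value,
+// so `padding-block: 1em 2em` would be represented roughly as
+// `PaddingBlock{ .block_start = 1em, .block_end = 2em }`, and a single value such as
+// `padding-block: 1em` is expected to fill both fields. The four-sided physical
+// shorthand below uses `DefineRectShorthand` instead.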
+pub const Padding = struct { + top: LengthPercentageOrAuto, + right: LengthPercentageOrAuto, + bottom: LengthPercentageOrAuto, + left: LengthPercentageOrAuto, + + // TODO: bring this back + // pub usingnamespace css.DefineShorthand(@This(), css.PropertyIdTag.padding); + pub usingnamespace css.DefineRectShorthand(@This(), LengthPercentageOrAuto); + + pub const PropertyFieldMap = .{ + .top = css.PropertyIdTag.@"padding-top", + .right = css.PropertyIdTag.@"padding-right", + .bottom = css.PropertyIdTag.@"padding-bottom", + .left = css.PropertyIdTag.@"padding-left", + }; + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } +}; + +/// A value for the [scroll-margin-block](https://drafts.csswg.org/css-scroll-snap/#propdef-scroll-margin-block) shorthand property. +pub const ScrollMarginBlock = struct { + /// The block start value. + block_start: LengthPercentageOrAuto, + /// The block end value. + block_end: LengthPercentageOrAuto, + + // TODO: bring this back + // pub usingnamespace css.DefineShorthand(@This(), css.PropertyIdTag.@"scroll-margin-block"); + pub usingnamespace css.DefineSizeShorthand(@This(), LengthPercentageOrAuto); + + pub const PropertyFieldMap = .{ + .block_start = css.PropertyIdTag.@"scroll-margin-block-start", + .block_end = css.PropertyIdTag.@"scroll-margin-block-end", + }; + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } +}; + +/// A value for the [scroll-margin-inline](https://drafts.csswg.org/css-scroll-snap/#propdef-scroll-margin-inline) shorthand property. +pub const ScrollMarginInline = struct { + /// The inline start value. + inline_start: LengthPercentageOrAuto, + /// The inline end value. + inline_end: LengthPercentageOrAuto, + + // TODO: bring this back + // pub usingnamespace css.DefineShorthand(@This(), css.PropertyIdTag.@"scroll-margin-inline"); + pub usingnamespace css.DefineSizeShorthand(@This(), LengthPercentageOrAuto); + + pub const PropertyFieldMap = .{ + .inline_start = css.PropertyIdTag.@"scroll-margin-inline-start", + .inline_end = css.PropertyIdTag.@"scroll-margin-inline-end", + }; + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } +}; + +/// A value for the [scroll-margin](https://drafts.csswg.org/css-scroll-snap/#scroll-margin) shorthand property. 
+pub const ScrollMargin = struct { + top: LengthPercentageOrAuto, + right: LengthPercentageOrAuto, + bottom: LengthPercentageOrAuto, + left: LengthPercentageOrAuto, + + // TODO: bring this back + // pub usingnamespace css.DefineShorthand(@This(), css.PropertyIdTag.@"scroll-margin"); + pub usingnamespace css.DefineRectShorthand(@This(), LengthPercentageOrAuto); + + pub const PropertyFieldMap = .{ + .top = css.PropertyIdTag.@"scroll-margin-top", + .right = css.PropertyIdTag.@"scroll-margin-right", + .bottom = css.PropertyIdTag.@"scroll-margin-bottom", + .left = css.PropertyIdTag.@"scroll-margin-left", + }; + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } +}; + +/// A value for the [scroll-padding-block](https://drafts.csswg.org/css-scroll-snap/#propdef-scroll-padding-block) shorthand property. +pub const ScrollPaddingBlock = struct { + /// The block start value. + block_start: LengthPercentageOrAuto, + /// The block end value. + block_end: LengthPercentageOrAuto, + + // TODO: bring this back + // pub usingnamespace css.DefineShorthand(@This(), css.PropertyIdTag.@"scroll-padding-block"); + pub usingnamespace css.DefineSizeShorthand(@This(), LengthPercentageOrAuto); + + pub const PropertyFieldMap = .{ + .block_start = css.PropertyIdTag.@"scroll-padding-block-start", + .block_end = css.PropertyIdTag.@"scroll-padding-block-end", + }; + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } +}; + +/// A value for the [scroll-padding-inline](https://drafts.csswg.org/css-scroll-snap/#propdef-scroll-padding-inline) shorthand property. +pub const ScrollPaddingInline = struct { + /// The inline start value. + inline_start: LengthPercentageOrAuto, + /// The inline end value. + inline_end: LengthPercentageOrAuto, + + // TODO: bring this back + // pub usingnamespace css.DefineShorthand(@This(), css.PropertyIdTag.@"scroll-padding-inline"); + pub usingnamespace css.DefineSizeShorthand(@This(), LengthPercentageOrAuto); + + pub const PropertyFieldMap = .{ + .inline_start = css.PropertyIdTag.@"scroll-padding-inline-start", + .inline_end = css.PropertyIdTag.@"scroll-padding-inline-end", + }; + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } +}; + +/// A value for the [scroll-padding](https://drafts.csswg.org/css-scroll-snap/#scroll-padding) shorthand property. 
+pub const ScrollPadding = struct { + top: LengthPercentageOrAuto, + right: LengthPercentageOrAuto, + bottom: LengthPercentageOrAuto, + left: LengthPercentageOrAuto, + + // TODO: bring this back + // pub usingnamespace css.DefineShorthand(@This(), css.PropertyIdTag.@"scroll-padding"); + pub usingnamespace css.DefineRectShorthand(@This(), LengthPercentageOrAuto); + + pub const PropertyFieldMap = .{ + .top = css.PropertyIdTag.@"scroll-padding-top", + .right = css.PropertyIdTag.@"scroll-padding-right", + .bottom = css.PropertyIdTag.@"scroll-padding-bottom", + .left = css.PropertyIdTag.@"scroll-padding-left", + }; + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } +}; + +pub const MarginHandler = NewSizeHandler( + PropertyIdTag.@"margin-top", + PropertyIdTag.@"margin-bottom", + PropertyIdTag.@"margin-left", + PropertyIdTag.@"margin-right", + PropertyIdTag.@"margin-block-start", + PropertyIdTag.@"margin-block-end", + PropertyIdTag.@"margin-inline-start", + PropertyIdTag.@"margin-inline-end", + PropertyIdTag.margin, + PropertyIdTag.@"margin-block", + PropertyIdTag.@"margin-inline", + PropertyCategory.physical, + .{ + .feature = css.Feature.logical_margin, + .shorthand_feature = css.Feature.logical_margin_shorthand, + }, +); + +pub const PaddingHandler = NewSizeHandler( + PropertyIdTag.@"padding-top", + PropertyIdTag.@"padding-bottom", + PropertyIdTag.@"padding-left", + PropertyIdTag.@"padding-right", + PropertyIdTag.@"padding-block-start", + PropertyIdTag.@"padding-block-end", + PropertyIdTag.@"padding-inline-start", + PropertyIdTag.@"padding-inline-end", + PropertyIdTag.padding, + PropertyIdTag.@"padding-block", + PropertyIdTag.@"padding-inline", + PropertyCategory.physical, + .{ + .feature = css.Feature.logical_padding, + .shorthand_feature = css.Feature.logical_padding_shorthand, + }, +); + +pub const ScrollMarginHandler = NewSizeHandler( + PropertyIdTag.@"scroll-margin-top", + PropertyIdTag.@"scroll-margin-bottom", + PropertyIdTag.@"scroll-margin-left", + PropertyIdTag.@"scroll-margin-right", + PropertyIdTag.@"scroll-margin-block-start", + PropertyIdTag.@"scroll-margin-block-end", + PropertyIdTag.@"scroll-margin-inline-start", + PropertyIdTag.@"scroll-margin-inline-end", + PropertyIdTag.@"scroll-margin", + PropertyIdTag.@"scroll-margin-block", + PropertyIdTag.@"scroll-margin-inline", + PropertyCategory.physical, + null, +); + +pub const InsetHandler = NewSizeHandler( + PropertyIdTag.top, + PropertyIdTag.bottom, + PropertyIdTag.left, + PropertyIdTag.right, + PropertyIdTag.@"inset-block-start", + PropertyIdTag.@"inset-block-end", + PropertyIdTag.@"inset-inline-start", + PropertyIdTag.@"inset-inline-end", + PropertyIdTag.inset, + PropertyIdTag.@"inset-block", + PropertyIdTag.@"inset-inline", + PropertyCategory.physical, + .{ + .feature = css.Feature.logical_inset, + .shorthand_feature = css.Feature.logical_inset, + }, +); + +pub fn NewSizeHandler( + comptime top_prop: css.PropertyIdTag, + comptime bottom_prop: css.PropertyIdTag, + comptime left_prop: css.PropertyIdTag, + comptime right_prop: css.PropertyIdTag, + comptime block_start_prop: css.PropertyIdTag, + comptime block_end_prop: css.PropertyIdTag, + comptime inline_start_prop: css.PropertyIdTag, + comptime inline_end_prop: css.PropertyIdTag, + comptime shorthand_prop: css.PropertyIdTag, + comptime block_shorthand: css.PropertyIdTag, + 
comptime inline_shorthand: css.PropertyIdTag, + comptime shorthand_category: css.logical.PropertyCategory, + comptime shorthand_extra: ?struct { feature: css.compat.Feature, shorthand_feature: css.compat.Feature }, +) type { + return struct { + top: ?LengthPercentageOrAuto = null, + bottom: ?LengthPercentageOrAuto = null, + left: ?LengthPercentageOrAuto = null, + right: ?LengthPercentageOrAuto = null, + block_start: ?Property = null, + block_end: ?Property = null, + inline_start: ?Property = null, + inline_end: ?Property = null, + has_any: bool = false, + category: css.logical.PropertyCategory = css.logical.PropertyCategory.default(), + + pub fn handleProperty( + this: *@This(), + property: *const Property, + dest: *css.DeclarationList, + context: *css.PropertyHandlerContext, + ) bool { + switch (@as(PropertyIdTag, property.*)) { + top_prop => this.propertyHelper("top", top_prop.valueType(), &@field(property, @tagName(top_prop)), PropertyCategory.physical, dest, context), + bottom_prop => this.propertyHelper("bottom", bottom_prop.valueType(), &@field(property, @tagName(bottom_prop)), PropertyCategory.physical, dest, context), + left_prop => this.propertyHelper("left", left_prop.valueType(), &@field(property, @tagName(left_prop)), PropertyCategory.physical, dest, context), + right_prop => this.propertyHelper("right", right_prop.valueType(), &@field(property, @tagName(right_prop)), PropertyCategory.physical, dest, context), + block_start_prop => { + this.flushHelper("block_start", block_start_prop.valueType(), &@field(property, @tagName(block_start_prop)), PropertyCategory.logical, dest, context); + this.logicalPropertyHelper("block_start", property.deepClone(context.allocator), dest, context); + }, + block_end_prop => { + this.flushHelper("block_end", block_end_prop.valueType(), &@field(property, @tagName(block_end_prop)), PropertyCategory.logical, dest, context); + this.logicalPropertyHelper("block_end", property.deepClone(context.allocator), dest, context); + }, + inline_start_prop => { + this.flushHelper("inline_start", inline_start_prop.valueType(), &@field(property, @tagName(inline_start_prop)), PropertyCategory.logical, dest, context); + this.logicalPropertyHelper("inline_start", property.deepClone(context.allocator), dest, context); + }, + inline_end_prop => { + this.flushHelper("inline_end", inline_end_prop.valueType(), &@field(property, @tagName(inline_end_prop)), PropertyCategory.logical, dest, context); + this.logicalPropertyHelper("inline_end", property.deepClone(context.allocator), dest, context); + }, + block_shorthand => { + const val = &@field(property, @tagName(block_shorthand)); + this.flushHelper("block_start", block_start_prop.valueType(), &val.block_start, .logical, dest, context); + this.flushHelper("block_end", block_end_prop.valueType(), &val.block_end, .logical, dest, context); + this.logicalPropertyHelper("block_start", @unionInit(Property, @tagName(block_start_prop), val.block_start.deepClone(context.allocator)), dest, context); + this.logicalPropertyHelper("block_end", @unionInit(Property, @tagName(block_end_prop), val.block_end.deepClone(context.allocator)), dest, context); + }, + inline_shorthand => { + const val = &@field(property, @tagName(inline_shorthand)); + this.flushHelper("inline_start", inline_start_prop.valueType(), &val.inline_start, .logical, dest, context); + this.flushHelper("inline_end", inline_end_prop.valueType(), &val.inline_end, .logical, dest, context); + this.logicalPropertyHelper("inline_start", @unionInit(Property, 
@tagName(inline_start_prop), val.inline_start.deepClone(context.allocator)), dest, context); + this.logicalPropertyHelper("inline_end", @unionInit(Property, @tagName(inline_end_prop), val.inline_end.deepClone(context.allocator)), dest, context); + }, + shorthand_prop => { + const val = &@field(property, @tagName(shorthand_prop)); + this.flushHelper("top", top_prop.valueType(), &val.top, shorthand_category, dest, context); + this.flushHelper("right", right_prop.valueType(), &val.right, shorthand_category, dest, context); + this.flushHelper("bottom", bottom_prop.valueType(), &val.bottom, shorthand_category, dest, context); + this.flushHelper("left", left_prop.valueType(), &val.left, shorthand_category, dest, context); + this.top = val.top.deepClone(context.allocator); + this.right = val.right.deepClone(context.allocator); + this.bottom = val.bottom.deepClone(context.allocator); + this.left = val.left.deepClone(context.allocator); + this.block_start = null; + this.block_end = null; + this.inline_start = null; + this.inline_end = null; + this.has_any = true; + }, + css.PropertyIdTag.unparsed => { + switch (property.unparsed.property_id) { + top_prop, bottom_prop, left_prop, right_prop, block_start_prop, block_end_prop, inline_start_prop, inline_end_prop, block_shorthand, inline_shorthand, shorthand_prop => { + // Even if we weren't able to parse the value (e.g. due to var() references), + // we can still add vendor prefixes to the property itself. + switch (property.unparsed.property_id) { + block_start_prop => this.logicalPropertyHelper("block_start", property.deepClone(context.allocator), dest, context), + block_end_prop => this.logicalPropertyHelper("block_end", property.deepClone(context.allocator), dest, context), + inline_start_prop => this.logicalPropertyHelper("inline_start", property.deepClone(context.allocator), dest, context), + inline_end_prop => this.logicalPropertyHelper("inline_end", property.deepClone(context.allocator), dest, context), + else => { + this.flush(dest, context); + dest.append(context.allocator, property.deepClone(context.allocator)) catch unreachable; + }, + } + }, + else => return false, + } + }, + else => return false, + } + + return true; + } + + pub fn finalize(this: *@This(), dest: *css.DeclarationList, context: *css.PropertyHandlerContext) void { + this.flush(dest, context); + } + + fn flushHelper( + this: *@This(), + comptime field: []const u8, + comptime T: type, + val: *const T, + comptime category: PropertyCategory, + dest: *css.DeclarationList, + context: *css.PropertyHandlerContext, + ) void { + // If the category changes betweet logical and physical, + // or if the value contains syntax that isn't supported across all targets, + // preserve the previous value as a fallback. 
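+            // Illustrative example (not from the source): given the declarations
+            // `top: 10px; inset-block-start: 1rem;`, the physical `top` is buffered with
+            // PropertyCategory.physical; when the logical declaration arrives, the category
+            // comparison below detects the change, so the buffered `top` is flushed to `dest`
+            // first and both declarations survive in source order, acting as a fallback chain
+            // for browsers that only understand one of the two forms.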
+ if (category != this.category or (@field(this, field) != null and context.targets.browsers != null and !val.isCompatible(context.targets.browsers.?))) { + this.flush(dest, context); + } + } + + fn propertyHelper( + this: *@This(), + comptime field: []const u8, + comptime T: type, + val: *const T, + comptime category: PropertyCategory, + dest: *css.DeclarationList, + context: *css.PropertyHandlerContext, + ) void { + this.flushHelper(field, T, val, category, dest, context); + @field(this, field) = val.deepClone(context.allocator); + this.category = category; + this.has_any = true; + } + + fn logicalPropertyHelper( + this: *@This(), + comptime field: []const u8, + val: css.Property, + dest: *css.DeclarationList, + context: *css.PropertyHandlerContext, + ) void { + // Assume unparsed properties might contain unsupported syntax that we must preserve as a fallback. + if (this.category != PropertyCategory.logical or (@field(this, field) != null and val == .unparsed)) { + this.flush(dest, context); + } + + if (@field(this, field)) |*p| p.deinit(context.allocator); + @field(this, field) = val; + this.category = PropertyCategory.logical; + this.has_any = true; + } + + fn flush(this: *@This(), dest: *css.DeclarationList, context: *css.PropertyHandlerContext) void { + if (!this.has_any) return; + + this.has_any = false; + + const top = bun.take(&this.top); + const bottom = bun.take(&this.bottom); + const left = bun.take(&this.left); + const right = bun.take(&this.right); + const logical_supported = if (comptime shorthand_extra != null) !context.shouldCompileLogical(shorthand_extra.?.feature) else true; + + if ((shorthand_category != .logical or logical_supported) and top != null and bottom != null and left != null and right != null) { + dest.append( + context.allocator, + @unionInit( + Property, + @tagName(shorthand_prop), + .{ + .top = top.?, + .bottom = bottom.?, + .left = left.?, + .right = right.?, + }, + ), + ) catch bun.outOfMemory(); + } else { + if (top) |t| { + dest.append( + context.allocator, + @unionInit(Property, @tagName(top_prop), t), + ) catch bun.outOfMemory(); + } + + if (bottom) |b| { + dest.append( + context.allocator, + @unionInit(Property, @tagName(bottom_prop), b), + ) catch bun.outOfMemory(); + } + + if (left) |b| { + dest.append( + context.allocator, + @unionInit(Property, @tagName(left_prop), b), + ) catch bun.outOfMemory(); + } + + if (right) |b| { + dest.append( + context.allocator, + @unionInit(Property, @tagName(right_prop), b), + ) catch bun.outOfMemory(); + } + } + + var block_start = bun.take(&this.block_start); + var block_end = bun.take(&this.block_end); + var inline_start = bun.take(&this.inline_start); + var inline_end = bun.take(&this.inline_end); + + if (logical_supported) { + this.logicalSideHelper(&block_start, &block_end, "block_start", "block_end", block_shorthand, block_start_prop, block_end_prop, logical_supported, dest, context); + } else { + this.prop(&block_start, block_start_prop, top_prop, dest, context); + this.prop(&block_end, block_end_prop, bottom_prop, dest, context); + } + + if (logical_supported) { + this.logicalSideHelper(&inline_start, &inline_end, "inline_start", "inline_end", inline_shorthand, inline_start_prop, inline_end_prop, logical_supported, dest, context); + } else if (inline_start != null or inline_end != null) { + if (inline_start != null and inline_start.? == @field(Property, @tagName(inline_start_prop)) and inline_end != null and inline_end.? 
== @field(Property, @tagName(inline_end_prop)) and + @field(inline_start.?, @tagName(inline_start_prop)).eql(&@field(inline_end.?, @tagName(inline_end_prop)))) + { + this.prop(&inline_start, inline_start_prop, left_prop, dest, context); + this.prop(&inline_end, inline_end_prop, right_prop, dest, context); + } else { + this.logicalPropHelper(&inline_start, inline_start_prop, left_prop, right_prop, dest, context); + this.logicalPropHelper(&inline_end, inline_end_prop, right_prop, left_prop, dest, context); + } + } + } + + inline fn logicalPropHelper( + this: *@This(), + val: *?Property, + comptime logical: css.PropertyIdTag, + comptime ltr: css.PropertyIdTag, + comptime rtl: css.PropertyIdTag, + dest: *css.DeclarationList, + context: *css.PropertyHandlerContext, + ) void { + _ = this; // autofix + _ = dest; // autofix + if (val.*) |*_v| { + if (@as(css.PropertyIdTag, _v.*) == logical) { + const v = &@field(_v, @tagName(logical)); + context.addLogicalRule( + context.allocator, + @unionInit(Property, @tagName(ltr), v.deepClone(context.allocator)), + @unionInit(Property, @tagName(rtl), v.deepClone(context.allocator)), + ); + } else if (_v.* == .unparsed) { + const v = &_v.unparsed; + context.addLogicalRule( + context.allocator, + Property{ + .unparsed = v.withPropertyId(context.allocator, ltr), + }, + Property{ + .unparsed = v.withPropertyId(context.allocator, rtl), + }, + ); + } + } + } + + inline fn logicalSideHelper( + this: *@This(), + start: *?Property, + end: *?Property, + comptime start_name: []const u8, + comptime end_name: []const u8, + comptime shorthand_property: css.PropertyIdTag, + comptime start_prop: css.PropertyIdTag, + comptime end_prop: css.PropertyIdTag, + logical_supported: bool, + dest: *css.DeclarationList, + context: *css.PropertyHandlerContext, + ) void { + _ = this; // autofix + const shorthand_supported = logical_supported and if (comptime shorthand_extra != null) !context.shouldCompileLogical(shorthand_extra.?.shorthand_feature) else true; + + if (start.* != null and @as(PropertyIdTag, start.*.?) == start_prop and + end.* != null and @as(PropertyIdTag, end.*.?) == end_prop and + shorthand_supported) + { + const ValueType = shorthand_property.valueType(); + var value: ValueType = undefined; + @field(value, start_name) = @field(start.*.?, @tagName(start_prop)).deepClone(context.allocator); + @field(value, end_name) = @field(end.*.?, @tagName(end_prop)).deepClone(context.allocator); + if (std.meta.fields(ValueType).len != 2) { + @compileError(@typeName(ValueType) ++ " has more than two fields. This could cause undefined memory."); + } + + dest.append(context.allocator, @unionInit( + Property, + @tagName(shorthand_property), + value, + )) catch bun.outOfMemory(); + } else { + if (start.* != null) { + dest.append(context.allocator, start.*.?) catch bun.outOfMemory(); + } + if (end.* != null) { + dest.append(context.allocator, end.*.?) 
catch bun.outOfMemory(); + } + } + } + + inline fn prop( + this: *@This(), + val: *?Property, + comptime logical: css.PropertyIdTag, + comptime physical: css.PropertyIdTag, + dest: *css.DeclarationList, + context: *css.PropertyHandlerContext, + ) void { + _ = this; // autofix + if (val.*) |*v| { + if (@as(css.PropertyIdTag, v.*) == logical) { + dest.append( + context.allocator, + @unionInit( + Property, + @tagName(physical), + @field(v, @tagName(logical)), + ), + ) catch bun.outOfMemory(); + } else if (v.* == .unparsed) { + dest.append( + context.allocator, + Property{ + .unparsed = v.unparsed.withPropertyId(context.allocator, physical), + }, + ) catch bun.outOfMemory(); + } + } + } + }; +} diff --git a/src/css/properties/masking.zig b/src/css/properties/masking.zig new file mode 100644 index 0000000000..e4d1573ef8 --- /dev/null +++ b/src/css/properties/masking.zig @@ -0,0 +1,541 @@ +const std = @import("std"); +const bun = @import("root").bun; +const Allocator = std.mem.Allocator; +const ArrayList = std.ArrayListUnmanaged; + +pub const css = @import("../css_parser.zig"); + +const SmallList = css.SmallList; +const Printer = css.Printer; +const PrintErr = css.PrintErr; +const Error = css.Error; + +const ContainerName = css.css_rules.container.ContainerName; + +const LengthPercentage = css.css_values.length.LengthPercentage; +const CustomIdent = css.css_values.ident.CustomIdent; +const CSSString = css.css_values.string.CSSString; +const CSSNumber = css.css_values.number.CSSNumber; +const LengthPercentageOrAuto = css.css_values.length.LengthPercentageOrAuto; +const Size2D = css.css_values.size.Size2D; +const DashedIdent = css.css_values.ident.DashedIdent; +const Image = css.css_values.image.Image; +const CssColor = css.css_values.color.CssColor; +const Ratio = css.css_values.ratio.Ratio; +const Length = css.css_values.length.LengthValue; +const Rect = css.css_values.rect.Rect; +const NumberOrPercentage = css.css_values.percentage.NumberOrPercentage; +const CustomIdentList = css.css_values.ident.CustomIdentList; +const Angle = css.css_values.angle.Angle; +const Url = css.css_values.url.Url; +const LengthOrNumber = css.css_values.length.LengthOrNumber; +const Position = css.css_values.position.Position; + +const BorderRadius = css.css_properties.border_radius.BorderRadius; +const FillRule = css.css_properties.shape.FillRule; + +const BackgroundSize = css.css_properties.background.BackgroundSize; +const BackgroundRepeat = css.css_properties.background.BackgroundRepeat; +const BorderImageSlice = css.css_properties.border_image.BorderImageSlice; +const BorderImageSideWidth = css.css_properties.border_image.BorderImageSideWidth; +const BorderImageRepeat = css.css_properties.border_image.BorderImageRepeat; +const BorderImage = css.css_properties.border_image.BorderImage; + +/// A value for the [clip-path](https://www.w3.org/TR/css-masking-1/#the-clip-path) property. +const ClipPath = union(enum) { + /// No clip path. + None, + /// A url reference to an SVG path element. + Url: Url, + /// A basic shape, positioned according to the reference box. + Shape: struct { + /// A basic shape. + // todo_stuff.think_about_mem_mgmt + shape: *BasicShape, + /// A reference box that the shape is positioned according to. + reference_box: GeometryBox, + }, + /// A reference box. + Box: GeometryBox, +}; + +/// A [``](https://www.w3.org/TR/css-masking-1/#typedef-geometry-box) value +/// as used in the `mask-clip` and `clip-path` properties. 
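+// Example note for `ClipPath` above (a sketch, not from the source): a declaration
+// such as `clip-path: circle(40%) margin-box` would be expected to parse into
+// something like
+//
+//   ClipPath{ .Shape = .{
+//       .shape = <heap-allocated BasicShape{ .Circle = ... }>,
+//       .reference_box = .@"margin-box",
+//   } }
+//
+// whereas `clip-path: border-box` alone becomes `ClipPath{ .Box = .@"border-box" }`
+// and `clip-path: none` becomes `.None`.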
+pub const GeometryBox = enum {
+    /// The painted content is clipped to the border box.
+    @"border-box",
+    /// The painted content is clipped to the padding box.
+    @"padding-box",
+    /// The painted content is clipped to the content box.
+    @"content-box",
+    /// The painted content is clipped to the margin box.
+    @"margin-box",
+    /// The painted content is clipped to the object bounding box.
+    @"fill-box",
+    /// The painted content is clipped to the stroke bounding box.
+    @"stroke-box",
+    /// Uses the nearest SVG viewport as reference box.
+    @"view-box",
+
+    pub usingnamespace css.DefineEnumProperty(@This());
+
+    pub fn intoMaskClip(this: *const @This()) MaskClip {
+        return MaskClip{ .@"geometry-box" = this.* };
+    }
+
+    pub fn default() GeometryBox {
+        return .@"border-box";
+    }
+};
+
+/// A CSS [`<basic-shape>`](https://www.w3.org/TR/css-shapes-1/#basic-shape-functions) value.
+pub const BasicShape = union(enum) {
+    /// An inset rectangle.
+    Inset: InsetRect,
+    /// A circle.
+    Circle: Circle,
+    /// An ellipse.
+    Ellipse: Ellipse,
+    /// A polygon.
+    Polygon: Polygon,
+};
+
+/// An [`inset()`](https://www.w3.org/TR/css-shapes-1/#funcdef-inset) rectangle shape.
+const InsetRect = struct {
+    /// The rectangle.
+    rect: Rect(LengthPercentage),
+    /// A corner radius for the rectangle.
+    radius: BorderRadius,
+};
+
+/// A [`circle()`](https://www.w3.org/TR/css-shapes-1/#funcdef-circle) shape.
+pub const Circle = struct {
+    /// The radius of the circle.
+    radius: ShapeRadius,
+    /// The position of the center of the circle.
+    position: Position,
+};
+
+/// An [`ellipse()`](https://www.w3.org/TR/css-shapes-1/#funcdef-ellipse) shape.
+pub const Ellipse = struct {
+    /// The x-radius of the ellipse.
+    radius_x: ShapeRadius,
+    /// The y-radius of the ellipse.
+    radius_y: ShapeRadius,
+    /// The position of the center of the ellipse.
+    position: Position,
+};
+
+/// A [`polygon()`](https://www.w3.org/TR/css-shapes-1/#funcdef-polygon) shape.
+pub const Polygon = struct {
+    /// The fill rule used to determine the interior of the polygon.
+    fill_rule: FillRule,
+    /// The points of each vertex of the polygon.
+    points: ArrayList(Point),
+};
+
+/// A [`<shape-radius>`](https://www.w3.org/TR/css-shapes-1/#typedef-shape-radius) value
+/// that defines the radius of a `circle()` or `ellipse()` shape.
+pub const ShapeRadius = union(enum) {
+    /// An explicit length or percentage.
+    LengthPercentage: LengthPercentage,
+    /// The length from the center to the closest side of the box.
+    ClosestSide,
+    /// The length from the center to the farthest side of the box.
+    FarthestSide,
+};
+
+/// A point within a `polygon()` shape.
+///
+/// See [Polygon](Polygon).
+pub const Point = struct {
+    /// The x position of the point.
+    x: LengthPercentage,
+    /// The y position of the point.
+    y: LengthPercentage,
+};
+
+/// A value for the [mask-mode](https://www.w3.org/TR/css-masking-1/#the-mask-mode) property.
+pub const MaskMode = enum {
+    /// The luminance values of the mask image are used.
+    luminance,
+    /// The alpha values of the mask image are used.
+    alpha,
+    /// If an SVG source is used, the value matches the `mask-type` property. Otherwise, the alpha values are used.
+    @"match-source",
+
+    pub usingnamespace css.DefineEnumProperty(@This());
+
+    pub fn default() MaskMode {
+        return .@"match-source";
+    }
+};
+
+/// A value for the [mask-clip](https://www.w3.org/TR/css-masking-1/#the-mask-clip) property.
+pub const MaskClip = union(enum) {
+    /// A geometry box.
+    @"geometry-box": GeometryBox,
+    /// The painted content is not clipped.
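+    // Illustrative note (not from the source): `mask-clip: padding-box` is expected to
+    // parse as `.{ .@"geometry-box" = .@"padding-box" }`, while `no-clip` has no
+    // GeometryBox counterpart. That asymmetry is why the `Mask` shorthand parser below
+    // falls back to `origin.intoMaskClip()` when no explicit clip value is given.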
+ @"no-clip", + + pub usingnamespace @call(.auto, css.DeriveParse, .{@This()}); + pub usingnamespace @call(.auto, css.DeriveToCss, .{@This()}); + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } +}; + +/// A value for the [mask-composite](https://www.w3.org/TR/css-masking-1/#the-mask-composite) property. +pub const MaskComposite = enum { + /// The source is placed over the destination. + add, + /// The source is placed, where it falls outside of the destination. + subtract, + /// The parts of source that overlap the destination, replace the destination. + intersect, + /// The non-overlapping regions of source and destination are combined. + exclude, + + pub usingnamespace css.DefineEnumProperty(@This()); + + pub fn default() MaskComposite { + return .add; + } +}; + +/// A value for the [mask-type](https://www.w3.org/TR/css-masking-1/#the-mask-type) property. +pub const MaskType = enum { + /// The luminance values of the mask is used. + luminance, + /// The alpha values of the mask is used. + alpha, + + pub usingnamespace css.DefineEnumProperty(@This()); +}; + +/// A value for the [mask](https://www.w3.org/TR/css-masking-1/#the-mask) shorthand property. +pub const Mask = struct { + /// The mask image. + image: Image, + /// The position of the mask. + position: Position, + /// The size of the mask image. + size: BackgroundSize, + /// How the mask repeats. + repeat: BackgroundRepeat, + /// The box in which the mask is clipped. + clip: MaskClip, + /// The origin of the mask. + origin: GeometryBox, + /// How the mask is composited with the element. + composite: MaskComposite, + /// How the mask image is interpreted. 
+ mode: MaskMode, + + pub usingnamespace css.DefineListShorthand(@This()); + + pub const PropertyFieldMap = .{ + .image = css.PropertyIdTag.@"mask-image", + .position = css.PropertyIdTag.@"mask-position", + .size = css.PropertyIdTag.@"mask-size", + .repeat = css.PropertyIdTag.@"mask-repeat", + .clip = css.PropertyIdTag.@"mask-clip", + .origin = css.PropertyIdTag.@"mask-origin", + .composite = css.PropertyIdTag.@"mask-composite", + .mode = css.PropertyIdTag.@"mask-mode", + }; + + pub const VendorPrefixMap = .{ + .image = true, + .position = true, + .size = true, + .repeat = true, + .clip = true, + .origin = true, + }; + + pub fn parse(input: *css.Parser) css.Result(@This()) { + var image: ?Image = null; + var position: ?Position = null; + var size: ?BackgroundSize = null; + var repeat: ?BackgroundRepeat = null; + var clip: ?MaskClip = null; + var origin: ?GeometryBox = null; + var composite: ?MaskComposite = null; + var mode: ?MaskMode = null; + + while (true) { + if (image == null) { + if (@call(.auto, @field(Image, "parse"), .{input}).asValue()) |value| { + image = value; + continue; + } + } + + if (position == null) { + if (Position.parse(input).asValue()) |value| { + position = value; + size = input.tryParse(struct { + pub inline fn parseFn(i: *css.Parser) css.Result(BackgroundSize) { + if (i.expectDelim('/').asErr()) |e| return .{ .err = e }; + return BackgroundSize.parse(i); + } + }.parseFn, .{}).asValue(); + continue; + } + } + + if (repeat == null) { + if (BackgroundRepeat.parse(input).asValue()) |value| { + repeat = value; + continue; + } + } + + if (origin == null) { + if (GeometryBox.parse(input).asValue()) |value| { + origin = value; + continue; + } + } + + if (clip == null) { + if (MaskClip.parse(input).asValue()) |value| { + clip = value; + continue; + } + } + + if (composite == null) { + if (MaskComposite.parse(input).asValue()) |value| { + composite = value; + continue; + } + } + + if (mode == null) { + if (MaskMode.parse(input).asValue()) |value| { + mode = value; + continue; + } + } + + break; + } + + if (clip == null) { + if (origin) |o| { + clip = o.intoMaskClip(); + } + } + + return .{ .result = .{ + .image = image orelse Image.default(), + .position = position orelse Position.default(), + .repeat = repeat orelse BackgroundRepeat.default(), + .size = size orelse BackgroundSize.default(), + .origin = origin orelse .@"border-box", + .clip = clip orelse GeometryBox.@"border-box".intoMaskClip(), + .composite = composite orelse .add, + .mode = mode orelse .@"match-source", + } }; + } + + pub fn toCss(this: *const Mask, comptime W: type, dest: *css.Printer(W)) css.PrintErr!void { + try this.image.toCss(W, dest); + + if (!this.position.eql(&Position.default()) or !this.size.eql(&BackgroundSize.default())) { + try dest.writeChar(' '); + try this.position.toCss(W, dest); + + if (!this.size.eql(&BackgroundSize.default())) { + try dest.delim('/', true); + try this.size.toCss(W, dest); + } + } + + if (!this.repeat.eql(&BackgroundRepeat.default())) { + try dest.writeChar(' '); + try this.repeat.toCss(W, dest); + } + + if (!this.origin.eql(&GeometryBox.@"border-box") or !this.clip.eql(&GeometryBox.@"border-box".intoMaskClip())) { + try dest.writeChar(' '); + try this.origin.toCss(W, dest); + + if (!this.clip.eql(&this.origin.intoMaskClip())) { + try dest.writeChar(' '); + try this.clip.toCss(W, dest); + } + } + + if (!this.composite.eql(&MaskComposite.default())) { + try dest.writeChar(' '); + try this.composite.toCss(W, dest); + } + + if (!this.mode.eql(&MaskMode.default())) { + try 
dest.writeChar(' '); + try this.mode.toCss(W, dest); + } + + return; + } + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } +}; + +/// A value for the [mask-border-mode](https://www.w3.org/TR/css-masking-1/#the-mask-border-mode) property. +pub const MaskBorderMode = enum { + /// The luminance values of the mask image is used. + luminance, + /// The alpha values of the mask image is used. + alpha, + + pub usingnamespace css.DefineEnumProperty(@This()); + + pub fn default() @This() { + return .alpha; + } +}; + +/// A value for the [mask-border](https://www.w3.org/TR/css-masking-1/#the-mask-border) shorthand property. +/// A value for the [mask-border](https://www.w3.org/TR/css-masking-1/#the-mask-border) shorthand property. +pub const MaskBorder = struct { + /// The mask image. + source: Image, + /// The offsets that define where the image is sliced. + slice: BorderImageSlice, + /// The width of the mask image. + width: Rect(BorderImageSideWidth), + /// The amount that the image extends beyond the border box. + outset: Rect(LengthOrNumber), + /// How the mask image is scaled and tiled. + repeat: BorderImageRepeat, + /// How the mask image is interpreted. + mode: MaskBorderMode, + + pub usingnamespace css.DefineShorthand(@This(), css.PropertyIdTag.@"mask-border"); + + pub const PropertyFieldMap = .{ + .source = css.PropertyIdTag.@"mask-border-source", + .slice = css.PropertyIdTag.@"mask-border-slice", + .width = css.PropertyIdTag.@"mask-border-width", + .outset = css.PropertyIdTag.@"mask-border-outset", + .repeat = css.PropertyIdTag.@"mask-border-repeat", + .mode = css.PropertyIdTag.@"mask-border-mode", + }; + + pub fn parse(input: *css.Parser) css.Result(@This()) { + const Closure = struct { + mode: ?MaskBorderMode = null, + }; + var closure = Closure{ .mode = null }; + const border_image = BorderImage.parseWithCallback(input, &closure, struct { + inline fn callback(c: *Closure, p: *css.Parser) bool { + if (c.mode == null) { + if (p.tryParse(MaskBorderMode.parse, .{}).asValue()) |value| { + c.mode = value; + return true; + } + } + return false; + } + }.callback); + + if (border_image.isOk() or closure.mode != null) { + const bi = border_image.unwrapOr(comptime BorderImage.default()); + return .{ .result = MaskBorder{ + .source = bi.source, + .slice = bi.slice, + .width = bi.width, + .outset = bi.outset, + .repeat = bi.repeat, + .mode = closure.mode orelse MaskBorderMode.default(), + } }; + } else { + return .{ .err = input.newCustomError(.invalid_declaration) }; + } + } + + pub fn toCss(this: *const MaskBorder, comptime W: type, dest: *css.Printer(W)) css.PrintErr!void { + try BorderImage.toCssInternal( + &this.source, + &this.slice, + &this.width, + &this.outset, + &this.repeat, + W, + dest, + ); + if (!this.mode.eql(&MaskBorderMode.default())) { + try dest.writeChar(' '); + try this.mode.toCss(W, dest); + } + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } +}; + +/// A value for the [-webkit-mask-composite](https://developer.mozilla.org/en-US/docs/Web/CSS/-webkit-mask-composite) +/// property. +/// +/// See also [MaskComposite](MaskComposite). 
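+// Example note for the `Mask` and `MaskBorder` shorthands above (illustrative, not
+// from the source): the parsers accept their components in any order and fall back to
+// defaults for anything omitted, so `mask: url(mask.svg)` yields a `Mask` whose
+// position, size, repeat, origin, clip, composite, and mode fields all hold default
+// values. Serialization then omits components equal to their defaults, so the same
+// value is expected to round-trip back to just `url(mask.svg)`.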
+pub const WebKitMaskComposite = enum {
+    clear,
+    copy,
+    /// Equivalent to `add` in the standard `mask-composite` syntax.
+    @"source-over",
+    /// Equivalent to `intersect` in the standard `mask-composite` syntax.
+    @"source-in",
+    /// Equivalent to `subtract` in the standard `mask-composite` syntax.
+    @"source-out",
+    @"source-atop",
+    @"destination-over",
+    @"destination-in",
+    @"destination-out",
+    @"destination-atop",
+    /// Equivalent to `exclude` in the standard `mask-composite` syntax.
+    xor,
+
+    pub usingnamespace css.DefineEnumProperty(@This());
+};
+
+/// A value for the [-webkit-mask-source-type](https://github.com/WebKit/WebKit/blob/6eece09a1c31e47489811edd003d1e36910e9fd3/Source/WebCore/css/CSSProperties.json#L6578-L6587)
+/// property.
+///
+/// See also [MaskMode](MaskMode).
+pub const WebKitMaskSourceType = enum {
+    /// Equivalent to `match-source` in the standard `mask-mode` syntax.
+    auto,
+    /// The luminance values of the mask image are used.
+    luminance,
+    /// The alpha values of the mask image are used.
+    alpha,
+
+    pub usingnamespace css.DefineEnumProperty(@This());
+};
diff --git a/src/css/properties/outline.zig b/src/css/properties/outline.zig
new file mode 100644
index 0000000000..cf98f18c6f
--- /dev/null
+++ b/src/css/properties/outline.zig
@@ -0,0 +1,59 @@
+const std = @import("std");
+const bun = @import("root").bun;
+const Allocator = std.mem.Allocator;
+const ArrayList = std.ArrayListUnmanaged;
+
+pub const css = @import("../css_parser.zig");
+
+const SmallList = css.SmallList;
+const Printer = css.Printer;
+const PrintErr = css.PrintErr;
+const Error = css.Error;
+
+const ContainerName = css.css_rules.container.ContainerName;
+
+const LengthPercentage = css.css_values.length.LengthPercentage;
+const CustomIdent = css.css_values.ident.CustomIdent;
+const CSSString = css.css_values.string.CSSString;
+const CSSNumber = css.css_values.number.CSSNumber;
+const LengthPercentageOrAuto = css.css_values.length.LengthPercentageOrAuto;
+const Size2D = css.css_values.size.Size2D;
+const DashedIdent = css.css_values.ident.DashedIdent;
+const Image = css.css_values.image.Image;
+const CssColor = css.css_values.color.CssColor;
+const Ratio = css.css_values.ratio.Ratio;
+const Length = css.css_values.length.LengthValue;
+const Rect = css.css_values.rect.Rect;
+const NumberOrPercentage = css.css_values.percentage.NumberOrPercentage;
+const CustomIdentList = css.css_values.ident.CustomIdentList;
+const Angle = css.css_values.angle.Angle;
+const Url = css.css_values.url.Url;
+
+const GenericBorder = css.css_properties.border.GenericBorder;
+const LineStyle = css.css_properties.border.LineStyle;
+
+/// A value for the [outline](https://drafts.csswg.org/css-ui/#outline) shorthand property.
+pub const Outline = GenericBorder(OutlineStyle, 11);
+
+/// A value for the [outline-style](https://drafts.csswg.org/css-ui/#outline-style) property.
+pub const OutlineStyle = union(enum) {
+    /// The `auto` keyword.
+    auto: void,
+    /// A value equivalent to the `border-style` property.
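+    // Illustrative note (not from the source): via `DeriveParse`, `outline-style: dashed`
+    // is expected to become `OutlineStyle{ .line_style = .dashed }` (reusing the border
+    // `LineStyle` values, assuming it defines a `dashed` variant), while
+    // `outline-style: auto` becomes `.auto`. The `Outline` shorthand above then reuses
+    // the generic border machinery through `GenericBorder(OutlineStyle, 11)`.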
+ line_style: LineStyle, + + pub usingnamespace css.DeriveParse(@This()); + pub usingnamespace css.DeriveToCss(@This()); + + pub fn default() @This() { + return .{ .line_style = .none }; + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } +}; diff --git a/src/css/properties/overflow.zig b/src/css/properties/overflow.zig new file mode 100644 index 0000000000..39b246b9d4 --- /dev/null +++ b/src/css/properties/overflow.zig @@ -0,0 +1,96 @@ +const std = @import("std"); +const bun = @import("root").bun; +const Allocator = std.mem.Allocator; +const ArrayList = std.ArrayListUnmanaged; + +pub const css = @import("../css_parser.zig"); + +const SmallList = css.SmallList; +const Printer = css.Printer; +const PrintErr = css.PrintErr; +const Error = css.Error; + +const ContainerName = css.css_rules.container.ContainerName; + +const LengthPercentage = css.css_values.length.LengthPercentage; +const CustomIdent = css.css_values.ident.CustomIdent; +const CSSString = css.css_values.string.CSSString; +const CSSNumber = css.css_values.number.CSSNumber; +const LengthPercentageOrAuto = css.css_values.length.LengthPercentageOrAuto; +const Size2D = css.css_values.size.Size2D; +const DashedIdent = css.css_values.ident.DashedIdent; +const Image = css.css_values.image.Image; +const CssColor = css.css_values.color.CssColor; +const Ratio = css.css_values.ratio.Ratio; +const Length = css.css_values.length.LengthValue; +const Rect = css.css_values.rect.Rect; +const NumberOrPercentage = css.css_values.percentage.NumberOrPercentage; +const CustomIdentList = css.css_values.ident.CustomIdentList; +const Angle = css.css_values.angle.Angle; +const Url = css.css_values.url.Url; + +const GenericBorder = css.css_properties.border.GenericBorder; +const LineStyle = css.css_properties.border.LineStyle; + +/// A value for the [overflow](https://www.w3.org/TR/css-overflow-3/#overflow-properties) shorthand property. +pub const Overflow = struct { + /// The overflow mode for the x direction. + x: OverflowKeyword, + /// The overflow mode for the y direction. + y: OverflowKeyword, + + pub fn parse(input: *css.Parser) css.Result(Overflow) { + const x = switch (OverflowKeyword.parse(input)) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + const y = switch (input.tryParse(OverflowKeyword.parse, .{})) { + .result => |v| v, + else => x, + }; + return .{ .result = Overflow{ .x = x, .y = y } }; + } + + pub fn toCss(this: *const Overflow, comptime W: type, dest: *css.Printer(W)) css.PrintErr!void { + try this.x.toCss(W, dest); + if (this.y != this.x) { + try dest.writeChar(' '); + try this.y.toCss(W, dest); + } + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub inline fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } +}; +
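The `Overflow` shorthand above follows the usual two-value convention: `parse` falls back to the first keyword when the second is omitted, and `toCss` drops the second keyword when it equals the first, which is what lets `overflow: hidden hidden` minify to `overflow: hidden`. A rough, self-contained sketch of that round-trip, using a toy `Keyword`/`Shorthand` pair and plain string splitting in place of `css.Parser` and `OverflowKeyword`:

```zig
const std = @import("std");

// Toy keyword set standing in for OverflowKeyword; the real code consumes
// tokens from css.Parser rather than splitting on spaces.
const Keyword = enum { visible, hidden, clip, scroll, auto };

const Shorthand = struct {
    x: Keyword,
    y: Keyword,

    // Mirrors Overflow.parse: the y component defaults to x when absent.
    fn parse(input: []const u8) !Shorthand {
        var it = std.mem.tokenizeScalar(u8, input, ' ');
        const x = std.meta.stringToEnum(Keyword, it.next() orelse return error.Invalid) orelse return error.Invalid;
        const y = if (it.next()) |tok|
            std.meta.stringToEnum(Keyword, tok) orelse return error.Invalid
        else
            x;
        return .{ .x = x, .y = y };
    }

    // Mirrors Overflow.toCss: print y only when it differs from x.
    fn write(self: Shorthand, writer: anytype) !void {
        try writer.writeAll(@tagName(self.x));
        if (self.y != self.x) {
            try writer.writeByte(' ');
            try writer.writeAll(@tagName(self.y));
        }
    }
};

test "overflow shorthand round-trip" {
    var buf: [32]u8 = undefined;
    var stream = std.io.fixedBufferStream(&buf);

    const one = try Shorthand.parse("hidden");
    try std.testing.expectEqual(Keyword.hidden, one.y); // y copied from x

    const two = try Shorthand.parse("hidden auto");
    try two.write(stream.writer());
    try std.testing.expectEqualStrings("hidden auto", stream.getWritten());

    stream.reset();
    try (try Shorthand.parse("scroll scroll")).write(stream.writer());
    try std.testing.expectEqualStrings("scroll", stream.getWritten()); // equal keywords collapse
}
```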
+/// An [overflow](https://www.w3.org/TR/css-overflow-3/#overflow-properties) keyword +/// as used in the `overflow-x`, `overflow-y`, and `overflow` properties. +pub const OverflowKeyword = enum { + /// Overflowing content is visible. + visible, + /// Overflowing content is hidden. Programmatic scrolling is allowed. + hidden, + /// Overflowing content is clipped. Programmatic scrolling is not allowed. + clip, + /// The element is scrollable. + scroll, + /// Overflowing content scrolls if needed. + auto, + + pub usingnamespace css.DefineEnumProperty(@This()); +}; + +/// A value for the [text-overflow](https://www.w3.org/TR/css-overflow-3/#text-overflow) property. +pub const TextOverflow = enum { + /// Overflowing text is clipped. + clip, + /// Overflowing text is truncated with an ellipsis. + ellipsis, + + pub usingnamespace css.DefineEnumProperty(@This()); +}; diff --git a/src/css/properties/position.zig b/src/css/properties/position.zig new file mode 100644 index 0000000000..2eeb39147b --- /dev/null +++ b/src/css/properties/position.zig @@ -0,0 +1,107 @@ +const std = @import("std"); +const bun = @import("root").bun; +const Allocator = std.mem.Allocator; +const ArrayList = std.ArrayListUnmanaged; + +pub const css = @import("../css_parser.zig"); + +const SmallList = css.SmallList; +const Printer = css.Printer; +const PrintErr = css.PrintErr; +const Error = css.Error; + +const ContainerName = css.css_rules.container.ContainerName; + +const LengthPercentage = css.css_values.length.LengthPercentage; +const CustomIdent = css.css_values.ident.CustomIdent; +const CSSString = css.css_values.string.CSSString; +const CSSNumber = css.css_values.number.CSSNumber; +const LengthPercentageOrAuto = css.css_values.length.LengthPercentageOrAuto; +const Size2D = css.css_values.size.Size2D; +const DashedIdent = css.css_values.ident.DashedIdent; +const Image = css.css_values.image.Image; +const CssColor = css.css_values.color.CssColor; +const Ratio = css.css_values.ratio.Ratio; +const Length = css.css_values.length.LengthValue; +const Rect = css.css_values.rect.Rect; +const NumberOrPercentage = css.css_values.percentage.NumberOrPercentage; +const CustomIdentList = css.css_values.ident.CustomIdentList; +const Angle = css.css_values.angle.Angle; +const Url = css.css_values.url.Url; + +const GenericBorder = css.css_properties.border.GenericBorder; +const LineStyle = css.css_properties.border.LineStyle; + +/// A value for the [position](https://www.w3.org/TR/css-position-3/#position-property) property. +pub const Position = union(enum) { + /// The box is laid out in the document flow. + static, + /// The box is laid out in the document flow and offset from the resulting position. + relative, + /// The box is taken out of document flow and positioned in reference to its relative ancestor. + absolute, + /// Similar to relative but adjusted according to the ancestor scrollable element. + sticky: css.VendorPrefix, + /// The box is taken out of the document flow and positioned in reference to the page viewport.
+ fixed, + + pub fn parse(input: *css.Parser) css.Result(Position) { + const location = input.currentSourceLocation(); + const ident = switch (input.expectIdent()) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + + const PositionKeyword = enum { + static, + relative, + absolute, + fixed, + sticky, + @"-webkit-sticky", + }; + + const keyword_map = bun.ComptimeStringMap(PositionKeyword, .{ + .{ "static", .static }, + .{ "relative", .relative }, + .{ "absolute", .absolute }, + .{ "fixed", .fixed }, + .{ "sticky", .sticky }, + .{ "-webkit-sticky", .@"-webkit-sticky" }, + }); + + const keyword = keyword_map.get(ident) orelse { + return .{ .err = location.newUnexpectedTokenError(.{ .ident = ident }) }; + }; + + return .{ .result = switch (keyword) { + .static => .static, + .relative => .relative, + .absolute => .absolute, + .fixed => .fixed, + .sticky => .{ .sticky = css.VendorPrefix{ .none = true } }, + .@"-webkit-sticky" => .{ .sticky = css.VendorPrefix{ .webkit = true } }, + } }; + } + + pub fn toCss(this: *const Position, comptime W: type, dest: *css.Printer(W)) css.PrintErr!void { + return switch (this.*) { + .static => dest.writeStr("static"), + .relative => dest.writeStr("relative"), + .absolute => dest.writeStr("absolute"), + .fixed => dest.writeStr("fixed"), + .sticky => |prefix| { + try prefix.toCss(W, dest); + return dest.writeStr("sticky"); + }, + }; + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } +}; diff --git a/src/css/properties/prefix_handler.zig b/src/css/properties/prefix_handler.zig new file mode 100644 index 0000000000..a5ba88ff16 --- /dev/null +++ b/src/css/properties/prefix_handler.zig @@ -0,0 +1,142 @@ +const std = @import("std"); +const bun = @import("root").bun; +const Allocator = std.mem.Allocator; + +pub const css = @import("../css_parser.zig"); + +const CustomPropertyName = css.css_properties.CustomPropertyName; + +const Printer = css.Printer; +const PrintErr = css.PrintErr; +const VendorPrefix = css.VendorPrefix; +const Error = css.Error; + +const PropertyId = css.PropertyId; +const PropertyIdTag = css.PropertyIdTag; +const Property = css.Property; +const UnparsedProperty = css.css_properties.custom.UnparsedProperty; + +/// *NOTE* The struct field names must match their corresponding names in `Property`! 
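The note above matters because `FallbackHandler` below never names its properties explicitly: `handleProperty` runs an `inline for` over `std.meta.fields(...)` and uses each field name with `@field`/`@unionInit` to address the matching `Property` member, so a mismatched name simply never matches. A small, self-contained sketch of that comptime name-matching pattern, with a hypothetical two-member `Property` union and a handler that only records declaration indices:

```zig
const std = @import("std");

// Hypothetical miniature of the real Property union; only the shape matters here.
const Property = union(enum) {
    color: u32,
    @"text-shadow": f32,
    unparsed: void,
};

// Field names deliberately match Property member names, as the note requires.
const Handler = struct {
    color: ?usize = null,
    @"text-shadow": ?usize = null,

    fn handleProperty(this: *Handler, property: Property, index: usize) bool {
        // Comptime-unrolled loop: one branch per handler field, matched by name.
        inline for (std.meta.fields(Handler)) |field| {
            const tag = @field(std.meta.Tag(Property), field.name);
            if (std.meta.activeTag(property) == tag) {
                @field(this, field.name) = index;
                return true;
            }
        }
        return false;
    }
};

test "handler fields track property indices by name" {
    var handler = Handler{};
    try std.testing.expect(handler.handleProperty(.{ .color = 0xffffff }, 3));
    try std.testing.expect(!handler.handleProperty(.unparsed, 4));
    try std.testing.expectEqual(@as(?usize, 3), handler.color);
    try std.testing.expectEqual(@as(?usize, null), handler.@"text-shadow");
}
```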
+pub const FallbackHandler = struct { + color: ?usize = null, + @"text-shadow": ?usize = null, + // TODO: add these back plz + // filter: ?usize = null, + // @"backdrop-filter": ?usize = null, + // fill: ?usize = null, + // stroke: ?usize = null, + // @"caret-color": ?usize = null, + // caret: ?usize = null, + + const field_count = @typeInfo(FallbackHandler).Struct.fields.len; + + pub fn handleProperty( + this: *FallbackHandler, + property: *const Property, + dest: *css.DeclarationList, + context: *css.PropertyHandlerContext, + ) bool { + inline for (std.meta.fields(FallbackHandler)) |field| { + if (@intFromEnum(@field(PropertyIdTag, field.name)) == @intFromEnum(@as(PropertyIdTag, property.*))) { + const has_vendor_prefix = comptime PropertyIdTag.hasVendorPrefix(@field(PropertyIdTag, field.name)); + var val = if (comptime has_vendor_prefix) + @field(property, field.name)[0].deepClone(context.allocator) + else + @field(property, field.name).deepClone(context.allocator); + + if (@field(this, field.name) == null) { + const fallbacks = val.getFallbacks(context.allocator, context.targets); + const has_fallbacks = !fallbacks.isEmpty(); + + for (fallbacks.slice()) |fallback| { + dest.append( + context.allocator, + @unionInit( + Property, + field.name, + if (comptime has_vendor_prefix) + .{ fallback, @field(property, field.name)[1] } + else + fallback, + ), + ) catch bun.outOfMemory(); + } + if (comptime has_vendor_prefix) { + if (has_fallbacks and @field(property, field.name[1]).contains(VendorPrefix{ .none = true })) { + @field(property, field.name[1]) = css.VendorPrefix{ .none = true }; + } + } + } + + if (@field(this, field.name) == null or + context.targets.browsers != null and !val.isCompatible(context.targets.browsers.?)) + { + @field(this, field.name) = dest.items.len; + dest.append( + context.allocator, + @unionInit( + Property, + field.name, + if (comptime has_vendor_prefix) + .{ val, @field(property, field.name)[1] } + else + val, + ), + ) catch bun.outOfMemory(); + } else if (@field(this, field.name) != null) { + const index = @field(this, field.name).?; + dest.items[index] = @unionInit( + Property, + field.name, + if (comptime has_vendor_prefix) + .{ val, @field(property, field.name)[1] } + else + val, + ); + } else { + val.deinit(context.allocator); + } + + return true; + } + } + + if (@as(PropertyIdTag, property.*) == .unparsed) { + const val: *const UnparsedProperty = &property.unparsed; + var unparsed, const index = unparsed_and_index: { + inline for (std.meta.fields(FallbackHandler)) |field| { + if (@intFromEnum(@field(PropertyIdTag, field.name)) == @intFromEnum(val.property_id)) { + const has_vendor_prefix = comptime PropertyIdTag.hasVendorPrefix(@field(PropertyIdTag, field.name)); + const newval = newval: { + if (comptime has_vendor_prefix) { + if (@field(val.property_id, field.name)[1].contains(VendorPrefix{ .none = true })) + break :newval val.getPrefixed(context.targets, @field(css.prefixes.Feature, field.name)); + } + break :newval val.deepClone(context.allocator); + }; + break :unparsed_and_index .{ newval, &@field(this, field.name) }; + } + } + return false; + }; + + context.addUnparsedFallbacks(&unparsed); + if (index.*) |i| { + dest.items[i] = Property{ .unparsed = unparsed }; + } else { + index.* = dest.items.len; + dest.append(context.allocator, Property{ .unparsed = unparsed }) catch bun.outOfMemory(); + } + + return true; + } + + return false; + } + + pub fn finalize(this: *FallbackHandler, _: *css.DeclarationList, _: *css.PropertyHandlerContext) void { + inline for 
(std.meta.fields(FallbackHandler)) |field| { + @field(this, field.name) = null; + } + } +}; diff --git a/src/css/properties/properties.zig b/src/css/properties/properties.zig new file mode 100644 index 0000000000..4ec12ffa41 --- /dev/null +++ b/src/css/properties/properties.zig @@ -0,0 +1,1880 @@ +const std = @import("std"); +const bun = @import("root").bun; +const Allocator = std.mem.Allocator; +const css = @import("../css_parser.zig"); +const Printer = css.Printer; +const PrintErr = css.PrintErr; +const Position = position.Position; +const Error = css.Error; +const ArrayList = std.ArrayListUnmanaged; +const SmallList = css.SmallList; + +pub const CustomPropertyName = @import("./custom.zig").CustomPropertyName; + +pub const @"align" = @import("./align.zig"); +pub const animation = @import("./animation.zig"); +pub const background = @import("./background.zig"); +pub const border = @import("./border.zig"); +pub const border_image = @import("./border_image.zig"); +pub const border_radius = @import("./border_radius.zig"); +pub const box_shadow = @import("./box_shadow.zig"); +pub const contain = @import("./contain.zig"); +pub const css_modules = @import("./css_modules.zig"); +pub const custom = @import("./custom.zig"); +pub const display = @import("./display.zig"); +pub const effects = @import("./effects.zig"); +pub const flex = @import("./flex.zig"); +pub const font = @import("./font.zig"); +pub const list = @import("./list.zig"); +pub const margin_padding = @import("./margin_padding.zig"); +pub const masking = @import("./masking.zig"); +pub const outline = @import("./outline.zig"); +pub const overflow = @import("./overflow.zig"); +pub const position = @import("./position.zig"); +pub const prefix_handler = @import("./prefix_handler.zig"); +pub const shape = @import("./shape.zig"); +pub const size = @import("./size.zig"); +pub const svg = @import("./svg.zig"); +pub const text = @import("./text.zig"); +pub const transform = @import("./transform.zig"); +pub const transition = @import("./transition.zig"); +pub const ui = @import("./ui.zig"); + +const generated = @import("./properties_generated.zig"); +pub const PropertyId = generated.PropertyId; +pub const Property = generated.Property; +pub const PropertyIdTag = generated.PropertyIdTag; + +/// A [CSS-wide keyword](https://drafts.csswg.org/css-cascade-5/#defaulting-keywords). +pub const CSSWideKeyword = enum { + /// The property's initial value. + initial, + /// The property's computed value on the parent element. + inherit, + /// Either inherit or initial depending on whether the property is inherited. + unset, + /// Rolls back the cascade to the cascaded value of the earlier origin. + revert, + /// Rolls back the cascade to the value of the previous cascade layer. + @"revert-layer", + + pub usingnamespace css.DefineEnumProperty(@This()); +}; + +// pub fn DefineProperties(comptime properties: anytype) type { +// const input_fields: []const std.builtin.Type.StructField = std.meta.fields(@TypeOf(properties)); +// const total_fields_len = input_fields.len + 2; // +2 for the custom property and the `all` property +// const TagSize = u16; +// const PropertyIdT, const max_enum_name_length: usize = brk: { +// var max: usize = 0; +// var property_id_type = std.builtin.Type.Enum{ +// .tag_type = TagSize, +// .is_exhaustive = true, +// .decls = &.{}, +// .fields = undefined, +// }; +// var enum_fields: [total_fields_len]std.builtin.Type.EnumField = undefined; +// for (input_fields, 0..) 
|field, i| { +// enum_fields[i] = .{ +// .name = field.name, +// .value = i, +// }; +// max = @max(max, field.name.len); +// } +// enum_fields[input_fields.len] = std.builtin.Type.EnumField{ +// .name = "all", +// .value = input_fields.len, +// // }; +// // enum_fields[input_fields.len + 1] = std.builtin.Type.EnumField{ +// .name = "custom", +// .value = input_fields.len + 1, +// }; +// property_id_type.fields = &enum_fields; +// break :brk .{ property_id_type, max }; +// }; + +// const types: []const type = types: { +// var types: [total_fields_len]type = undefined; +// inline for (input_fields, 0..) |field, i| { +// types[i] = @field(properties, field.name).ty; + +// if (std.mem.eql(u8, field.name, "transition-property")) { +// types[i] = struct { SmallList(PropertyIdT, 1), css.VendorPrefix }; +// } + +// // Validate it + +// const value = @field(properties, field.name); +// const ValueT = @TypeOf(value); +// const value_ty = value.ty; +// const ValueTy = @TypeOf(value_ty); +// const value_ty_info = @typeInfo(ValueTy); +// // If `valid_prefixes` is defined, the `ty` should be a two item tuple where +// // the second item is of type `VendorPrefix` +// if (@hasField(ValueT, "valid_prefixes")) { +// if (!value_ty_info.Struct.is_tuple) { +// @compileError("Expected a tuple type for `ty` when `valid_prefixes` is defined"); +// } +// if (value_ty_info.Struct.fields[1].type != css.VendorPrefix) { +// @compileError("Expected the second item in the tuple to be of type `VendorPrefix`"); +// } +// } +// } +// types[input_fields.len] = void; +// types[input_fields.len + 1] = CustomPropertyName; +// break :types &types; +// }; +// const PropertyT = PropertyT: { +// var union_fields: [total_fields_len]std.builtin.Type.UnionField = undefined; +// inline for (input_fields, 0..) 
|input_field, i| { +// const Ty = types[i]; +// union_fields[i] = std.builtin.Type.UnionField{ +// .alignment = @alignOf(Ty), +// .type = type, +// .name = input_field.name, +// }; +// } +// union_fields[input_fields.len] = std.builtin.Type.UnionField{ +// .alignment = 0, +// .type = void, +// .name = "all", +// }; +// union_fields[input_fields.len + 1] = std.builtin.Type.UnionField{ +// .alignment = @alignOf(CustomPropertyName), +// .type = CustomPropertyName, +// .name = "custom", +// }; +// break :PropertyT std.builtin.Type.Union{ +// .layout = .auto, +// .tag_type = PropertyIdT, +// .decls = &.{}, +// .fields = union_fields, +// }; +// }; +// _ = PropertyT; // autofix +// return struct { +// pub const PropertyId = PropertyIdT; + +// pub fn propertyIdEq(lhs: PropertyId, rhs: PropertyId) bool { +// _ = lhs; // autofix +// _ = rhs; // autofix +// @compileError(css.todo_stuff.depth); +// } + +// pub fn propertyIdIsShorthand(id: PropertyId) bool { +// inline for (std.meta.fields(PropertyId)) |field| { +// if (field.value == @intFromEnum(id)) { +// const is_shorthand = if (@hasField(@TypeOf(@field(properties, field.name)), "shorthand")) +// @field(@field(properties, field.name), "shorthand") +// else +// false; +// return is_shorthand; +// } +// } +// return false; +// } + +// /// PropertyId.prefix() +// pub fn propertyIdPrefix(id: PropertyId) css.VendorPrefix { +// _ = id; // autofix +// @compileError(css.todo_stuff.depth); +// } + +// /// PropertyId.name() +// pub fn propertyIdName(id: PropertyId) []const u8 { +// _ = id; // autofix +// @compileError(css.todo_stuff.depth); +// } + +// pub fn propertyIdFromStr(name: []const u8) PropertyId { +// const prefix, const name_ref = if (bun.strings.startsWithCaseInsensitiveAscii(name, "-webkit-")) +// .{ css.VendorPrefix.webkit, name[8..] } +// else if (bun.strings.startsWithCaseInsensitiveAscii(name, "-moz-")) +// .{ css.VendorPrefix.moz, name[5..] } +// else if (bun.strings.startsWithCaseInsensitiveAscii(name, "-o-")) +// .{ css.VendorPrefix.moz, name[3..] } +// else if (bun.strings.startsWithCaseInsensitiveAscii(name, "-ms-")) +// .{ css.VendorPrefix.moz, name[4..] 
} +// else +// .{ css.VendorPrefix.none, name }; + +// return parsePropertyIdFromNameAndPrefix(name_ref, prefix) catch .{ +// .custom = CustomPropertyName.fromStr(name), +// }; +// } + +// pub fn parsePropertyIdFromNameAndPrefix(name: []const u8, prefix: css.VendorPrefix) Error!PropertyId { +// var buffer: [max_enum_name_length]u8 = undefined; +// if (name.len > buffer.len) { +// // TODO: actual source just returns empty Err(()) +// return Error.InvalidPropertyName; +// } +// const lower = bun.strings.copyLowercase(name, buffer[0..name.len]); +// inline for (std.meta.fields(PropertyIdT)) |field_| { +// const field: std.builtin.Type.EnumField = field_; +// // skip custom +// if (bun.strings.eql(field.name, "custom")) continue; + +// if (bun.strings.eql(lower, field.name)) { +// const prop = @field(properties, field.name); +// const allowed_prefixes = allowed_prefixes: { +// var prefixes: css.VendorPrefix = if (@hasField(@TypeOf(prop), "unprefixed") and !prop.unprefixed) +// css.VendorPrefix.empty() +// else +// css.VendorPrefix{ .none = true }; + +// if (@hasField(@TypeOf(prop), "valid_prefixes")) { +// prefixes = css.VendorPrefix.bitwiseOr(prefixes, prop.valid_prefixes); +// } + +// break :allowed_prefixes prefixes; +// }; + +// if (allowed_prefixes.contains(prefix)) return @enumFromInt(field.value); +// } +// } +// return Error.InvalidPropertyName; +// } +// }; +// } + +// /// SmallList(PropertyId) +// const SmallListPropertyIdPlaceholder = struct {}; + +// pub const Property = DefineProperties(.{ +// .@"background-color" = .{ +// .ty = CssColor, +// }, +// .@"background-image" = .{ +// // PERF: make this equivalent to SmallVec<[_; 1]> +// .ty = SmallList(Image, 1), +// }, +// .@"background-position-x" = .{ +// // PERF: make this equivalent to SmallVec<[_; 1]> +// .ty = SmallList(css_values.position.HorizontalPosition, 1), +// }, +// .@"background-position-y" = .{ +// // PERF: make this equivalent to SmallVec<[_; 1]> +// .ty = SmallList(css_values.position.HorizontalPosition, 1), +// }, +// .@"background-position" = .{ +// // PERF: make this equivalent to SmallVec<[_; 1]> +// .ty = SmallList(background.BackgroundPosition, 1), +// .shorthand = true, +// }, +// .@"background-size" = .{ +// // PERF: make this equivalent to SmallVec<[_; 1]> +// .ty = SmallList(background.BackgroundSize, 1), +// }, +// .@"background-repeat" = .{ +// // PERF: make this equivalent to SmallVec<[_; 1]> +// .ty = SmallList(background.BackgroundSize, 1), +// }, +// .@"background-attachment" = .{ +// // PERF: make this equivalent to SmallVec<[_; 1]> +// .ty = SmallList(background.BackgroundAttachment, 1), +// }, +// .@"background-clip" = .{ +// // PERF: make this equivalent to SmallVec<[_; 1]> +// .ty = struct { +// SmallList(background.BackgroundAttachment, 1), +// css.VendorPrefix, +// }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// .moz = true, +// }, +// }, +// .@"background-origin" = .{ +// // PERF: make this equivalent to SmallVec<[_; 1]> +// .ty = SmallList(background.BackgroundOrigin, 1), +// }, +// .background = .{ +// // PERF: make this equivalent to SmallVec<[_; 1]> +// .ty = SmallList(background.Background, 1), +// }, + +// .@"box-shadow" = .{ +// // PERF: make this equivalent to SmallVec<[_; 1]> +// .ty = struct { SmallList(box_shadow.BoxShadow, 1), css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// .moz = true, +// }, +// }, +// .opacity = .{ +// .ty = css.css_values.alpha.AlphaValue, +// }, +// .color = .{ +// .ty = CssColor, +// }, +// .display = .{ 
+// .ty = display.Display, +// }, +// .visibility = .{ +// .ty = display.Visibility, +// }, + +// .width = .{ +// .ty = size.Size, +// .logical_group = .{ .ty = LogicalGroup.size, .category = PropertyCategory.physical }, +// }, +// .height = .{ +// .ty = size.Size, +// .logical_group = .{ .ty = LogicalGroup.size, .category = PropertyCategory.physical }, +// }, +// .@"min-width" = .{ +// .ty = size.Size, +// .logical_group = .{ .ty = LogicalGroup.min_size, .category = PropertyCategory.physical }, +// }, +// .@"min-height" = .{ +// .ty = size.Size, +// .logical_group = .{ .ty = LogicalGroup.min_size, .category = PropertyCategory.physical }, +// }, +// .@"max-width" = .{ +// .ty = size.MaxSize, +// .logical_group = .{ .ty = LogicalGroup.max_size, .category = PropertyCategory.physical }, +// }, +// .@"max-height" = .{ +// .ty = size.MaxSize, +// .logical_group = .{ .ty = LogicalGroup.max_size, .category = PropertyCategory.physical }, +// }, +// .@"block-size" = .{ +// .ty = size.Size, +// .logical_group = .{ .ty = LogicalGroup.size, .category = PropertyCategory.logical }, +// }, +// .@"inline-size" = .{ +// .ty = size.Size, +// .logical_group = .{ .ty = LogicalGroup.size, .category = PropertyCategory.logical }, +// }, +// .min_block_size = .{ +// .ty = size.Size, +// .logical_group = .{ .ty = LogicalGroup.min_size, .category = PropertyCategory.logical }, +// }, +// .@"min-inline-size" = .{ +// .ty = size.Size, +// .logical_group = .{ .ty = LogicalGroup.min_size, .category = PropertyCategory.logical }, +// }, +// .@"max-block-size" = .{ +// .ty = size.MaxSize, +// .logical_group = .{ .ty = LogicalGroup.max_size, .category = PropertyCategory.logical }, +// }, +// .@"max-inline-size" = .{ +// .ty = size.MaxSize, +// .logical_group = .{ .ty = LogicalGroup.max_size, .category = PropertyCategory.logical }, +// }, +// .@"box-sizing" = .{ +// .ty = struct { size.BoxSizing, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// .moz = true, +// }, +// }, +// .@"aspect-ratio" = .{ +// .ty = size.AspectRatio, +// }, + +// .overflow = .{ +// .ty = overflow.Overflow, +// .shorthand = true, +// }, +// .@"overflow-x" = .{ +// .ty = overflow.OverflowKeyword, +// }, +// .@"overflow-y" = .{ +// .ty = overflow.OverflowKeyword, +// }, +// .@"text-overflow" = .{ +// .ty = struct { overflow.TextOverflow, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .o = true, +// }, +// }, + +// // https://www.w3.org/TR/2020/WD-css-position-3-20200519 +// .position = .{ +// .ty = position.Position, +// }, +// .top = .{ +// .ty = LengthPercentageOrAuto, +// .logical_group = .{ .ty = LogicalGroup.inset, .category = PropertyCategory.physical }, +// }, +// .bottom = .{ +// .ty = LengthPercentageOrAuto, +// .logical_group = .{ .ty = LogicalGroup.inset, .category = PropertyCategory.physical }, +// }, +// .left = .{ +// .ty = LengthPercentageOrAuto, +// .logical_group = .{ .ty = LogicalGroup.inset, .category = PropertyCategory.physical }, +// }, +// .right = .{ +// .ty = LengthPercentageOrAuto, +// .logical_group = .{ .ty = LogicalGroup.inset, .category = PropertyCategory.physical }, +// }, +// .@"inset-block-start" = .{ +// .ty = LengthPercentageOrAuto, +// .logical_group = .{ .ty = LogicalGroup.inset, .category = PropertyCategory.logical }, +// }, +// .@"inset-block-end" = .{ +// .ty = LengthPercentageOrAuto, +// .logical_group = .{ .ty = LogicalGroup.inset, .category = PropertyCategory.logical }, +// }, +// .@"inset-inline-start" = .{ +// .ty = LengthPercentageOrAuto, +// .logical_group = 
.{ .ty = LogicalGroup.inset, .category = PropertyCategory.logical }, +// }, +// .@"inset-inline-end" = .{ +// .ty = LengthPercentageOrAuto, +// .logical_group = .{ .ty = LogicalGroup.inset, .category = PropertyCategory.logical }, +// }, +// .@"inset-block" = .{ +// .ty = margin_padding.InsetBlock, +// .shorthand = true, +// }, +// .@"inset-inline" = .{ +// .ty = margin_padding.InsetInline, +// .shorthand = true, +// }, +// .inset = .{ +// .ty = margin_padding.Inset, +// .shorthand = true, +// }, + +// .@"border-spacing" = .{ +// .ty = css.css_values.size.Size(Length), +// }, + +// .@"border-top-color" = .{ +// .ty = CssColor, +// .logical_group = .{ .ty = LogicalGroup.border_color, .category = PropertyCategory.physical }, +// }, +// .@"border-bottom-color" = .{ +// .ty = CssColor, +// .logical_group = .{ .ty = LogicalGroup.border_color, .category = PropertyCategory.physical }, +// }, +// .@"border-left-color" = .{ +// .ty = CssColor, +// .logical_group = .{ .ty = LogicalGroup.border_color, .category = PropertyCategory.physical }, +// }, +// .@"border-right-color" = .{ +// .ty = CssColor, +// .logical_group = .{ .ty = LogicalGroup.border_color, .category = PropertyCategory.physical }, +// }, +// .@"border-block-start-color" = .{ +// .ty = CssColor, +// .logical_group = .{ .ty = LogicalGroup.border_color, .category = PropertyCategory.logical }, +// }, +// .@"border-block-end-color" = .{ +// .ty = CssColor, +// .logical_group = .{ .ty = LogicalGroup.border_color, .category = PropertyCategory.logical }, +// }, +// .@"border-inline-start-color" = .{ +// .ty = CssColor, +// .logical_group = .{ .ty = LogicalGroup.border_color, .category = PropertyCategory.logical }, +// }, +// .@"border-inline-end-color" = .{ +// .ty = CssColor, +// .logical_group = .{ .ty = LogicalGroup.border_color, .category = PropertyCategory.logical }, +// }, + +// .@"border-top-style" = .{ +// .ty = border.LineStyle, +// .logical_group = .{ .ty = LogicalGroup.border_style, .category = PropertyCategory.physical }, +// }, +// .@"border-bottom-style" = .{ +// .ty = border.LineStyle, +// .logical_group = .{ .ty = LogicalGroup.border_style, .category = PropertyCategory.physical }, +// }, +// .@"border-left-style" = .{ +// .ty = border.LineStyle, +// .logical_group = .{ .ty = LogicalGroup.border_style, .category = PropertyCategory.physical }, +// }, +// .@"border-right-style" = .{ +// .ty = border.LineStyle, +// .logical_group = .{ .ty = LogicalGroup.border_style, .category = PropertyCategory.physical }, +// }, +// .@"border-block-start-style" = .{ +// .ty = border.LineStyle, +// .logical_group = .{ .ty = LogicalGroup.border_style, .category = PropertyCategory.logical }, +// }, +// .@"border-block-end-style" = .{ +// .ty = border.LineStyle, +// .logical_group = .{ .ty = LogicalGroup.border_style, .category = PropertyCategory.logical }, +// }, +// .@"border-inline-start-style" = .{ +// .ty = border.LineStyle, +// .logical_group = .{ .ty = LogicalGroup.border_style, .category = PropertyCategory.logical }, +// }, +// .@"border-inline-end-style" = .{ +// .ty = border.LineStyle, +// .logical_group = .{ .ty = LogicalGroup.border_style, .category = PropertyCategory.logical }, +// }, + +// .@"border-top-width" = .{ +// .ty = BorderSideWidth, +// .logical_group = .{ .ty = LogicalGroup.border_width, .category = PropertyCategory.physical }, +// }, +// .@"border-bottom-width" = .{ +// .ty = BorderSideWidth, +// .logical_group = .{ .ty = LogicalGroup.border_width, .category = PropertyCategory.physical }, +// }, +// .@"border-left-width" = .{ 
+// .ty = BorderSideWidth, +// .logical_group = .{ .ty = LogicalGroup.border_width, .category = PropertyCategory.physical }, +// }, +// .@"border-right-width" = .{ +// .ty = BorderSideWidth, +// .logical_group = .{ .ty = LogicalGroup.border_width, .category = PropertyCategory.physical }, +// }, +// .@"border-block-start-width" = .{ +// .ty = BorderSideWidth, +// .logical_group = .{ .ty = LogicalGroup.border_width, .category = PropertyCategory.logical }, +// }, +// .@"border-block-end-width" = .{ +// .ty = BorderSideWidth, +// .logical_group = .{ .ty = LogicalGroup.border_width, .category = PropertyCategory.logical }, +// }, +// .@"border-inline-start-width" = .{ +// .ty = BorderSideWidth, +// .logical_group = .{ .ty = LogicalGroup.border_width, .category = PropertyCategory.logical }, +// }, +// .@"border-inline-end-width" = .{ +// .ty = BorderSideWidth, +// .logical_group = .{ .ty = LogicalGroup.border_width, .category = PropertyCategory.logical }, +// }, + +// .@"border-top-left-radius" = .{ +// .ty = struct { Size2D(LengthPercentage), css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// .moz = true, +// }, +// .logical_group = .{ .ty = LogicalGroup.border_radius, .category = PropertyCategory.physical }, +// }, +// .@"border-top-right-radius" = .{ +// .ty = struct { Size2D(LengthPercentage), css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// .moz = true, +// }, +// .logical_group = .{ .ty = LogicalGroup.border_radius, .category = PropertyCategory.physical }, +// }, +// .@"border-bottom-left-radius" = .{ +// .ty = struct { Size2D(LengthPercentage), css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// .moz = true, +// }, +// .logical_group = .{ .ty = LogicalGroup.border_radius, .category = PropertyCategory.physical }, +// }, +// .@"border-bottom-right-radius" = .{ +// .ty = struct { Size2D(LengthPercentage), css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// .moz = true, +// }, +// .logical_group = .{ .ty = LogicalGroup.border_radius, .category = PropertyCategory.physical }, +// }, +// .@"border-start-start-radius" = .{ +// .ty = Size2D(LengthPercentage), +// .logical_group = .{ .ty = LogicalGroup.border_radius, .category = PropertyCategory.logical }, +// }, +// .@"border-start-end-radius" = .{ +// .ty = Size2D(LengthPercentage), +// .logical_group = .{ .ty = LogicalGroup.border_radius, .category = PropertyCategory.logical }, +// }, +// .@"border-end-start-radius" = .{ +// .ty = Size2D(LengthPercentage), +// .logical_group = .{ .ty = LogicalGroup.border_radius, .category = PropertyCategory.logical }, +// }, +// .@"border-end-end-radius" = .{ +// .ty = Size2D(LengthPercentage), +// .logical_group = .{ .ty = LogicalGroup.border_radius, .category = PropertyCategory.logical }, +// }, +// .@"border-radius" = .{ +// .ty = struct { BorderRadius, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// .moz = true, +// }, +// .shorthand = true, +// }, + +// .@"border-image-source" = .{ +// .ty = Image, +// }, +// .@"border-image-outset" = .{ +// .ty = Rect(LengthOrNumber), +// }, +// .@"border-image-repeat" = .{ +// .ty = BorderImageRepeat, +// }, +// .@"border-image-width" = .{ +// .ty = Rect(BorderImageSideWidth), +// }, +// .@"border-image-slice" = .{ +// .ty = BorderImageSlice, +// }, +// .@"border-image" = .{ +// .ty = struct { BorderImage, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// .moz = true, +// 
.o = true, +// }, +// .shorthand = true, +// }, + +// .@"border-color" = .{ +// .ty = BorderColor, +// .shorthand = true, +// }, +// .@"border-style" = .{ +// .ty = BorderStyle, +// .shorthand = true, +// }, +// .@"border-width" = .{ +// .ty = BorderWidth, +// .shorthand = true, +// }, + +// .@"border-block-color" = .{ +// .ty = BorderBlockColor, +// .shorthand = true, +// }, +// .@"border-block-style" = .{ +// .ty = BorderBlockStyle, +// .shorthand = true, +// }, +// .@"border-block-width" = .{ +// .ty = BorderBlockWidth, +// .shorthand = true, +// }, + +// .@"border-inline-color" = .{ +// .ty = BorderInlineColor, +// .shorthand = true, +// }, +// .@"border-inline-style" = .{ +// .ty = BorderInlineStyle, +// .shorthand = true, +// }, +// .@"border-inline-width" = .{ +// .ty = BorderInlineWidth, +// .shorthand = true, +// }, + +// .border = .{ +// .ty = Border, +// .shorthand = true, +// }, +// .@"border-top" = .{ +// .ty = BorderTop, +// .shorthand = true, +// }, +// .@"border-bottom" = .{ +// .ty = BorderBottom, +// .shorthand = true, +// }, +// .@"border-left" = .{ +// .ty = BorderLeft, +// .shorthand = true, +// }, +// .@"border-right" = .{ +// .ty = BorderRight, +// .shorthand = true, +// }, +// .@"border-block" = .{ +// .ty = BorderBlock, +// .shorthand = true, +// }, +// .@"border-block-start" = .{ +// .ty = BorderBlockStart, +// .shorthand = true, +// }, +// .@"border-block-end" = .{ +// .ty = BorderBlockEnd, +// .shorthand = true, +// }, +// .@"border-inline" = .{ +// .ty = BorderInline, +// .shorthand = true, +// }, +// .@"border-inline-start" = .{ +// .ty = BorderInlineStart, +// .shorthand = true, +// }, +// .@"border-inline-end" = .{ +// .ty = BorderInlineEnd, +// .shorthand = true, +// }, + +// .outline = .{ +// .ty = Outline, +// .shorthand = true, +// }, +// .@"outline-color" = .{ +// .ty = CssColor, +// }, +// .@"outline-style" = .{ +// .ty = OutlineStyle, +// }, +// .@"outline-width" = .{ +// .ty = BorderSideWidth, +// }, + +// // Flex properties: https://www.w3.org/TR/2018/CR-css-flexbox-1-20181119 +// .@"flex-direction" = .{ +// .ty = struct { FlexDirection, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// .ms = true, +// }, +// }, +// .@"flex-wrap" = .{ +// .ty = struct { FlexWrap, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// .ms = true, +// }, +// }, +// .@"flex-flow" = .{ +// .ty = struct { FlexFlow, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// .ms = true, +// }, +// .shorthand = true, +// }, +// .@"flex-grow" = .{ +// .ty = struct { CSSNumber, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// }, +// }, +// .@"flex-shrink" = .{ +// .ty = struct { CSSNumber, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// }, +// }, +// .@"flex-basis" = .{ +// .ty = struct { LengthPercentageOrAuto, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// }, +// }, +// .flex = .{ +// .ty = struct { Flex, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// .ms = true, +// }, +// .shorthand = true, +// }, +// .order = .{ +// .ty = struct { CSSInteger, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// }, +// }, + +// // Align properties: https://www.w3.org/TR/2020/WD-css-align-3-20200421 +// .@"align-content" = .{ +// .ty = struct { AlignContent, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = 
true, +// }, +// }, +// .@"justify-content" = .{ +// .ty = struct { JustifyContent, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// }, +// }, +// .@"place-content" = .{ +// .ty = PlaceContent, +// .shorthand = true, +// }, +// .@"align-self" = .{ +// .ty = struct { AlignSelf, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// }, +// }, +// .@"justify-self" = .{ +// .ty = JustifySelf, +// }, +// .@"place-self" = .{ +// .ty = PlaceSelf, +// .shorthand = true, +// }, +// .@"align-items" = .{ +// .ty = struct { AlignItems, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// }, +// }, +// .@"justify-items" = .{ +// .ty = JustifyItems, +// }, +// .@"place-items" = .{ +// .ty = PlaceItems, +// .shorthand = true, +// }, +// .@"row-gap" = .{ +// .ty = GapValue, +// }, +// .@"column-gap" = .{ +// .ty = GapValue, +// }, +// .gap = .{ +// .ty = Gap, +// .shorthand = true, +// }, + +// // Old flex (2009): https://www.w3.org/TR/2009/WD-css3-flexbox-20090723/ +// .@"box-orient" = .{ +// .ty = struct { BoxOrient, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// .moz = true, +// }, +// .unprefixed = false, +// }, +// .@"box-direction" = .{ +// .ty = struct { BoxDirection, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// .moz = true, +// }, +// .unprefixed = false, +// }, +// .@"box-ordinal-group" = .{ +// .ty = struct { CSSInteger, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// .moz = true, +// }, +// .unprefixed = false, +// }, +// .@"box-align" = .{ +// .ty = struct { BoxAlign, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// .moz = true, +// }, +// .unprefixed = false, +// }, +// .@"box-flex" = .{ +// .ty = struct { CSSNumber, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// .moz = true, +// }, +// .unprefixed = false, +// }, +// .@"box-flex-group" = .{ +// .ty = struct { CSSInteger, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// }, +// .unprefixed = false, +// }, +// .@"box-pack" = .{ +// .ty = struct { BoxPack, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// .moz = true, +// }, +// .unprefixed = false, +// }, +// .@"box-lines" = .{ +// .ty = struct { BoxLines, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// .moz = true, +// }, +// .unprefixed = false, +// }, + +// // Old flex (2012): https://www.w3.org/TR/2012/WD-css3-flexbox-20120322/ +// .@"flex-pack" = .{ +// .ty = struct { FlexPack, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .ms = true, +// }, +// .unprefixed = false, +// }, +// .@"flex-order" = .{ +// .ty = struct { CSSInteger, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .ms = true, +// }, +// .unprefixed = false, +// }, +// .@"flex-align" = .{ +// .ty = struct { BoxAlign, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .ms = true, +// }, +// .unprefixed = false, +// }, +// .@"flex-item-align" = .{ +// .ty = struct { FlexItemAlign, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .ms = true, +// }, +// .unprefixed = false, +// }, +// .@"flex-line-pack" = .{ +// .ty = struct { FlexLinePack, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .ms = true, +// }, +// .unprefixed = false, +// }, + +// // Microsoft extensions +// 
.@"flex-positive" = .{ +// .ty = struct { CSSNumber, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .ms = true, +// }, +// .unprefixed = false, +// }, +// .@"flex-negative" = .{ +// .ty = struct { CSSNumber, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .ms = true, +// }, +// .unprefixed = false, +// }, +// .@"flex-preferred-size" = .{ +// .ty = struct { LengthPercentageOrAuto, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .ms = true, +// }, +// .unprefixed = false, +// }, + +// // TODO: the following is enabled with #[cfg(feature = "grid")] +// // .@"grid-template-columns" = .{ +// // .ty = TrackSizing, +// // }, +// // .@"grid-template-rows" = .{ +// // .ty = TrackSizing, +// // }, +// // .@"grid-auto-columns" = .{ +// // .ty = TrackSizeList, +// // }, +// // .@"grid-auto-rows" = .{ +// // .ty = TrackSizeList, +// // }, +// // .@"grid-auto-flow" = .{ +// // .ty = GridAutoFlow, +// // }, +// // .@"grid-template-areas" = .{ +// // .ty = GridTemplateAreas, +// // }, +// // .@"grid-template" = .{ +// // .ty = GridTemplate, +// // .shorthand = true, +// // }, +// // .grid = .{ +// // .ty = Grid, +// // .shorthand = true, +// // }, +// // .@"grid-row-start" = .{ +// // .ty = GridLine, +// // }, +// // .@"grid-row-end" = .{ +// // .ty = GridLine, +// // }, +// // .@"grid-column-start" = .{ +// // .ty = GridLine, +// // }, +// // .@"grid-column-end" = .{ +// // .ty = GridLine, +// // }, +// // .@"grid-row" = .{ +// // .ty = GridRow, +// // .shorthand = true, +// // }, +// // .@"grid-column" = .{ +// // .ty = GridColumn, +// // .shorthand = true, +// // }, +// // .@"grid-area" = .{ +// // .ty = GridArea, +// // .shorthand = true, +// // }, + +// .@"margin-top" = .{ +// .ty = LengthPercentageOrAuto, +// .logical_group = .{ .ty = LogicalGroup.margin, .category = PropertyCategory.physical }, +// }, +// .@"margin-bottom" = .{ +// .ty = LengthPercentageOrAuto, +// .logical_group = .{ .ty = LogicalGroup.margin, .category = PropertyCategory.physical }, +// }, +// .@"margin-left" = .{ +// .ty = LengthPercentageOrAuto, +// .logical_group = .{ .ty = LogicalGroup.margin, .category = PropertyCategory.physical }, +// }, +// .@"margin-right" = .{ +// .ty = LengthPercentageOrAuto, +// .logical_group = .{ .ty = LogicalGroup.margin, .category = PropertyCategory.physical }, +// }, +// .@"margin-block-start" = .{ +// .ty = LengthPercentageOrAuto, +// .logical_group = .{ .ty = LogicalGroup.margin, .category = PropertyCategory.logical }, +// }, +// .@"margin-block-end" = .{ +// .ty = LengthPercentageOrAuto, +// .logical_group = .{ .ty = LogicalGroup.margin, .category = PropertyCategory.logical }, +// }, +// .@"margin-inline-start" = .{ +// .ty = LengthPercentageOrAuto, +// .logical_group = .{ .ty = LogicalGroup.margin, .category = PropertyCategory.logical }, +// }, +// .@"margin-inline-end" = .{ +// .ty = LengthPercentageOrAuto, +// .logical_group = .{ .ty = LogicalGroup.margin, .category = PropertyCategory.logical }, +// }, +// .@"margin-block" = .{ +// .ty = MarginBlock, +// .shorthand = true, +// }, +// .@"margin-inline" = .{ +// .ty = MarginInline, +// .shorthand = true, +// }, +// .margin = .{ +// .ty = Margin, +// .shorthand = true, +// }, + +// .@"padding-top" = .{ +// .ty = LengthPercentageOrAuto, +// .logical_group = .{ .ty = LogicalGroup.padding, .category = PropertyCategory.physical }, +// }, +// .@"padding-bottom" = .{ +// .ty = LengthPercentageOrAuto, +// .logical_group = .{ .ty = LogicalGroup.padding, .category = PropertyCategory.physical }, +// }, 
+// .@"padding-left" = .{ +// .ty = LengthPercentageOrAuto, +// .logical_group = .{ .ty = LogicalGroup.padding, .category = PropertyCategory.physical }, +// }, +// .@"padding-right" = .{ +// .ty = LengthPercentageOrAuto, +// .logical_group = .{ .ty = LogicalGroup.padding, .category = PropertyCategory.physical }, +// }, +// .@"padding-block-start" = .{ +// .ty = LengthPercentageOrAuto, +// .logical_group = .{ .ty = LogicalGroup.padding, .category = PropertyCategory.logical }, +// }, +// .@"padding-block-end" = .{ +// .ty = LengthPercentageOrAuto, +// .logical_group = .{ .ty = LogicalGroup.padding, .category = PropertyCategory.logical }, +// }, +// .@"padding-inline-start" = .{ +// .ty = LengthPercentageOrAuto, +// .logical_group = .{ .ty = LogicalGroup.padding, .category = PropertyCategory.logical }, +// }, +// .@"padding-inline-end" = .{ +// .ty = LengthPercentageOrAuto, +// .logical_group = .{ .ty = LogicalGroup.padding, .category = PropertyCategory.logical }, +// }, +// .@"padding-block" = .{ +// .ty = PaddingBlock, +// .shorthand = true, +// }, +// .@"padding-inline" = .{ +// .ty = PaddingInline, +// .shorthand = true, +// }, +// .padding = .{ +// .ty = Padding, +// .shorthand = true, +// }, + +// .@"scroll-margin-top" = .{ +// .ty = LengthPercentageOrAuto, +// .logical_group = .{ .ty = LogicalGroup.scroll_margin, .category = PropertyCategory.physical }, +// }, +// .@"scroll-margin-bottom" = .{ +// .ty = LengthPercentageOrAuto, +// .logical_group = .{ .ty = LogicalGroup.scroll_margin, .category = PropertyCategory.physical }, +// }, +// .@"scroll-margin-left" = .{ +// .ty = LengthPercentageOrAuto, +// .logical_group = .{ .ty = LogicalGroup.scroll_margin, .category = PropertyCategory.physical }, +// }, +// .@"scroll-margin-right" = .{ +// .ty = LengthPercentageOrAuto, +// .logical_group = .{ .ty = LogicalGroup.scroll_margin, .category = PropertyCategory.physical }, +// }, +// .@"scroll-margin-block-start" = .{ +// .ty = LengthPercentageOrAuto, +// .logical_group = .{ .ty = LogicalGroup.scroll_margin, .category = PropertyCategory.logical }, +// }, +// .@"scroll-margin-block-end" = .{ +// .ty = LengthPercentageOrAuto, +// .logical_group = .{ .ty = LogicalGroup.scroll_margin, .category = PropertyCategory.logical }, +// }, +// .@"scroll-margin-inline-start" = .{ +// .ty = LengthPercentageOrAuto, +// .logical_group = .{ .ty = LogicalGroup.scroll_margin, .category = PropertyCategory.logical }, +// }, +// .@"scroll-margin-inline-end" = .{ +// .ty = LengthPercentageOrAuto, +// .logical_group = .{ .ty = LogicalGroup.scroll_margin, .category = PropertyCategory.logical }, +// }, +// .@"scroll-margin-block" = .{ +// .ty = ScrollMarginBlock, +// .shorthand = true, +// }, +// .@"scroll-margin-inline" = .{ +// .ty = ScrollMarginInline, +// .shorthand = true, +// }, +// .@"scroll-margin" = .{ +// .ty = ScrollMargin, +// .shorthand = true, +// }, + +// .@"scroll-padding-top" = .{ +// .ty = LengthPercentageOrAuto, +// .logical_group = .{ .ty = LogicalGroup.scroll_padding, .category = PropertyCategory.physical }, +// }, +// .@"scroll-padding-bottom" = .{ +// .ty = LengthPercentageOrAuto, +// .logical_group = .{ .ty = LogicalGroup.scroll_padding, .category = PropertyCategory.physical }, +// }, +// .@"scroll-padding-left" = .{ +// .ty = LengthPercentageOrAuto, +// .logical_group = .{ .ty = LogicalGroup.scroll_padding, .category = PropertyCategory.physical }, +// }, +// .@"scroll-padding-right" = .{ +// .ty = LengthPercentageOrAuto, +// .logical_group = .{ .ty = LogicalGroup.scroll_padding, .category = 
PropertyCategory.physical }, +// }, +// .@"scroll-padding-block-start" = .{ +// .ty = LengthPercentageOrAuto, +// .logical_group = .{ .ty = LogicalGroup.scroll_padding, .category = PropertyCategory.logical }, +// }, +// .@"scroll-padding-block-end" = .{ +// .ty = LengthPercentageOrAuto, +// .logical_group = .{ .ty = LogicalGroup.scroll_padding, .category = PropertyCategory.logical }, +// }, +// .@"scroll-padding-inline-start" = .{ +// .ty = LengthPercentageOrAuto, +// .logical_group = .{ .ty = LogicalGroup.scroll_padding, .category = PropertyCategory.logical }, +// }, +// .@"scroll-padding-inline-end" = .{ +// .ty = LengthPercentageOrAuto, +// .logical_group = .{ .ty = LogicalGroup.scroll_padding, .category = PropertyCategory.logical }, +// }, +// .@"scroll-padding-block" = .{ +// .ty = ScrollPaddingBlock, +// .shorthand = true, +// }, +// .@"scroll-padding-inline" = .{ +// .ty = ScrollPaddingInline, +// .shorthand = true, +// }, +// .@"scroll-padding" = .{ +// .ty = ScrollPadding, +// .shorthand = true, +// }, + +// .@"font-weight" = .{ +// .ty = FontWeight, +// }, +// .@"font-size" = .{ +// .ty = FontSize, +// }, +// .@"font-stretch" = .{ +// .ty = FontStretch, +// }, +// .@"font-family" = .{ +// .ty = ArrayList(FontFamily), +// }, +// .@"font-style" = .{ +// .ty = FontStyle, +// }, +// .@"font-variant-caps" = .{ +// .ty = FontVariantCaps, +// }, +// .@"line-height" = .{ +// .ty = LineHeight, +// }, +// .font = .{ +// .ty = Font, +// .shorthand = true, +// }, +// .@"vertical-align" = .{ +// .ty = VerticalAlign, +// }, +// .@"font-palette" = .{ +// .ty = DashedIdentReference, +// }, + +// .@"transition-property" = .{ +// .ty = struct { SmallListPropertyIdPlaceholder, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// .moz = true, +// .ms = true, +// }, +// }, +// .@"transition-duration" = .{ +// .ty = struct { SmallList(Time, 1), css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// .moz = true, +// .ms = true, +// }, +// }, +// .@"transition-delay" = .{ +// .ty = struct { SmallList(Time, 1), css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// .moz = true, +// .ms = true, +// }, +// }, +// .@"transition-timing-function" = .{ +// .ty = struct { SmallList(EasingFunction, 1), css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// .moz = true, +// .ms = true, +// }, +// }, +// .transition = .{ +// .ty = struct { SmallList(Transition, 1), css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// .moz = true, +// .ms = true, +// }, +// .shorthand = true, +// }, + +// .@"animation-name" = .{ +// .ty = struct { AnimationNameList, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// .moz = true, +// .o = true, +// }, +// }, +// .@"animation-duration" = .{ +// .ty = struct { SmallList(Time, 1), css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// .moz = true, +// .o = true, +// }, +// }, +// .@"animation-timing-function" = .{ +// .ty = struct { SmallList(EasingFunction, 1), css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// .moz = true, +// .o = true, +// }, +// }, +// .@"animation-iteration-count" = .{ +// .ty = struct { SmallList(AnimationIterationCount, 1), css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// .moz = true, +// .o = true, +// }, +// }, +// .@"animation-direction" = .{ +// .ty = struct { 
SmallList(AnimationDirection, 1), css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// .moz = true, +// .o = true, +// }, +// }, +// .@"animation-play-state" = .{ +// .ty = struct { SmallList(AnimationPlayState, 1), css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// .moz = true, +// .o = true, +// }, +// }, +// .@"animation-delay" = .{ +// .ty = struct { SmallList(Time, 1), css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// .moz = true, +// .o = true, +// }, +// }, +// .@"animation-fill-mode" = .{ +// .ty = struct { SmallList(AnimationFillMode, 1), css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// .moz = true, +// .o = true, +// }, +// }, +// .@"animation-composition" = .{ +// .ty = SmallList(AnimationComposition, 1), +// }, +// .@"animation-timeline" = .{ +// .ty = SmallList(AnimationTimeline, 1), +// }, +// .@"animation-range-start" = .{ +// .ty = SmallList(AnimationRangeStart, 1), +// }, +// .@"animation-range-end" = .{ +// .ty = SmallList(AnimationRangeEnd, 1), +// }, +// .@"animation-range" = .{ +// .ty = SmallList(AnimationRange, 1), +// }, +// .animation = .{ +// .ty = struct { AnimationList, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// .moz = true, +// .o = true, +// }, +// .shorthand = true, +// }, + +// // https://drafts.csswg.org/css-transforms-2/ +// .transform = .{ +// .ty = struct { TransformList, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// .moz = true, +// .ms = true, +// .o = true, +// }, +// }, +// .@"transform-origin" = .{ +// .ty = struct { Position, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// .moz = true, +// .ms = true, +// .o = true, +// }, +// // TODO: handle z offset syntax +// }, +// .@"transform-style" = .{ +// .ty = struct { TransformStyle, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// .moz = true, +// }, +// }, +// .@"transform-box" = .{ +// .ty = TransformBox, +// }, +// .@"backface-visibility" = .{ +// .ty = struct { BackfaceVisibility, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// .moz = true, +// }, +// }, +// .perspective = .{ +// .ty = struct { Perspective, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// .moz = true, +// }, +// }, +// .@"perspective-origin" = .{ +// .ty = struct { Position, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// .moz = true, +// }, +// }, +// .translate = .{ +// .ty = Translate, +// }, +// .rotate = .{ +// .ty = Rotate, +// }, +// .scale = .{ +// .ty = Scale, +// }, + +// // https://www.w3.org/TR/2021/CRD-css-text-3-20210422 +// .@"text-transform" = .{ +// .ty = TextTransform, +// }, +// .@"white-space" = .{ +// .ty = WhiteSpace, +// }, +// .@"tab-size" = .{ +// .ty = struct { LengthOrNumber, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .moz = true, +// .o = true, +// }, +// }, +// .@"word-break" = .{ +// .ty = WordBreak, +// }, +// .@"line-break" = .{ +// .ty = LineBreak, +// }, +// .hyphens = .{ +// .ty = struct { Hyphens, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// .moz = true, +// .ms = true, +// }, +// }, +// .@"overflow-wrap" = .{ +// .ty = OverflowWrap, +// }, +// .@"word-wrap" = .{ +// .ty = OverflowWrap, +// }, +// .@"text-align" = .{ +// .ty = TextAlign, +// }, +// 
.@"text-align-last" = .{ +// .ty = struct { TextAlignLast, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .moz = true, +// }, +// }, +// .@"text-justify" = .{ +// .ty = TextJustify, +// }, +// .@"word-spacing" = .{ +// .ty = Spacing, +// }, +// .@"letter-spacing" = .{ +// .ty = Spacing, +// }, +// .@"text-indent" = .{ +// .ty = TextIndent, +// }, + +// // https://www.w3.org/TR/2020/WD-css-text-decor-4-20200506 +// .@"text-decoration-line" = .{ +// .ty = struct { TextDecorationLine, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// .moz = true, +// }, +// }, +// .@"text-decoration-style" = .{ +// .ty = struct { TextDecorationStyle, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// .moz = true, +// }, +// }, +// .@"text-decoration-color" = .{ +// .ty = struct { CssColor, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// .moz = true, +// }, +// }, +// .@"text-decoration-thickness" = .{ +// .ty = TextDecorationThickness, +// }, +// .@"text-decoration" = .{ +// .ty = struct { TextDecoration, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// .moz = true, +// }, +// .shorthand = true, +// }, +// .@"text-decoration-skip-ink" = .{ +// .ty = struct { TextDecorationSkipInk, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// }, +// }, +// .@"text-emphasis-style" = .{ +// .ty = struct { TextEmphasisStyle, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// }, +// }, +// .@"text-emphasis-color" = .{ +// .ty = struct { CssColor, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// }, +// }, +// .@"text-emphasis" = .{ +// .ty = struct { TextEmphasis, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// }, +// .shorthand = true, +// }, +// .@"text-emphasis-position" = .{ +// .ty = struct { TextEmphasisPosition, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// }, +// }, +// .@"text-shadow" = .{ +// .ty = SmallList(TextShadow, 1), +// }, + +// // https://w3c.github.io/csswg-drafts/css-size-adjust/ +// .@"text-size-adjust" = .{ +// .ty = struct { TextSizeAdjust, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// .moz = true, +// .ms = true, +// }, +// }, + +// // https://drafts.csswg.org/css-writing-modes-3/ +// .direction = .{ +// .ty = Direction, +// }, +// .@"unicode-bidi" = .{ +// .ty = UnicodeBidi, +// }, + +// // https://www.w3.org/TR/css-break-3/ +// .@"box-decoration-break" = .{ +// .ty = struct { BoxDecorationBreak, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// }, +// }, + +// // https://www.w3.org/TR/2021/WD-css-ui-4-20210316 +// .resize = .{ +// .ty = Resize, +// }, +// .cursor = .{ +// .ty = Cursor, +// }, +// .@"caret-color" = .{ +// .ty = ColorOrAuto, +// }, +// .@"caret-shape" = .{ +// .ty = CaretShape, +// }, +// .caret = .{ +// .ty = Caret, +// .shorthand = true, +// }, +// .@"user-select" = .{ +// .ty = struct { UserSelect, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// .moz = true, +// .ms = true, +// }, +// }, +// .@"accent-color" = .{ +// .ty = ColorOrAuto, +// }, +// .appearance = .{ +// .ty = struct { Appearance, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// .moz = true, +// .ms = true, +// }, +// }, + +// // 
https://www.w3.org/TR/2020/WD-css-lists-3-20201117 +// .@"list-style-type" = .{ +// .ty = ListStyleType, +// }, +// .@"list-style-image" = .{ +// .ty = Image, +// }, +// .@"list-style-position" = .{ +// .ty = ListStylePosition, +// }, +// .@"list-style" = .{ +// .ty = ListStyle, +// .shorthand = true, +// }, +// .@"marker-side" = .{ +// .ty = MarkerSide, +// }, + +// // CSS modules +// .composes = .{ +// .ty = Composes, +// .conditional = .{ +// .css_modules = true, +// }, +// }, + +// // https://www.w3.org/TR/SVG2/painting.html +// .fill = .{ +// .ty = SVGPaint, +// }, +// .@"fill-rule" = .{ +// .ty = FillRule, +// }, +// .@"fill-opacity" = .{ +// .ty = AlphaValue, +// }, +// .stroke = .{ +// .ty = SVGPaint, +// }, +// .@"stroke-opacity" = .{ +// .ty = AlphaValue, +// }, +// .@"stroke-width" = .{ +// .ty = LengthPercentage, +// }, +// .@"stroke-linecap" = .{ +// .ty = StrokeLinecap, +// }, +// .@"stroke-linejoin" = .{ +// .ty = StrokeLinejoin, +// }, +// .@"stroke-miterlimit" = .{ +// .ty = CSSNumber, +// }, +// .@"stroke-dasharray" = .{ +// .ty = StrokeDasharray, +// }, +// .@"stroke-dashoffset" = .{ +// .ty = LengthPercentage, +// }, +// .@"marker-start" = .{ +// .ty = Marker, +// }, +// .@"marker-mid" = .{ +// .ty = Marker, +// }, +// .@"marker-end" = .{ +// .ty = Marker, +// }, +// .marker = .{ +// .ty = Marker, +// }, +// .@"color-interpolation" = .{ +// .ty = ColorInterpolation, +// }, +// .@"color-interpolation-filters" = .{ +// .ty = ColorInterpolation, +// }, +// .@"color-rendering" = .{ +// .ty = ColorRendering, +// }, +// .@"shape-rendering" = .{ +// .ty = ShapeRendering, +// }, +// .@"text-rendering" = .{ +// .ty = TextRendering, +// }, +// .@"image-rendering" = .{ +// .ty = ImageRendering, +// }, + +// // https://www.w3.org/TR/css-masking-1/ +// .@"clip-path" = .{ +// .ty = struct { ClipPath, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// }, +// }, +// .@"clip-rule" = .{ +// .ty = FillRule, +// }, +// .@"mask-image" = .{ +// .ty = struct { SmallList(Image, 1), css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// }, +// }, +// .@"mask-mode" = .{ +// .ty = SmallList(MaskMode, 1), +// }, +// .@"mask-repeat" = .{ +// .ty = struct { SmallList(BackgroundRepeat, 1), css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// }, +// }, +// .@"mask-position-x" = .{ +// .ty = SmallList(HorizontalPosition, 1), +// }, +// .@"mask-position-y" = .{ +// .ty = SmallList(VerticalPosition, 1), +// }, +// .@"mask-position" = .{ +// .ty = struct { SmallList(Position, 1), css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// }, +// }, +// .@"mask-clip" = .{ +// .ty = struct { SmallList(MaskClip, 1), css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// }, +// }, +// .@"mask-origin" = .{ +// .ty = struct { SmallList(GeometryBox, 1), css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// }, +// }, +// .@"mask-size" = .{ +// .ty = struct { SmallList(BackgroundSize, 1), css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// }, +// }, +// .@"mask-composite" = .{ +// .ty = SmallList(MaskComposite, 1), +// }, +// .@"mask-type" = .{ +// .ty = MaskType, +// }, +// .mask = .{ +// .ty = struct { SmallList(Mask, 1), css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// }, +// .shorthand = true, +// }, +// .@"mask-border-source" = .{ +// .ty = Image, +// }, +// 
.@"mask-border-mode" = .{ +// .ty = MaskBorderMode, +// }, +// .@"mask-border-slice" = .{ +// .ty = BorderImageSlice, +// }, +// .@"mask-border-width" = .{ +// .ty = Rect(BorderImageSideWidth), +// }, +// .@"mask-border-outset" = .{ +// .ty = Rect(LengthOrNumber), +// }, +// .@"mask-border-repeat" = .{ +// .ty = BorderImageRepeat, +// }, +// .@"mask-border" = .{ +// .ty = MaskBorder, +// .shorthand = true, +// }, + +// // WebKit additions +// .@"-webkit-mask-composite" = .{ +// .ty = SmallList(WebKitMaskComposite, 1), +// }, +// .@"mask-source-type" = .{ +// .ty = struct { SmallList(WebKitMaskSourceType, 1), css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// }, +// .unprefixed = false, +// }, +// .@"mask-box-image" = .{ +// .ty = struct { BorderImage, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// }, +// .unprefixed = false, +// }, +// .@"mask-box-image-source" = .{ +// .ty = struct { Image, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// }, +// .unprefixed = false, +// }, +// .@"mask-box-image-slice" = .{ +// .ty = struct { BorderImageSlice, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// }, +// .unprefixed = false, +// }, +// .@"mask-box-image-width" = .{ +// .ty = struct { Rect(BorderImageSideWidth), css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// }, +// .unprefixed = false, +// }, +// .@"mask-box-image-outset" = .{ +// .ty = struct { Rect(LengthOrNumber), css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// }, +// .unprefixed = false, +// }, +// .@"mask-box-image-repeat" = .{ +// .ty = struct { BorderImageRepeat, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// }, +// .unprefixed = false, +// }, + +// // https://drafts.fxtf.org/filter-effects-1/ +// .filter = .{ +// .ty = struct { FilterList, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// }, +// }, +// .@"backdrop-filter" = .{ +// .ty = struct { FilterList, css.VendorPrefix }, +// .valid_prefixes = css.VendorPrefix{ +// .webkit = true, +// }, +// }, + +// // https://drafts.csswg.org/css2/ +// .@"z-index" = .{ +// .ty = position.ZIndex, +// }, + +// // https://drafts.csswg.org/css-contain-3/ +// .@"container-type" = .{ +// .ty = ContainerType, +// }, +// .@"container-name" = .{ +// .ty = ContainerNameList, +// }, +// .container = .{ +// .ty = Container, +// .shorthand = true, +// }, + +// // https://w3c.github.io/csswg-drafts/css-view-transitions-1/ +// .@"view-transition-name" = .{ +// .ty = CustomIdent, +// }, + +// // https://drafts.csswg.org/css-color-adjust/ +// .@"color-scheme" = .{ +// .ty = ColorScheme, +// }, +// }); diff --git a/src/css/properties/properties_generated.zig b/src/css/properties/properties_generated.zig new file mode 100644 index 0000000000..be76ee77a7 --- /dev/null +++ b/src/css/properties/properties_generated.zig @@ -0,0 +1,9686 @@ +const std = @import("std"); +const bun = @import("root").bun; +const Allocator = std.mem.Allocator; + +pub const css = @import("../css_parser.zig"); + +const Printer = css.Printer; +const PrintErr = css.PrintErr; +const VendorPrefix = css.VendorPrefix; + +const PropertyImpl = @import("./properties_impl.zig").PropertyImpl; +const PropertyIdImpl = @import("./properties_impl.zig").PropertyIdImpl; + +const CSSWideKeyword = css.css_properties.CSSWideKeyword; +const UnparsedProperty = 
css.css_properties.custom.UnparsedProperty; +const CustomProperty = css.css_properties.custom.CustomProperty; + +const css_values = css.css_values; +const CssColor = css.css_values.color.CssColor; +const Image = css.css_values.image.Image; +const Length = css.css_values.length.Length; +const LengthValue = css.css_values.length.LengthValue; +const LengthPercentage = css_values.length.LengthPercentage; +const LengthPercentageOrAuto = css_values.length.LengthPercentageOrAuto; +const PropertyCategory = css.PropertyCategory; +const LogicalGroup = css.LogicalGroup; +const CSSNumber = css.css_values.number.CSSNumber; +const CSSNumberFns = css.css_values.number.CSSNumberFns; +const CSSInteger = css.css_values.number.CSSInteger; +const CSSIntegerFns = css.css_values.number.CSSIntegerFns; +const NumberOrPercentage = css.css_values.percentage.NumberOrPercentage; +const Percentage = css.css_values.percentage.Percentage; +const Angle = css.css_values.angle.Angle; +const DashedIdentReference = css.css_values.ident.DashedIdentReference; +const Time = css.css_values.time.Time; +const EasingFunction = css.css_values.easing.EasingFunction; +const CustomIdent = css.css_values.ident.CustomIdent; +const CSSString = css.css_values.string.CSSString; +const DashedIdent = css.css_values.ident.DashedIdent; +const Url = css.css_values.url.Url; +const CustomIdentList = css.css_values.ident.CustomIdentList; +const Location = css.Location; +const HorizontalPosition = css.css_values.position.HorizontalPosition; +const VerticalPosition = css.css_values.position.VerticalPosition; +const ContainerName = css.css_rules.container.ContainerName; + +pub const font = css.css_properties.font; +const border = css.css_properties.border; +const border_radius = css.css_properties.border_radius; +const border_image = css.css_properties.border_image; +const outline = css.css_properties.outline; +const flex = css.css_properties.flex; +const @"align" = css.css_properties.@"align"; +const margin_padding = css.css_properties.margin_padding; +const transition = css.css_properties.transition; +const animation = css.css_properties.animation; +const transform = css.css_properties.transform; +const text = css.css_properties.text; +const ui = css.css_properties.ui; +const list = css.css_properties.list; +const css_modules = css.css_properties.css_modules; +const svg = css.css_properties.svg; +const shape = css.css_properties.shape; +const masking = css.css_properties.masking; +const background = css.css_properties.background; +const effects = css.css_properties.effects; +const contain = css.css_properties.contain; +const custom = css.css_properties.custom; +const position = css.css_properties.position; +const box_shadow = css.css_properties.box_shadow; +const size = css.css_properties.size; +const overflow = css.css_properties.overflow; + +const BorderSideWidth = border.BorderSideWidth; +const Size2D = css_values.size.Size2D; +const BorderRadius = border_radius.BorderRadius; +const Rect = css_values.rect.Rect; +const LengthOrNumber = css_values.length.LengthOrNumber; +const BorderImageRepeat = border_image.BorderImageRepeat; +const BorderImageSideWidth = border_image.BorderImageSideWidth; +const BorderImageSlice = border_image.BorderImageSlice; +const BorderImage = border_image.BorderImage; +const BorderColor = border.BorderColor; +const BorderStyle = border.BorderStyle; +const BorderWidth = border.BorderWidth; +const BorderBlockColor = border.BorderBlockColor; +const BorderBlockStyle = border.BorderBlockStyle; +const BorderBlockWidth = 
border.BorderBlockWidth; +const BorderInlineColor = border.BorderInlineColor; +const BorderInlineStyle = border.BorderInlineStyle; +const BorderInlineWidth = border.BorderInlineWidth; +const Border = border.Border; +const BorderTop = border.BorderTop; +const BorderRight = border.BorderRight; +const BorderLeft = border.BorderLeft; +const BorderBottom = border.BorderBottom; +const BorderBlockStart = border.BorderBlockStart; +const BorderBlockEnd = border.BorderBlockEnd; +const BorderInlineStart = border.BorderInlineStart; +const BorderInlineEnd = border.BorderInlineEnd; +const BorderBlock = border.BorderBlock; +const BorderInline = border.BorderInline; +const Outline = outline.Outline; +const OutlineStyle = outline.OutlineStyle; +const FlexDirection = flex.FlexDirection; +const FlexWrap = flex.FlexWrap; +const FlexFlow = flex.FlexFlow; +const Flex = flex.Flex; +const BoxOrient = flex.BoxOrient; +const BoxDirection = flex.BoxDirection; +const BoxAlign = flex.BoxAlign; +const BoxPack = flex.BoxPack; +const BoxLines = flex.BoxLines; +const FlexPack = flex.FlexPack; +const FlexItemAlign = flex.FlexItemAlign; +const FlexLinePack = flex.FlexLinePack; +const AlignContent = @"align".AlignContent; +const JustifyContent = @"align".JustifyContent; +const PlaceContent = @"align".PlaceContent; +const AlignSelf = @"align".AlignSelf; +const JustifySelf = @"align".JustifySelf; +const PlaceSelf = @"align".PlaceSelf; +const AlignItems = @"align".AlignItems; +const JustifyItems = @"align".JustifyItems; +const PlaceItems = @"align".PlaceItems; +const GapValue = @"align".GapValue; +const Gap = @"align".Gap; +const MarginBlock = margin_padding.MarginBlock; +const Margin = margin_padding.Margin; +const MarginInline = margin_padding.MarginInline; +const PaddingBlock = margin_padding.PaddingBlock; +const PaddingInline = margin_padding.PaddingInline; +const Padding = margin_padding.Padding; +const ScrollMarginBlock = margin_padding.ScrollMarginBlock; +const ScrollMarginInline = margin_padding.ScrollMarginInline; +const ScrollMargin = margin_padding.ScrollMargin; +const ScrollPaddingBlock = margin_padding.ScrollPaddingBlock; +const ScrollPaddingInline = margin_padding.ScrollPaddingInline; +const ScrollPadding = margin_padding.ScrollPadding; +const FontWeight = font.FontWeight; +const FontSize = font.FontSize; +const FontStretch = font.FontStretch; +const FontFamily = font.FontFamily; +const FontStyle = font.FontStyle; +const FontVariantCaps = font.FontVariantCaps; +const LineHeight = font.LineHeight; +const Font = font.Font; +// const VerticalAlign = font.VerticalAlign; +// const Transition = transition.Transition; +// const AnimationNameList = animation.AnimationNameList; +// const AnimationList = animation.AnimationList; +// const AnimationIterationCount = animation.AnimationIterationCount; +// const AnimationDirection = animation.AnimationDirection; +// const AnimationPlayState = animation.AnimationPlayState; +// const AnimationFillMode = animation.AnimationFillMode; +// const AnimationComposition = animation.AnimationComposition; +// const AnimationTimeline = animation.AnimationTimeline; +// const AnimationRangeStart = animation.AnimationRangeStart; +// const AnimationRangeEnd = animation.AnimationRangeEnd; +// const AnimationRange = animation.AnimationRange; +// const TransformList = transform.TransformList; +// const TransformStyle = transform.TransformStyle; +// const TransformBox = transform.TransformBox; +// const BackfaceVisibility = transform.BackfaceVisibility; +// const Perspective = 
transform.Perspective; +// const Translate = transform.Translate; +// const Rotate = transform.Rotate; +// const Scale = transform.Scale; +// const TextTransform = text.TextTransform; +// const WhiteSpace = text.WhiteSpace; +// const WordBreak = text.WordBreak; +// const LineBreak = text.LineBreak; +// const Hyphens = text.Hyphens; +// const OverflowWrap = text.OverflowWrap; +// const TextAlign = text.TextAlign; +// const TextIndent = text.TextIndent; +// const Spacing = text.Spacing; +// const TextJustify = text.TextJustify; +// const TextAlignLast = text.TextAlignLast; +// const TextDecorationLine = text.TextDecorationLine; +// const TextDecorationStyle = text.TextDecorationStyle; +// const TextDecorationThickness = text.TextDecorationThickness; +// const TextDecoration = text.TextDecoration; +// const TextDecorationSkipInk = text.TextDecorationSkipInk; +// const TextEmphasisStyle = text.TextEmphasisStyle; +// const TextEmphasis = text.TextEmphasis; +// const TextEmphasisPositionVertical = text.TextEmphasisPositionVertical; +// const TextEmphasisPositionHorizontal = text.TextEmphasisPositionHorizontal; +// const TextEmphasisPosition = text.TextEmphasisPosition; +const TextShadow = text.TextShadow; +// const TextSizeAdjust = text.TextSizeAdjust; +const Direction = text.Direction; +// const UnicodeBidi = text.UnicodeBidi; +// const BoxDecorationBreak = text.BoxDecorationBreak; +// const Resize = ui.Resize; +// const Cursor = ui.Cursor; +// const ColorOrAuto = ui.ColorOrAuto; +// const CaretShape = ui.CaretShape; +// const Caret = ui.Caret; +// const UserSelect = ui.UserSelect; +// const Appearance = ui.Appearance; +// const ColorScheme = ui.ColorScheme; +// const ListStyleType = list.ListStyleType; +// const ListStylePosition = list.ListStylePosition; +// const ListStyle = list.ListStyle; +// const MarkerSide = list.MarkerSide; +const Composes = css_modules.Composes; +// const SVGPaint = svg.SVGPaint; +// const FillRule = shape.FillRule; +// const AlphaValue = shape.AlphaValue; +// const StrokeLinecap = svg.StrokeLinecap; +// const StrokeLinejoin = svg.StrokeLinejoin; +// const StrokeDasharray = svg.StrokeDasharray; +// const Marker = svg.Marker; +// const ColorInterpolation = svg.ColorInterpolation; +// const ColorRendering = svg.ColorRendering; +// const ShapeRendering = svg.ShapeRendering; +// const TextRendering = svg.TextRendering; +// const ImageRendering = svg.ImageRendering; +const ClipPath = masking.ClipPath; +const MaskMode = masking.MaskMode; +const MaskClip = masking.MaskClip; +const GeometryBox = masking.GeometryBox; +const MaskComposite = masking.MaskComposite; +const MaskType = masking.MaskType; +const Mask = masking.Mask; +const MaskBorderMode = masking.MaskBorderMode; +const MaskBorder = masking.MaskBorder; +const WebKitMaskComposite = masking.WebKitMaskComposite; +const WebKitMaskSourceType = masking.WebKitMaskSourceType; +const BackgroundRepeat = background.BackgroundRepeat; +const BackgroundSize = background.BackgroundSize; +// const FilterList = effects.FilterList; +// const ContainerType = contain.ContainerType; +// const Container = contain.Container; +// const ContainerNameList = contain.ContainerNameList; +const CustomPropertyName = custom.CustomPropertyName; +const display = css.css_properties.display; + +const Position = position.Position; + +const Result = css.Result; + +const BabyList = bun.BabyList; +const ArrayList = std.ArrayListUnmanaged; +const SmallList = css.SmallList; +pub const Property = union(PropertyIdTag) { + @"background-color": CssColor, + 
@"background-image": SmallList(Image, 1), + @"background-position-x": SmallList(css_values.position.HorizontalPosition, 1), + @"background-position-y": SmallList(css_values.position.VerticalPosition, 1), + @"background-position": SmallList(background.BackgroundPosition, 1), + @"background-size": SmallList(background.BackgroundSize, 1), + @"background-repeat": SmallList(background.BackgroundRepeat, 1), + @"background-attachment": SmallList(background.BackgroundAttachment, 1), + @"background-clip": struct { SmallList(background.BackgroundClip, 1), VendorPrefix }, + @"background-origin": SmallList(background.BackgroundOrigin, 1), + background: SmallList(background.Background, 1), + @"box-shadow": struct { SmallList(box_shadow.BoxShadow, 1), VendorPrefix }, + opacity: css.css_values.alpha.AlphaValue, + color: CssColor, + display: display.Display, + visibility: display.Visibility, + width: size.Size, + height: size.Size, + @"min-width": size.Size, + @"min-height": size.Size, + @"max-width": size.MaxSize, + @"max-height": size.MaxSize, + @"block-size": size.Size, + @"inline-size": size.Size, + @"min-block-size": size.Size, + @"min-inline-size": size.Size, + @"max-block-size": size.MaxSize, + @"max-inline-size": size.MaxSize, + @"box-sizing": struct { size.BoxSizing, VendorPrefix }, + @"aspect-ratio": size.AspectRatio, + overflow: overflow.Overflow, + @"overflow-x": overflow.OverflowKeyword, + @"overflow-y": overflow.OverflowKeyword, + @"text-overflow": struct { overflow.TextOverflow, VendorPrefix }, + position: position.Position, + top: LengthPercentageOrAuto, + bottom: LengthPercentageOrAuto, + left: LengthPercentageOrAuto, + right: LengthPercentageOrAuto, + @"inset-block-start": LengthPercentageOrAuto, + @"inset-block-end": LengthPercentageOrAuto, + @"inset-inline-start": LengthPercentageOrAuto, + @"inset-inline-end": LengthPercentageOrAuto, + @"inset-block": margin_padding.InsetBlock, + @"inset-inline": margin_padding.InsetInline, + inset: margin_padding.Inset, + @"border-spacing": css.css_values.size.Size2D(Length), + @"border-top-color": CssColor, + @"border-bottom-color": CssColor, + @"border-left-color": CssColor, + @"border-right-color": CssColor, + @"border-block-start-color": CssColor, + @"border-block-end-color": CssColor, + @"border-inline-start-color": CssColor, + @"border-inline-end-color": CssColor, + @"border-top-style": border.LineStyle, + @"border-bottom-style": border.LineStyle, + @"border-left-style": border.LineStyle, + @"border-right-style": border.LineStyle, + @"border-block-start-style": border.LineStyle, + @"border-block-end-style": border.LineStyle, + @"border-inline-start-style": border.LineStyle, + @"border-inline-end-style": border.LineStyle, + @"border-top-width": BorderSideWidth, + @"border-bottom-width": BorderSideWidth, + @"border-left-width": BorderSideWidth, + @"border-right-width": BorderSideWidth, + @"border-block-start-width": BorderSideWidth, + @"border-block-end-width": BorderSideWidth, + @"border-inline-start-width": BorderSideWidth, + @"border-inline-end-width": BorderSideWidth, + @"border-top-left-radius": struct { Size2D(LengthPercentage), VendorPrefix }, + @"border-top-right-radius": struct { Size2D(LengthPercentage), VendorPrefix }, + @"border-bottom-left-radius": struct { Size2D(LengthPercentage), VendorPrefix }, + @"border-bottom-right-radius": struct { Size2D(LengthPercentage), VendorPrefix }, + @"border-start-start-radius": Size2D(LengthPercentage), + @"border-start-end-radius": Size2D(LengthPercentage), + @"border-end-start-radius": 
Size2D(LengthPercentage), + @"border-end-end-radius": Size2D(LengthPercentage), + @"border-radius": struct { BorderRadius, VendorPrefix }, + @"border-image-source": Image, + @"border-image-outset": Rect(LengthOrNumber), + @"border-image-repeat": BorderImageRepeat, + @"border-image-width": Rect(BorderImageSideWidth), + @"border-image-slice": BorderImageSlice, + @"border-image": struct { BorderImage, VendorPrefix }, + @"border-color": BorderColor, + @"border-style": BorderStyle, + @"border-width": BorderWidth, + @"border-block-color": BorderBlockColor, + @"border-block-style": BorderBlockStyle, + @"border-block-width": BorderBlockWidth, + @"border-inline-color": BorderInlineColor, + @"border-inline-style": BorderInlineStyle, + @"border-inline-width": BorderInlineWidth, + border: Border, + @"border-top": BorderTop, + @"border-bottom": BorderBottom, + @"border-left": BorderLeft, + @"border-right": BorderRight, + @"border-block": BorderBlock, + @"border-block-start": BorderBlockStart, + @"border-block-end": BorderBlockEnd, + @"border-inline": BorderInline, + @"border-inline-start": BorderInlineStart, + @"border-inline-end": BorderInlineEnd, + outline: Outline, + @"outline-color": CssColor, + @"outline-style": OutlineStyle, + @"outline-width": BorderSideWidth, + @"flex-direction": struct { FlexDirection, VendorPrefix }, + @"flex-wrap": struct { FlexWrap, VendorPrefix }, + @"flex-flow": struct { FlexFlow, VendorPrefix }, + @"flex-grow": struct { CSSNumber, VendorPrefix }, + @"flex-shrink": struct { CSSNumber, VendorPrefix }, + @"flex-basis": struct { LengthPercentageOrAuto, VendorPrefix }, + flex: struct { Flex, VendorPrefix }, + order: struct { CSSInteger, VendorPrefix }, + @"align-content": struct { AlignContent, VendorPrefix }, + @"justify-content": struct { JustifyContent, VendorPrefix }, + @"place-content": PlaceContent, + @"align-self": struct { AlignSelf, VendorPrefix }, + @"justify-self": JustifySelf, + @"place-self": PlaceSelf, + @"align-items": struct { AlignItems, VendorPrefix }, + @"justify-items": JustifyItems, + @"place-items": PlaceItems, + @"row-gap": GapValue, + @"column-gap": GapValue, + gap: Gap, + @"box-orient": struct { BoxOrient, VendorPrefix }, + @"box-direction": struct { BoxDirection, VendorPrefix }, + @"box-ordinal-group": struct { CSSInteger, VendorPrefix }, + @"box-align": struct { BoxAlign, VendorPrefix }, + @"box-flex": struct { CSSNumber, VendorPrefix }, + @"box-flex-group": struct { CSSInteger, VendorPrefix }, + @"box-pack": struct { BoxPack, VendorPrefix }, + @"box-lines": struct { BoxLines, VendorPrefix }, + @"flex-pack": struct { FlexPack, VendorPrefix }, + @"flex-order": struct { CSSInteger, VendorPrefix }, + @"flex-align": struct { BoxAlign, VendorPrefix }, + @"flex-item-align": struct { FlexItemAlign, VendorPrefix }, + @"flex-line-pack": struct { FlexLinePack, VendorPrefix }, + @"flex-positive": struct { CSSNumber, VendorPrefix }, + @"flex-negative": struct { CSSNumber, VendorPrefix }, + @"flex-preferred-size": struct { LengthPercentageOrAuto, VendorPrefix }, + @"margin-top": LengthPercentageOrAuto, + @"margin-bottom": LengthPercentageOrAuto, + @"margin-left": LengthPercentageOrAuto, + @"margin-right": LengthPercentageOrAuto, + @"margin-block-start": LengthPercentageOrAuto, + @"margin-block-end": LengthPercentageOrAuto, + @"margin-inline-start": LengthPercentageOrAuto, + @"margin-inline-end": LengthPercentageOrAuto, + @"margin-block": MarginBlock, + @"margin-inline": MarginInline, + margin: Margin, + @"padding-top": LengthPercentageOrAuto, + 
@"padding-bottom": LengthPercentageOrAuto, + @"padding-left": LengthPercentageOrAuto, + @"padding-right": LengthPercentageOrAuto, + @"padding-block-start": LengthPercentageOrAuto, + @"padding-block-end": LengthPercentageOrAuto, + @"padding-inline-start": LengthPercentageOrAuto, + @"padding-inline-end": LengthPercentageOrAuto, + @"padding-block": PaddingBlock, + @"padding-inline": PaddingInline, + padding: Padding, + @"scroll-margin-top": LengthPercentageOrAuto, + @"scroll-margin-bottom": LengthPercentageOrAuto, + @"scroll-margin-left": LengthPercentageOrAuto, + @"scroll-margin-right": LengthPercentageOrAuto, + @"scroll-margin-block-start": LengthPercentageOrAuto, + @"scroll-margin-block-end": LengthPercentageOrAuto, + @"scroll-margin-inline-start": LengthPercentageOrAuto, + @"scroll-margin-inline-end": LengthPercentageOrAuto, + @"scroll-margin-block": ScrollMarginBlock, + @"scroll-margin-inline": ScrollMarginInline, + @"scroll-margin": ScrollMargin, + @"scroll-padding-top": LengthPercentageOrAuto, + @"scroll-padding-bottom": LengthPercentageOrAuto, + @"scroll-padding-left": LengthPercentageOrAuto, + @"scroll-padding-right": LengthPercentageOrAuto, + @"scroll-padding-block-start": LengthPercentageOrAuto, + @"scroll-padding-block-end": LengthPercentageOrAuto, + @"scroll-padding-inline-start": LengthPercentageOrAuto, + @"scroll-padding-inline-end": LengthPercentageOrAuto, + @"scroll-padding-block": ScrollPaddingBlock, + @"scroll-padding-inline": ScrollPaddingInline, + @"scroll-padding": ScrollPadding, + @"font-weight": FontWeight, + @"font-size": FontSize, + @"font-stretch": FontStretch, + @"font-family": BabyList(FontFamily), + @"font-style": FontStyle, + @"font-variant-caps": FontVariantCaps, + @"line-height": LineHeight, + font: Font, + @"text-decoration-color": struct { CssColor, VendorPrefix }, + @"text-emphasis-color": struct { CssColor, VendorPrefix }, + @"text-shadow": SmallList(TextShadow, 1), + direction: Direction, + composes: Composes, + @"mask-image": struct { SmallList(Image, 1), VendorPrefix }, + @"mask-mode": SmallList(MaskMode, 1), + @"mask-repeat": struct { SmallList(BackgroundRepeat, 1), VendorPrefix }, + @"mask-position-x": SmallList(HorizontalPosition, 1), + @"mask-position-y": SmallList(VerticalPosition, 1), + @"mask-position": struct { SmallList(Position, 1), VendorPrefix }, + @"mask-clip": struct { SmallList(MaskClip, 1), VendorPrefix }, + @"mask-origin": struct { SmallList(GeometryBox, 1), VendorPrefix }, + @"mask-size": struct { SmallList(BackgroundSize, 1), VendorPrefix }, + @"mask-composite": SmallList(MaskComposite, 1), + @"mask-type": MaskType, + mask: struct { SmallList(Mask, 1), VendorPrefix }, + @"mask-border-source": Image, + @"mask-border-mode": MaskBorderMode, + @"mask-border-slice": BorderImageSlice, + @"mask-border-width": Rect(BorderImageSideWidth), + @"mask-border-outset": Rect(LengthOrNumber), + @"mask-border-repeat": BorderImageRepeat, + @"mask-border": MaskBorder, + @"-webkit-mask-composite": SmallList(WebKitMaskComposite, 1), + @"mask-source-type": struct { SmallList(WebKitMaskSourceType, 1), VendorPrefix }, + @"mask-box-image": struct { BorderImage, VendorPrefix }, + @"mask-box-image-source": struct { Image, VendorPrefix }, + @"mask-box-image-slice": struct { BorderImageSlice, VendorPrefix }, + @"mask-box-image-width": struct { Rect(BorderImageSideWidth), VendorPrefix }, + @"mask-box-image-outset": struct { Rect(LengthOrNumber), VendorPrefix }, + @"mask-box-image-repeat": struct { BorderImageRepeat, VendorPrefix }, + all: CSSWideKeyword, + 
unparsed: UnparsedProperty, + custom: CustomProperty, + + pub usingnamespace PropertyImpl(); + + // Sanity check to make sure all types have the following functions: + // - deepClone() + // - eql() + // - parse() + // - toCss() + // + // We build up the error message with string concatenation so we get all the errors at once, + // instead of relying on Zig semantic analysis, which usually stops at the first error. + comptime { + const compile_error: []const u8 = compile_error: { + var compile_error: []const u8 = ""; + + if (!@hasDecl(CssColor, "deepClone")) { + compile_error = compile_error ++ @typeName(CssColor) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(CssColor, "parse")) { + compile_error = compile_error ++ @typeName(CssColor) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(CssColor, "toCss")) { + compile_error = compile_error ++ @typeName(CssColor) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(CssColor, "eql")) { + compile_error = compile_error ++ @typeName(CssColor) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(SmallList(Image, 1), "deepClone")) { + compile_error = compile_error ++ @typeName(SmallList(Image, 1)) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(SmallList(Image, 1), "parse")) { + compile_error = compile_error ++ @typeName(SmallList(Image, 1)) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(SmallList(Image, 1), "toCss")) { + compile_error = compile_error ++ @typeName(SmallList(Image, 1)) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(SmallList(Image, 1), "eql")) { + compile_error = compile_error ++ @typeName(SmallList(Image, 1)) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(SmallList(css_values.position.HorizontalPosition, 1), "deepClone")) { + compile_error = compile_error ++ @typeName(SmallList(css_values.position.HorizontalPosition, 1)) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(SmallList(css_values.position.HorizontalPosition, 1), "parse")) { + compile_error = compile_error ++ @typeName(SmallList(css_values.position.HorizontalPosition, 1)) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(SmallList(css_values.position.HorizontalPosition, 1), "toCss")) { + compile_error = compile_error ++ @typeName(SmallList(css_values.position.HorizontalPosition, 1)) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(SmallList(css_values.position.HorizontalPosition, 1), "eql")) { + compile_error = compile_error ++ @typeName(SmallList(css_values.position.HorizontalPosition, 1)) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(SmallList(css_values.position.VerticalPosition, 1), "deepClone")) { + compile_error = compile_error ++ @typeName(SmallList(css_values.position.VerticalPosition, 1)) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(SmallList(css_values.position.VerticalPosition, 1), "parse")) { + compile_error = compile_error ++ @typeName(SmallList(css_values.position.VerticalPosition, 1)) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(SmallList(css_values.position.VerticalPosition, 1), "toCss")) { + compile_error = compile_error ++ @typeName(SmallList(css_values.position.VerticalPosition, 1)) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(SmallList(css_values.position.VerticalPosition, 1), "eql")) { + compile_error = compile_error ++ @typeName(SmallList(css_values.position.VerticalPosition, 1)) ++ ": does not 
have a eql() function.\n"; + } + + if (!@hasDecl(SmallList(background.BackgroundPosition, 1), "deepClone")) { + compile_error = compile_error ++ @typeName(SmallList(background.BackgroundPosition, 1)) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(SmallList(background.BackgroundPosition, 1), "parse")) { + compile_error = compile_error ++ @typeName(SmallList(background.BackgroundPosition, 1)) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(SmallList(background.BackgroundPosition, 1), "toCss")) { + compile_error = compile_error ++ @typeName(SmallList(background.BackgroundPosition, 1)) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(SmallList(background.BackgroundPosition, 1), "eql")) { + compile_error = compile_error ++ @typeName(SmallList(background.BackgroundPosition, 1)) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(SmallList(background.BackgroundSize, 1), "deepClone")) { + compile_error = compile_error ++ @typeName(SmallList(background.BackgroundSize, 1)) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(SmallList(background.BackgroundSize, 1), "parse")) { + compile_error = compile_error ++ @typeName(SmallList(background.BackgroundSize, 1)) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(SmallList(background.BackgroundSize, 1), "toCss")) { + compile_error = compile_error ++ @typeName(SmallList(background.BackgroundSize, 1)) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(SmallList(background.BackgroundSize, 1), "eql")) { + compile_error = compile_error ++ @typeName(SmallList(background.BackgroundSize, 1)) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(SmallList(background.BackgroundRepeat, 1), "deepClone")) { + compile_error = compile_error ++ @typeName(SmallList(background.BackgroundRepeat, 1)) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(SmallList(background.BackgroundRepeat, 1), "parse")) { + compile_error = compile_error ++ @typeName(SmallList(background.BackgroundRepeat, 1)) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(SmallList(background.BackgroundRepeat, 1), "toCss")) { + compile_error = compile_error ++ @typeName(SmallList(background.BackgroundRepeat, 1)) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(SmallList(background.BackgroundRepeat, 1), "eql")) { + compile_error = compile_error ++ @typeName(SmallList(background.BackgroundRepeat, 1)) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(SmallList(background.BackgroundAttachment, 1), "deepClone")) { + compile_error = compile_error ++ @typeName(SmallList(background.BackgroundAttachment, 1)) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(SmallList(background.BackgroundAttachment, 1), "parse")) { + compile_error = compile_error ++ @typeName(SmallList(background.BackgroundAttachment, 1)) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(SmallList(background.BackgroundAttachment, 1), "toCss")) { + compile_error = compile_error ++ @typeName(SmallList(background.BackgroundAttachment, 1)) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(SmallList(background.BackgroundAttachment, 1), "eql")) { + compile_error = compile_error ++ @typeName(SmallList(background.BackgroundAttachment, 1)) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(SmallList(background.BackgroundClip, 1), "deepClone")) { + compile_error = compile_error ++ 
@typeName(SmallList(background.BackgroundClip, 1)) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(SmallList(background.BackgroundClip, 1), "parse")) { + compile_error = compile_error ++ @typeName(SmallList(background.BackgroundClip, 1)) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(SmallList(background.BackgroundClip, 1), "toCss")) { + compile_error = compile_error ++ @typeName(SmallList(background.BackgroundClip, 1)) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(SmallList(background.BackgroundClip, 1), "eql")) { + compile_error = compile_error ++ @typeName(SmallList(background.BackgroundClip, 1)) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(SmallList(background.BackgroundOrigin, 1), "deepClone")) { + compile_error = compile_error ++ @typeName(SmallList(background.BackgroundOrigin, 1)) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(SmallList(background.BackgroundOrigin, 1), "parse")) { + compile_error = compile_error ++ @typeName(SmallList(background.BackgroundOrigin, 1)) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(SmallList(background.BackgroundOrigin, 1), "toCss")) { + compile_error = compile_error ++ @typeName(SmallList(background.BackgroundOrigin, 1)) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(SmallList(background.BackgroundOrigin, 1), "eql")) { + compile_error = compile_error ++ @typeName(SmallList(background.BackgroundOrigin, 1)) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(SmallList(background.Background, 1), "deepClone")) { + compile_error = compile_error ++ @typeName(SmallList(background.Background, 1)) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(SmallList(background.Background, 1), "parse")) { + compile_error = compile_error ++ @typeName(SmallList(background.Background, 1)) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(SmallList(background.Background, 1), "toCss")) { + compile_error = compile_error ++ @typeName(SmallList(background.Background, 1)) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(SmallList(background.Background, 1), "eql")) { + compile_error = compile_error ++ @typeName(SmallList(background.Background, 1)) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(SmallList(box_shadow.BoxShadow, 1), "deepClone")) { + compile_error = compile_error ++ @typeName(SmallList(box_shadow.BoxShadow, 1)) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(SmallList(box_shadow.BoxShadow, 1), "parse")) { + compile_error = compile_error ++ @typeName(SmallList(box_shadow.BoxShadow, 1)) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(SmallList(box_shadow.BoxShadow, 1), "toCss")) { + compile_error = compile_error ++ @typeName(SmallList(box_shadow.BoxShadow, 1)) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(SmallList(box_shadow.BoxShadow, 1), "eql")) { + compile_error = compile_error ++ @typeName(SmallList(box_shadow.BoxShadow, 1)) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(css.css_values.alpha.AlphaValue, "deepClone")) { + compile_error = compile_error ++ @typeName(css.css_values.alpha.AlphaValue) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(css.css_values.alpha.AlphaValue, "parse")) { + compile_error = compile_error ++ @typeName(css.css_values.alpha.AlphaValue) ++ ": does not have a parse() function.\n"; + } + + if 
(!@hasDecl(css.css_values.alpha.AlphaValue, "toCss")) { + compile_error = compile_error ++ @typeName(css.css_values.alpha.AlphaValue) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(css.css_values.alpha.AlphaValue, "eql")) { + compile_error = compile_error ++ @typeName(css.css_values.alpha.AlphaValue) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(CssColor, "deepClone")) { + compile_error = compile_error ++ @typeName(CssColor) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(CssColor, "parse")) { + compile_error = compile_error ++ @typeName(CssColor) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(CssColor, "toCss")) { + compile_error = compile_error ++ @typeName(CssColor) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(CssColor, "eql")) { + compile_error = compile_error ++ @typeName(CssColor) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(display.Display, "deepClone")) { + compile_error = compile_error ++ @typeName(display.Display) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(display.Display, "parse")) { + compile_error = compile_error ++ @typeName(display.Display) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(display.Display, "toCss")) { + compile_error = compile_error ++ @typeName(display.Display) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(display.Display, "eql")) { + compile_error = compile_error ++ @typeName(display.Display) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(display.Visibility, "deepClone")) { + compile_error = compile_error ++ @typeName(display.Visibility) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(display.Visibility, "parse")) { + compile_error = compile_error ++ @typeName(display.Visibility) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(display.Visibility, "toCss")) { + compile_error = compile_error ++ @typeName(display.Visibility) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(display.Visibility, "eql")) { + compile_error = compile_error ++ @typeName(display.Visibility) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(size.Size, "deepClone")) { + compile_error = compile_error ++ @typeName(size.Size) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(size.Size, "parse")) { + compile_error = compile_error ++ @typeName(size.Size) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(size.Size, "toCss")) { + compile_error = compile_error ++ @typeName(size.Size) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(size.Size, "eql")) { + compile_error = compile_error ++ @typeName(size.Size) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(size.Size, "deepClone")) { + compile_error = compile_error ++ @typeName(size.Size) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(size.Size, "parse")) { + compile_error = compile_error ++ @typeName(size.Size) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(size.Size, "toCss")) { + compile_error = compile_error ++ @typeName(size.Size) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(size.Size, "eql")) { + compile_error = compile_error ++ @typeName(size.Size) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(size.Size, "deepClone")) { + compile_error = compile_error ++ @typeName(size.Size) ++ ": does not have a deepClone() function.\n"; + } + 
+ if (!@hasDecl(size.Size, "parse")) { + compile_error = compile_error ++ @typeName(size.Size) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(size.Size, "toCss")) { + compile_error = compile_error ++ @typeName(size.Size) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(size.Size, "eql")) { + compile_error = compile_error ++ @typeName(size.Size) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(size.Size, "deepClone")) { + compile_error = compile_error ++ @typeName(size.Size) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(size.Size, "parse")) { + compile_error = compile_error ++ @typeName(size.Size) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(size.Size, "toCss")) { + compile_error = compile_error ++ @typeName(size.Size) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(size.Size, "eql")) { + compile_error = compile_error ++ @typeName(size.Size) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(size.MaxSize, "deepClone")) { + compile_error = compile_error ++ @typeName(size.MaxSize) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(size.MaxSize, "parse")) { + compile_error = compile_error ++ @typeName(size.MaxSize) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(size.MaxSize, "toCss")) { + compile_error = compile_error ++ @typeName(size.MaxSize) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(size.MaxSize, "eql")) { + compile_error = compile_error ++ @typeName(size.MaxSize) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(size.MaxSize, "deepClone")) { + compile_error = compile_error ++ @typeName(size.MaxSize) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(size.MaxSize, "parse")) { + compile_error = compile_error ++ @typeName(size.MaxSize) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(size.MaxSize, "toCss")) { + compile_error = compile_error ++ @typeName(size.MaxSize) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(size.MaxSize, "eql")) { + compile_error = compile_error ++ @typeName(size.MaxSize) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(size.Size, "deepClone")) { + compile_error = compile_error ++ @typeName(size.Size) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(size.Size, "parse")) { + compile_error = compile_error ++ @typeName(size.Size) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(size.Size, "toCss")) { + compile_error = compile_error ++ @typeName(size.Size) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(size.Size, "eql")) { + compile_error = compile_error ++ @typeName(size.Size) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(size.Size, "deepClone")) { + compile_error = compile_error ++ @typeName(size.Size) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(size.Size, "parse")) { + compile_error = compile_error ++ @typeName(size.Size) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(size.Size, "toCss")) { + compile_error = compile_error ++ @typeName(size.Size) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(size.Size, "eql")) { + compile_error = compile_error ++ @typeName(size.Size) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(size.Size, "deepClone")) { + compile_error = compile_error ++ @typeName(size.Size) ++ ": does not have a deepClone() function.\n"; + } + + 
if (!@hasDecl(size.Size, "parse")) { + compile_error = compile_error ++ @typeName(size.Size) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(size.Size, "toCss")) { + compile_error = compile_error ++ @typeName(size.Size) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(size.Size, "eql")) { + compile_error = compile_error ++ @typeName(size.Size) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(size.Size, "deepClone")) { + compile_error = compile_error ++ @typeName(size.Size) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(size.Size, "parse")) { + compile_error = compile_error ++ @typeName(size.Size) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(size.Size, "toCss")) { + compile_error = compile_error ++ @typeName(size.Size) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(size.Size, "eql")) { + compile_error = compile_error ++ @typeName(size.Size) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(size.MaxSize, "deepClone")) { + compile_error = compile_error ++ @typeName(size.MaxSize) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(size.MaxSize, "parse")) { + compile_error = compile_error ++ @typeName(size.MaxSize) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(size.MaxSize, "toCss")) { + compile_error = compile_error ++ @typeName(size.MaxSize) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(size.MaxSize, "eql")) { + compile_error = compile_error ++ @typeName(size.MaxSize) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(size.MaxSize, "deepClone")) { + compile_error = compile_error ++ @typeName(size.MaxSize) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(size.MaxSize, "parse")) { + compile_error = compile_error ++ @typeName(size.MaxSize) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(size.MaxSize, "toCss")) { + compile_error = compile_error ++ @typeName(size.MaxSize) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(size.MaxSize, "eql")) { + compile_error = compile_error ++ @typeName(size.MaxSize) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(size.BoxSizing, "deepClone")) { + compile_error = compile_error ++ @typeName(size.BoxSizing) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(size.BoxSizing, "parse")) { + compile_error = compile_error ++ @typeName(size.BoxSizing) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(size.BoxSizing, "toCss")) { + compile_error = compile_error ++ @typeName(size.BoxSizing) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(size.BoxSizing, "eql")) { + compile_error = compile_error ++ @typeName(size.BoxSizing) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(size.AspectRatio, "deepClone")) { + compile_error = compile_error ++ @typeName(size.AspectRatio) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(size.AspectRatio, "parse")) { + compile_error = compile_error ++ @typeName(size.AspectRatio) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(size.AspectRatio, "toCss")) { + compile_error = compile_error ++ @typeName(size.AspectRatio) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(size.AspectRatio, "eql")) { + compile_error = compile_error ++ @typeName(size.AspectRatio) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(overflow.Overflow, "deepClone")) { + 
compile_error = compile_error ++ @typeName(overflow.Overflow) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(overflow.Overflow, "parse")) { + compile_error = compile_error ++ @typeName(overflow.Overflow) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(overflow.Overflow, "toCss")) { + compile_error = compile_error ++ @typeName(overflow.Overflow) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(overflow.Overflow, "eql")) { + compile_error = compile_error ++ @typeName(overflow.Overflow) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(overflow.OverflowKeyword, "deepClone")) { + compile_error = compile_error ++ @typeName(overflow.OverflowKeyword) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(overflow.OverflowKeyword, "parse")) { + compile_error = compile_error ++ @typeName(overflow.OverflowKeyword) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(overflow.OverflowKeyword, "toCss")) { + compile_error = compile_error ++ @typeName(overflow.OverflowKeyword) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(overflow.OverflowKeyword, "eql")) { + compile_error = compile_error ++ @typeName(overflow.OverflowKeyword) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(overflow.OverflowKeyword, "deepClone")) { + compile_error = compile_error ++ @typeName(overflow.OverflowKeyword) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(overflow.OverflowKeyword, "parse")) { + compile_error = compile_error ++ @typeName(overflow.OverflowKeyword) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(overflow.OverflowKeyword, "toCss")) { + compile_error = compile_error ++ @typeName(overflow.OverflowKeyword) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(overflow.OverflowKeyword, "eql")) { + compile_error = compile_error ++ @typeName(overflow.OverflowKeyword) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(overflow.TextOverflow, "deepClone")) { + compile_error = compile_error ++ @typeName(overflow.TextOverflow) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(overflow.TextOverflow, "parse")) { + compile_error = compile_error ++ @typeName(overflow.TextOverflow) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(overflow.TextOverflow, "toCss")) { + compile_error = compile_error ++ @typeName(overflow.TextOverflow) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(overflow.TextOverflow, "eql")) { + compile_error = compile_error ++ @typeName(overflow.TextOverflow) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(position.Position, "deepClone")) { + compile_error = compile_error ++ @typeName(position.Position) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(position.Position, "parse")) { + compile_error = compile_error ++ @typeName(position.Position) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(position.Position, "toCss")) { + compile_error = compile_error ++ @typeName(position.Position) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(position.Position, "eql")) { + compile_error = compile_error ++ @typeName(position.Position) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "deepClone")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, 
"parse")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "toCss")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "eql")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "deepClone")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "parse")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "toCss")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "eql")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "deepClone")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "parse")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "toCss")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "eql")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "deepClone")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "parse")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "toCss")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "eql")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "deepClone")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "parse")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "toCss")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "eql")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "deepClone")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "parse")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a parse() 
function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "toCss")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "eql")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "deepClone")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "parse")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "toCss")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "eql")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "deepClone")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "parse")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "toCss")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "eql")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(margin_padding.InsetBlock, "deepClone")) { + compile_error = compile_error ++ @typeName(margin_padding.InsetBlock) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(margin_padding.InsetBlock, "parse")) { + compile_error = compile_error ++ @typeName(margin_padding.InsetBlock) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(margin_padding.InsetBlock, "toCss")) { + compile_error = compile_error ++ @typeName(margin_padding.InsetBlock) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(margin_padding.InsetBlock, "eql")) { + compile_error = compile_error ++ @typeName(margin_padding.InsetBlock) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(margin_padding.InsetInline, "deepClone")) { + compile_error = compile_error ++ @typeName(margin_padding.InsetInline) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(margin_padding.InsetInline, "parse")) { + compile_error = compile_error ++ @typeName(margin_padding.InsetInline) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(margin_padding.InsetInline, "toCss")) { + compile_error = compile_error ++ @typeName(margin_padding.InsetInline) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(margin_padding.InsetInline, "eql")) { + compile_error = compile_error ++ @typeName(margin_padding.InsetInline) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(margin_padding.Inset, "deepClone")) { + compile_error = compile_error ++ @typeName(margin_padding.Inset) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(margin_padding.Inset, "parse")) { + compile_error = compile_error ++ @typeName(margin_padding.Inset) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(margin_padding.Inset, 
"toCss")) { + compile_error = compile_error ++ @typeName(margin_padding.Inset) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(margin_padding.Inset, "eql")) { + compile_error = compile_error ++ @typeName(margin_padding.Inset) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(css.css_values.size.Size2D(Length), "deepClone")) { + compile_error = compile_error ++ @typeName(css.css_values.size.Size2D(Length)) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(css.css_values.size.Size2D(Length), "parse")) { + compile_error = compile_error ++ @typeName(css.css_values.size.Size2D(Length)) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(css.css_values.size.Size2D(Length), "toCss")) { + compile_error = compile_error ++ @typeName(css.css_values.size.Size2D(Length)) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(css.css_values.size.Size2D(Length), "eql")) { + compile_error = compile_error ++ @typeName(css.css_values.size.Size2D(Length)) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(CssColor, "deepClone")) { + compile_error = compile_error ++ @typeName(CssColor) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(CssColor, "parse")) { + compile_error = compile_error ++ @typeName(CssColor) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(CssColor, "toCss")) { + compile_error = compile_error ++ @typeName(CssColor) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(CssColor, "eql")) { + compile_error = compile_error ++ @typeName(CssColor) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(CssColor, "deepClone")) { + compile_error = compile_error ++ @typeName(CssColor) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(CssColor, "parse")) { + compile_error = compile_error ++ @typeName(CssColor) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(CssColor, "toCss")) { + compile_error = compile_error ++ @typeName(CssColor) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(CssColor, "eql")) { + compile_error = compile_error ++ @typeName(CssColor) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(CssColor, "deepClone")) { + compile_error = compile_error ++ @typeName(CssColor) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(CssColor, "parse")) { + compile_error = compile_error ++ @typeName(CssColor) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(CssColor, "toCss")) { + compile_error = compile_error ++ @typeName(CssColor) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(CssColor, "eql")) { + compile_error = compile_error ++ @typeName(CssColor) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(CssColor, "deepClone")) { + compile_error = compile_error ++ @typeName(CssColor) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(CssColor, "parse")) { + compile_error = compile_error ++ @typeName(CssColor) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(CssColor, "toCss")) { + compile_error = compile_error ++ @typeName(CssColor) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(CssColor, "eql")) { + compile_error = compile_error ++ @typeName(CssColor) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(CssColor, "deepClone")) { + compile_error = compile_error ++ @typeName(CssColor) ++ ": does not have a deepClone() function.\n"; + } + + if 
(!@hasDecl(CssColor, "parse")) { + compile_error = compile_error ++ @typeName(CssColor) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(CssColor, "toCss")) { + compile_error = compile_error ++ @typeName(CssColor) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(CssColor, "eql")) { + compile_error = compile_error ++ @typeName(CssColor) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(CssColor, "deepClone")) { + compile_error = compile_error ++ @typeName(CssColor) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(CssColor, "parse")) { + compile_error = compile_error ++ @typeName(CssColor) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(CssColor, "toCss")) { + compile_error = compile_error ++ @typeName(CssColor) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(CssColor, "eql")) { + compile_error = compile_error ++ @typeName(CssColor) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(CssColor, "deepClone")) { + compile_error = compile_error ++ @typeName(CssColor) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(CssColor, "parse")) { + compile_error = compile_error ++ @typeName(CssColor) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(CssColor, "toCss")) { + compile_error = compile_error ++ @typeName(CssColor) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(CssColor, "eql")) { + compile_error = compile_error ++ @typeName(CssColor) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(CssColor, "deepClone")) { + compile_error = compile_error ++ @typeName(CssColor) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(CssColor, "parse")) { + compile_error = compile_error ++ @typeName(CssColor) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(CssColor, "toCss")) { + compile_error = compile_error ++ @typeName(CssColor) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(CssColor, "eql")) { + compile_error = compile_error ++ @typeName(CssColor) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(border.LineStyle, "deepClone")) { + compile_error = compile_error ++ @typeName(border.LineStyle) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(border.LineStyle, "parse")) { + compile_error = compile_error ++ @typeName(border.LineStyle) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(border.LineStyle, "toCss")) { + compile_error = compile_error ++ @typeName(border.LineStyle) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(border.LineStyle, "eql")) { + compile_error = compile_error ++ @typeName(border.LineStyle) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(border.LineStyle, "deepClone")) { + compile_error = compile_error ++ @typeName(border.LineStyle) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(border.LineStyle, "parse")) { + compile_error = compile_error ++ @typeName(border.LineStyle) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(border.LineStyle, "toCss")) { + compile_error = compile_error ++ @typeName(border.LineStyle) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(border.LineStyle, "eql")) { + compile_error = compile_error ++ @typeName(border.LineStyle) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(border.LineStyle, "deepClone")) { + compile_error = compile_error ++ @typeName(border.LineStyle) ++ ": does 
not have a deepClone() function.\n"; + } + + if (!@hasDecl(border.LineStyle, "parse")) { + compile_error = compile_error ++ @typeName(border.LineStyle) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(border.LineStyle, "toCss")) { + compile_error = compile_error ++ @typeName(border.LineStyle) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(border.LineStyle, "eql")) { + compile_error = compile_error ++ @typeName(border.LineStyle) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(border.LineStyle, "deepClone")) { + compile_error = compile_error ++ @typeName(border.LineStyle) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(border.LineStyle, "parse")) { + compile_error = compile_error ++ @typeName(border.LineStyle) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(border.LineStyle, "toCss")) { + compile_error = compile_error ++ @typeName(border.LineStyle) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(border.LineStyle, "eql")) { + compile_error = compile_error ++ @typeName(border.LineStyle) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(border.LineStyle, "deepClone")) { + compile_error = compile_error ++ @typeName(border.LineStyle) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(border.LineStyle, "parse")) { + compile_error = compile_error ++ @typeName(border.LineStyle) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(border.LineStyle, "toCss")) { + compile_error = compile_error ++ @typeName(border.LineStyle) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(border.LineStyle, "eql")) { + compile_error = compile_error ++ @typeName(border.LineStyle) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(border.LineStyle, "deepClone")) { + compile_error = compile_error ++ @typeName(border.LineStyle) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(border.LineStyle, "parse")) { + compile_error = compile_error ++ @typeName(border.LineStyle) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(border.LineStyle, "toCss")) { + compile_error = compile_error ++ @typeName(border.LineStyle) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(border.LineStyle, "eql")) { + compile_error = compile_error ++ @typeName(border.LineStyle) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(border.LineStyle, "deepClone")) { + compile_error = compile_error ++ @typeName(border.LineStyle) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(border.LineStyle, "parse")) { + compile_error = compile_error ++ @typeName(border.LineStyle) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(border.LineStyle, "toCss")) { + compile_error = compile_error ++ @typeName(border.LineStyle) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(border.LineStyle, "eql")) { + compile_error = compile_error ++ @typeName(border.LineStyle) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(border.LineStyle, "deepClone")) { + compile_error = compile_error ++ @typeName(border.LineStyle) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(border.LineStyle, "parse")) { + compile_error = compile_error ++ @typeName(border.LineStyle) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(border.LineStyle, "toCss")) { + compile_error = compile_error ++ @typeName(border.LineStyle) ++ ": does not have a toCss() function.\n"; 
+ } + + if (!@hasDecl(border.LineStyle, "eql")) { + compile_error = compile_error ++ @typeName(border.LineStyle) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(BorderSideWidth, "deepClone")) { + compile_error = compile_error ++ @typeName(BorderSideWidth) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(BorderSideWidth, "parse")) { + compile_error = compile_error ++ @typeName(BorderSideWidth) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(BorderSideWidth, "toCss")) { + compile_error = compile_error ++ @typeName(BorderSideWidth) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(BorderSideWidth, "eql")) { + compile_error = compile_error ++ @typeName(BorderSideWidth) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(BorderSideWidth, "deepClone")) { + compile_error = compile_error ++ @typeName(BorderSideWidth) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(BorderSideWidth, "parse")) { + compile_error = compile_error ++ @typeName(BorderSideWidth) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(BorderSideWidth, "toCss")) { + compile_error = compile_error ++ @typeName(BorderSideWidth) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(BorderSideWidth, "eql")) { + compile_error = compile_error ++ @typeName(BorderSideWidth) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(BorderSideWidth, "deepClone")) { + compile_error = compile_error ++ @typeName(BorderSideWidth) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(BorderSideWidth, "parse")) { + compile_error = compile_error ++ @typeName(BorderSideWidth) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(BorderSideWidth, "toCss")) { + compile_error = compile_error ++ @typeName(BorderSideWidth) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(BorderSideWidth, "eql")) { + compile_error = compile_error ++ @typeName(BorderSideWidth) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(BorderSideWidth, "deepClone")) { + compile_error = compile_error ++ @typeName(BorderSideWidth) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(BorderSideWidth, "parse")) { + compile_error = compile_error ++ @typeName(BorderSideWidth) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(BorderSideWidth, "toCss")) { + compile_error = compile_error ++ @typeName(BorderSideWidth) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(BorderSideWidth, "eql")) { + compile_error = compile_error ++ @typeName(BorderSideWidth) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(BorderSideWidth, "deepClone")) { + compile_error = compile_error ++ @typeName(BorderSideWidth) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(BorderSideWidth, "parse")) { + compile_error = compile_error ++ @typeName(BorderSideWidth) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(BorderSideWidth, "toCss")) { + compile_error = compile_error ++ @typeName(BorderSideWidth) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(BorderSideWidth, "eql")) { + compile_error = compile_error ++ @typeName(BorderSideWidth) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(BorderSideWidth, "deepClone")) { + compile_error = compile_error ++ @typeName(BorderSideWidth) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(BorderSideWidth, "parse")) { + compile_error = 
compile_error ++ @typeName(BorderSideWidth) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(BorderSideWidth, "toCss")) { + compile_error = compile_error ++ @typeName(BorderSideWidth) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(BorderSideWidth, "eql")) { + compile_error = compile_error ++ @typeName(BorderSideWidth) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(BorderSideWidth, "deepClone")) { + compile_error = compile_error ++ @typeName(BorderSideWidth) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(BorderSideWidth, "parse")) { + compile_error = compile_error ++ @typeName(BorderSideWidth) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(BorderSideWidth, "toCss")) { + compile_error = compile_error ++ @typeName(BorderSideWidth) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(BorderSideWidth, "eql")) { + compile_error = compile_error ++ @typeName(BorderSideWidth) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(BorderSideWidth, "deepClone")) { + compile_error = compile_error ++ @typeName(BorderSideWidth) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(BorderSideWidth, "parse")) { + compile_error = compile_error ++ @typeName(BorderSideWidth) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(BorderSideWidth, "toCss")) { + compile_error = compile_error ++ @typeName(BorderSideWidth) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(BorderSideWidth, "eql")) { + compile_error = compile_error ++ @typeName(BorderSideWidth) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(Size2D(LengthPercentage), "deepClone")) { + compile_error = compile_error ++ @typeName(Size2D(LengthPercentage)) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(Size2D(LengthPercentage), "parse")) { + compile_error = compile_error ++ @typeName(Size2D(LengthPercentage)) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(Size2D(LengthPercentage), "toCss")) { + compile_error = compile_error ++ @typeName(Size2D(LengthPercentage)) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(Size2D(LengthPercentage), "eql")) { + compile_error = compile_error ++ @typeName(Size2D(LengthPercentage)) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(Size2D(LengthPercentage), "deepClone")) { + compile_error = compile_error ++ @typeName(Size2D(LengthPercentage)) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(Size2D(LengthPercentage), "parse")) { + compile_error = compile_error ++ @typeName(Size2D(LengthPercentage)) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(Size2D(LengthPercentage), "toCss")) { + compile_error = compile_error ++ @typeName(Size2D(LengthPercentage)) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(Size2D(LengthPercentage), "eql")) { + compile_error = compile_error ++ @typeName(Size2D(LengthPercentage)) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(Size2D(LengthPercentage), "deepClone")) { + compile_error = compile_error ++ @typeName(Size2D(LengthPercentage)) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(Size2D(LengthPercentage), "parse")) { + compile_error = compile_error ++ @typeName(Size2D(LengthPercentage)) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(Size2D(LengthPercentage), "toCss")) { + compile_error = compile_error ++ 
@typeName(Size2D(LengthPercentage)) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(Size2D(LengthPercentage), "eql")) { + compile_error = compile_error ++ @typeName(Size2D(LengthPercentage)) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(Size2D(LengthPercentage), "deepClone")) { + compile_error = compile_error ++ @typeName(Size2D(LengthPercentage)) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(Size2D(LengthPercentage), "parse")) { + compile_error = compile_error ++ @typeName(Size2D(LengthPercentage)) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(Size2D(LengthPercentage), "toCss")) { + compile_error = compile_error ++ @typeName(Size2D(LengthPercentage)) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(Size2D(LengthPercentage), "eql")) { + compile_error = compile_error ++ @typeName(Size2D(LengthPercentage)) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(Size2D(LengthPercentage), "deepClone")) { + compile_error = compile_error ++ @typeName(Size2D(LengthPercentage)) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(Size2D(LengthPercentage), "parse")) { + compile_error = compile_error ++ @typeName(Size2D(LengthPercentage)) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(Size2D(LengthPercentage), "toCss")) { + compile_error = compile_error ++ @typeName(Size2D(LengthPercentage)) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(Size2D(LengthPercentage), "eql")) { + compile_error = compile_error ++ @typeName(Size2D(LengthPercentage)) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(Size2D(LengthPercentage), "deepClone")) { + compile_error = compile_error ++ @typeName(Size2D(LengthPercentage)) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(Size2D(LengthPercentage), "parse")) { + compile_error = compile_error ++ @typeName(Size2D(LengthPercentage)) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(Size2D(LengthPercentage), "toCss")) { + compile_error = compile_error ++ @typeName(Size2D(LengthPercentage)) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(Size2D(LengthPercentage), "eql")) { + compile_error = compile_error ++ @typeName(Size2D(LengthPercentage)) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(Size2D(LengthPercentage), "deepClone")) { + compile_error = compile_error ++ @typeName(Size2D(LengthPercentage)) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(Size2D(LengthPercentage), "parse")) { + compile_error = compile_error ++ @typeName(Size2D(LengthPercentage)) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(Size2D(LengthPercentage), "toCss")) { + compile_error = compile_error ++ @typeName(Size2D(LengthPercentage)) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(Size2D(LengthPercentage), "eql")) { + compile_error = compile_error ++ @typeName(Size2D(LengthPercentage)) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(Size2D(LengthPercentage), "deepClone")) { + compile_error = compile_error ++ @typeName(Size2D(LengthPercentage)) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(Size2D(LengthPercentage), "parse")) { + compile_error = compile_error ++ @typeName(Size2D(LengthPercentage)) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(Size2D(LengthPercentage), "toCss")) { + compile_error = compile_error ++ 
@typeName(Size2D(LengthPercentage)) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(Size2D(LengthPercentage), "eql")) { + compile_error = compile_error ++ @typeName(Size2D(LengthPercentage)) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(BorderRadius, "deepClone")) { + compile_error = compile_error ++ @typeName(BorderRadius) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(BorderRadius, "parse")) { + compile_error = compile_error ++ @typeName(BorderRadius) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(BorderRadius, "toCss")) { + compile_error = compile_error ++ @typeName(BorderRadius) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(BorderRadius, "eql")) { + compile_error = compile_error ++ @typeName(BorderRadius) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(Image, "deepClone")) { + compile_error = compile_error ++ @typeName(Image) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(Image, "parse")) { + compile_error = compile_error ++ @typeName(Image) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(Image, "toCss")) { + compile_error = compile_error ++ @typeName(Image) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(Image, "eql")) { + compile_error = compile_error ++ @typeName(Image) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(Rect(LengthOrNumber), "deepClone")) { + compile_error = compile_error ++ @typeName(Rect(LengthOrNumber)) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(Rect(LengthOrNumber), "parse")) { + compile_error = compile_error ++ @typeName(Rect(LengthOrNumber)) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(Rect(LengthOrNumber), "toCss")) { + compile_error = compile_error ++ @typeName(Rect(LengthOrNumber)) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(Rect(LengthOrNumber), "eql")) { + compile_error = compile_error ++ @typeName(Rect(LengthOrNumber)) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(BorderImageRepeat, "deepClone")) { + compile_error = compile_error ++ @typeName(BorderImageRepeat) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(BorderImageRepeat, "parse")) { + compile_error = compile_error ++ @typeName(BorderImageRepeat) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(BorderImageRepeat, "toCss")) { + compile_error = compile_error ++ @typeName(BorderImageRepeat) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(BorderImageRepeat, "eql")) { + compile_error = compile_error ++ @typeName(BorderImageRepeat) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(Rect(BorderImageSideWidth), "deepClone")) { + compile_error = compile_error ++ @typeName(Rect(BorderImageSideWidth)) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(Rect(BorderImageSideWidth), "parse")) { + compile_error = compile_error ++ @typeName(Rect(BorderImageSideWidth)) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(Rect(BorderImageSideWidth), "toCss")) { + compile_error = compile_error ++ @typeName(Rect(BorderImageSideWidth)) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(Rect(BorderImageSideWidth), "eql")) { + compile_error = compile_error ++ @typeName(Rect(BorderImageSideWidth)) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(BorderImageSlice, "deepClone")) { + compile_error = compile_error ++ 
@typeName(BorderImageSlice) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(BorderImageSlice, "parse")) { + compile_error = compile_error ++ @typeName(BorderImageSlice) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(BorderImageSlice, "toCss")) { + compile_error = compile_error ++ @typeName(BorderImageSlice) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(BorderImageSlice, "eql")) { + compile_error = compile_error ++ @typeName(BorderImageSlice) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(BorderImage, "deepClone")) { + compile_error = compile_error ++ @typeName(BorderImage) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(BorderImage, "parse")) { + compile_error = compile_error ++ @typeName(BorderImage) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(BorderImage, "toCss")) { + compile_error = compile_error ++ @typeName(BorderImage) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(BorderImage, "eql")) { + compile_error = compile_error ++ @typeName(BorderImage) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(BorderColor, "deepClone")) { + compile_error = compile_error ++ @typeName(BorderColor) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(BorderColor, "parse")) { + compile_error = compile_error ++ @typeName(BorderColor) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(BorderColor, "toCss")) { + compile_error = compile_error ++ @typeName(BorderColor) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(BorderColor, "eql")) { + compile_error = compile_error ++ @typeName(BorderColor) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(BorderStyle, "deepClone")) { + compile_error = compile_error ++ @typeName(BorderStyle) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(BorderStyle, "parse")) { + compile_error = compile_error ++ @typeName(BorderStyle) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(BorderStyle, "toCss")) { + compile_error = compile_error ++ @typeName(BorderStyle) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(BorderStyle, "eql")) { + compile_error = compile_error ++ @typeName(BorderStyle) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(BorderWidth, "deepClone")) { + compile_error = compile_error ++ @typeName(BorderWidth) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(BorderWidth, "parse")) { + compile_error = compile_error ++ @typeName(BorderWidth) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(BorderWidth, "toCss")) { + compile_error = compile_error ++ @typeName(BorderWidth) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(BorderWidth, "eql")) { + compile_error = compile_error ++ @typeName(BorderWidth) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(BorderBlockColor, "deepClone")) { + compile_error = compile_error ++ @typeName(BorderBlockColor) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(BorderBlockColor, "parse")) { + compile_error = compile_error ++ @typeName(BorderBlockColor) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(BorderBlockColor, "toCss")) { + compile_error = compile_error ++ @typeName(BorderBlockColor) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(BorderBlockColor, "eql")) { + compile_error = compile_error ++ @typeName(BorderBlockColor) ++ ": 
does not have a eql() function.\n"; + } + + if (!@hasDecl(BorderBlockStyle, "deepClone")) { + compile_error = compile_error ++ @typeName(BorderBlockStyle) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(BorderBlockStyle, "parse")) { + compile_error = compile_error ++ @typeName(BorderBlockStyle) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(BorderBlockStyle, "toCss")) { + compile_error = compile_error ++ @typeName(BorderBlockStyle) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(BorderBlockStyle, "eql")) { + compile_error = compile_error ++ @typeName(BorderBlockStyle) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(BorderBlockWidth, "deepClone")) { + compile_error = compile_error ++ @typeName(BorderBlockWidth) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(BorderBlockWidth, "parse")) { + compile_error = compile_error ++ @typeName(BorderBlockWidth) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(BorderBlockWidth, "toCss")) { + compile_error = compile_error ++ @typeName(BorderBlockWidth) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(BorderBlockWidth, "eql")) { + compile_error = compile_error ++ @typeName(BorderBlockWidth) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(BorderInlineColor, "deepClone")) { + compile_error = compile_error ++ @typeName(BorderInlineColor) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(BorderInlineColor, "parse")) { + compile_error = compile_error ++ @typeName(BorderInlineColor) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(BorderInlineColor, "toCss")) { + compile_error = compile_error ++ @typeName(BorderInlineColor) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(BorderInlineColor, "eql")) { + compile_error = compile_error ++ @typeName(BorderInlineColor) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(BorderInlineStyle, "deepClone")) { + compile_error = compile_error ++ @typeName(BorderInlineStyle) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(BorderInlineStyle, "parse")) { + compile_error = compile_error ++ @typeName(BorderInlineStyle) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(BorderInlineStyle, "toCss")) { + compile_error = compile_error ++ @typeName(BorderInlineStyle) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(BorderInlineStyle, "eql")) { + compile_error = compile_error ++ @typeName(BorderInlineStyle) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(BorderInlineWidth, "deepClone")) { + compile_error = compile_error ++ @typeName(BorderInlineWidth) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(BorderInlineWidth, "parse")) { + compile_error = compile_error ++ @typeName(BorderInlineWidth) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(BorderInlineWidth, "toCss")) { + compile_error = compile_error ++ @typeName(BorderInlineWidth) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(BorderInlineWidth, "eql")) { + compile_error = compile_error ++ @typeName(BorderInlineWidth) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(Border, "deepClone")) { + compile_error = compile_error ++ @typeName(Border) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(Border, "parse")) { + compile_error = compile_error ++ @typeName(Border) ++ ": does not have a parse() function.\n"; + } + + 
if (!@hasDecl(Border, "toCss")) { + compile_error = compile_error ++ @typeName(Border) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(Border, "eql")) { + compile_error = compile_error ++ @typeName(Border) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(BorderTop, "deepClone")) { + compile_error = compile_error ++ @typeName(BorderTop) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(BorderTop, "parse")) { + compile_error = compile_error ++ @typeName(BorderTop) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(BorderTop, "toCss")) { + compile_error = compile_error ++ @typeName(BorderTop) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(BorderTop, "eql")) { + compile_error = compile_error ++ @typeName(BorderTop) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(BorderBottom, "deepClone")) { + compile_error = compile_error ++ @typeName(BorderBottom) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(BorderBottom, "parse")) { + compile_error = compile_error ++ @typeName(BorderBottom) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(BorderBottom, "toCss")) { + compile_error = compile_error ++ @typeName(BorderBottom) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(BorderBottom, "eql")) { + compile_error = compile_error ++ @typeName(BorderBottom) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(BorderLeft, "deepClone")) { + compile_error = compile_error ++ @typeName(BorderLeft) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(BorderLeft, "parse")) { + compile_error = compile_error ++ @typeName(BorderLeft) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(BorderLeft, "toCss")) { + compile_error = compile_error ++ @typeName(BorderLeft) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(BorderLeft, "eql")) { + compile_error = compile_error ++ @typeName(BorderLeft) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(BorderRight, "deepClone")) { + compile_error = compile_error ++ @typeName(BorderRight) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(BorderRight, "parse")) { + compile_error = compile_error ++ @typeName(BorderRight) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(BorderRight, "toCss")) { + compile_error = compile_error ++ @typeName(BorderRight) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(BorderRight, "eql")) { + compile_error = compile_error ++ @typeName(BorderRight) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(BorderBlock, "deepClone")) { + compile_error = compile_error ++ @typeName(BorderBlock) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(BorderBlock, "parse")) { + compile_error = compile_error ++ @typeName(BorderBlock) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(BorderBlock, "toCss")) { + compile_error = compile_error ++ @typeName(BorderBlock) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(BorderBlock, "eql")) { + compile_error = compile_error ++ @typeName(BorderBlock) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(BorderBlockStart, "deepClone")) { + compile_error = compile_error ++ @typeName(BorderBlockStart) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(BorderBlockStart, "parse")) { + compile_error = compile_error ++ @typeName(BorderBlockStart) ++ ": does not have a 
parse() function.\n"; + } + + if (!@hasDecl(BorderBlockStart, "toCss")) { + compile_error = compile_error ++ @typeName(BorderBlockStart) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(BorderBlockStart, "eql")) { + compile_error = compile_error ++ @typeName(BorderBlockStart) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(BorderBlockEnd, "deepClone")) { + compile_error = compile_error ++ @typeName(BorderBlockEnd) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(BorderBlockEnd, "parse")) { + compile_error = compile_error ++ @typeName(BorderBlockEnd) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(BorderBlockEnd, "toCss")) { + compile_error = compile_error ++ @typeName(BorderBlockEnd) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(BorderBlockEnd, "eql")) { + compile_error = compile_error ++ @typeName(BorderBlockEnd) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(BorderInline, "deepClone")) { + compile_error = compile_error ++ @typeName(BorderInline) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(BorderInline, "parse")) { + compile_error = compile_error ++ @typeName(BorderInline) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(BorderInline, "toCss")) { + compile_error = compile_error ++ @typeName(BorderInline) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(BorderInline, "eql")) { + compile_error = compile_error ++ @typeName(BorderInline) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(BorderInlineStart, "deepClone")) { + compile_error = compile_error ++ @typeName(BorderInlineStart) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(BorderInlineStart, "parse")) { + compile_error = compile_error ++ @typeName(BorderInlineStart) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(BorderInlineStart, "toCss")) { + compile_error = compile_error ++ @typeName(BorderInlineStart) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(BorderInlineStart, "eql")) { + compile_error = compile_error ++ @typeName(BorderInlineStart) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(BorderInlineEnd, "deepClone")) { + compile_error = compile_error ++ @typeName(BorderInlineEnd) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(BorderInlineEnd, "parse")) { + compile_error = compile_error ++ @typeName(BorderInlineEnd) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(BorderInlineEnd, "toCss")) { + compile_error = compile_error ++ @typeName(BorderInlineEnd) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(BorderInlineEnd, "eql")) { + compile_error = compile_error ++ @typeName(BorderInlineEnd) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(Outline, "deepClone")) { + compile_error = compile_error ++ @typeName(Outline) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(Outline, "parse")) { + compile_error = compile_error ++ @typeName(Outline) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(Outline, "toCss")) { + compile_error = compile_error ++ @typeName(Outline) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(Outline, "eql")) { + compile_error = compile_error ++ @typeName(Outline) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(CssColor, "deepClone")) { + compile_error = compile_error ++ @typeName(CssColor) ++ ": does not have a deepClone() 
function.\n"; + } + + if (!@hasDecl(CssColor, "parse")) { + compile_error = compile_error ++ @typeName(CssColor) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(CssColor, "toCss")) { + compile_error = compile_error ++ @typeName(CssColor) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(CssColor, "eql")) { + compile_error = compile_error ++ @typeName(CssColor) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(OutlineStyle, "deepClone")) { + compile_error = compile_error ++ @typeName(OutlineStyle) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(OutlineStyle, "parse")) { + compile_error = compile_error ++ @typeName(OutlineStyle) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(OutlineStyle, "toCss")) { + compile_error = compile_error ++ @typeName(OutlineStyle) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(OutlineStyle, "eql")) { + compile_error = compile_error ++ @typeName(OutlineStyle) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(BorderSideWidth, "deepClone")) { + compile_error = compile_error ++ @typeName(BorderSideWidth) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(BorderSideWidth, "parse")) { + compile_error = compile_error ++ @typeName(BorderSideWidth) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(BorderSideWidth, "toCss")) { + compile_error = compile_error ++ @typeName(BorderSideWidth) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(BorderSideWidth, "eql")) { + compile_error = compile_error ++ @typeName(BorderSideWidth) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(FlexDirection, "deepClone")) { + compile_error = compile_error ++ @typeName(FlexDirection) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(FlexDirection, "parse")) { + compile_error = compile_error ++ @typeName(FlexDirection) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(FlexDirection, "toCss")) { + compile_error = compile_error ++ @typeName(FlexDirection) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(FlexDirection, "eql")) { + compile_error = compile_error ++ @typeName(FlexDirection) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(FlexWrap, "deepClone")) { + compile_error = compile_error ++ @typeName(FlexWrap) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(FlexWrap, "parse")) { + compile_error = compile_error ++ @typeName(FlexWrap) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(FlexWrap, "toCss")) { + compile_error = compile_error ++ @typeName(FlexWrap) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(FlexWrap, "eql")) { + compile_error = compile_error ++ @typeName(FlexWrap) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(FlexFlow, "deepClone")) { + compile_error = compile_error ++ @typeName(FlexFlow) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(FlexFlow, "parse")) { + compile_error = compile_error ++ @typeName(FlexFlow) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(FlexFlow, "toCss")) { + compile_error = compile_error ++ @typeName(FlexFlow) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(FlexFlow, "eql")) { + compile_error = compile_error ++ @typeName(FlexFlow) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "deepClone")) { + compile_error = compile_error ++ 
@typeName(LengthPercentageOrAuto) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "parse")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "toCss")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "eql")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(Flex, "deepClone")) { + compile_error = compile_error ++ @typeName(Flex) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(Flex, "parse")) { + compile_error = compile_error ++ @typeName(Flex) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(Flex, "toCss")) { + compile_error = compile_error ++ @typeName(Flex) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(Flex, "eql")) { + compile_error = compile_error ++ @typeName(Flex) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(AlignContent, "deepClone")) { + compile_error = compile_error ++ @typeName(AlignContent) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(AlignContent, "parse")) { + compile_error = compile_error ++ @typeName(AlignContent) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(AlignContent, "toCss")) { + compile_error = compile_error ++ @typeName(AlignContent) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(AlignContent, "eql")) { + compile_error = compile_error ++ @typeName(AlignContent) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(JustifyContent, "deepClone")) { + compile_error = compile_error ++ @typeName(JustifyContent) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(JustifyContent, "parse")) { + compile_error = compile_error ++ @typeName(JustifyContent) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(JustifyContent, "toCss")) { + compile_error = compile_error ++ @typeName(JustifyContent) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(JustifyContent, "eql")) { + compile_error = compile_error ++ @typeName(JustifyContent) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(PlaceContent, "deepClone")) { + compile_error = compile_error ++ @typeName(PlaceContent) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(PlaceContent, "parse")) { + compile_error = compile_error ++ @typeName(PlaceContent) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(PlaceContent, "toCss")) { + compile_error = compile_error ++ @typeName(PlaceContent) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(PlaceContent, "eql")) { + compile_error = compile_error ++ @typeName(PlaceContent) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(AlignSelf, "deepClone")) { + compile_error = compile_error ++ @typeName(AlignSelf) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(AlignSelf, "parse")) { + compile_error = compile_error ++ @typeName(AlignSelf) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(AlignSelf, "toCss")) { + compile_error = compile_error ++ @typeName(AlignSelf) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(AlignSelf, "eql")) { + compile_error = compile_error ++ @typeName(AlignSelf) ++ ": does not have a eql() 
function.\n"; + } + + if (!@hasDecl(JustifySelf, "deepClone")) { + compile_error = compile_error ++ @typeName(JustifySelf) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(JustifySelf, "parse")) { + compile_error = compile_error ++ @typeName(JustifySelf) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(JustifySelf, "toCss")) { + compile_error = compile_error ++ @typeName(JustifySelf) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(JustifySelf, "eql")) { + compile_error = compile_error ++ @typeName(JustifySelf) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(PlaceSelf, "deepClone")) { + compile_error = compile_error ++ @typeName(PlaceSelf) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(PlaceSelf, "parse")) { + compile_error = compile_error ++ @typeName(PlaceSelf) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(PlaceSelf, "toCss")) { + compile_error = compile_error ++ @typeName(PlaceSelf) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(PlaceSelf, "eql")) { + compile_error = compile_error ++ @typeName(PlaceSelf) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(AlignItems, "deepClone")) { + compile_error = compile_error ++ @typeName(AlignItems) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(AlignItems, "parse")) { + compile_error = compile_error ++ @typeName(AlignItems) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(AlignItems, "toCss")) { + compile_error = compile_error ++ @typeName(AlignItems) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(AlignItems, "eql")) { + compile_error = compile_error ++ @typeName(AlignItems) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(JustifyItems, "deepClone")) { + compile_error = compile_error ++ @typeName(JustifyItems) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(JustifyItems, "parse")) { + compile_error = compile_error ++ @typeName(JustifyItems) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(JustifyItems, "toCss")) { + compile_error = compile_error ++ @typeName(JustifyItems) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(JustifyItems, "eql")) { + compile_error = compile_error ++ @typeName(JustifyItems) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(PlaceItems, "deepClone")) { + compile_error = compile_error ++ @typeName(PlaceItems) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(PlaceItems, "parse")) { + compile_error = compile_error ++ @typeName(PlaceItems) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(PlaceItems, "toCss")) { + compile_error = compile_error ++ @typeName(PlaceItems) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(PlaceItems, "eql")) { + compile_error = compile_error ++ @typeName(PlaceItems) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(GapValue, "deepClone")) { + compile_error = compile_error ++ @typeName(GapValue) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(GapValue, "parse")) { + compile_error = compile_error ++ @typeName(GapValue) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(GapValue, "toCss")) { + compile_error = compile_error ++ @typeName(GapValue) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(GapValue, "eql")) { + compile_error = compile_error ++ @typeName(GapValue) ++ ": does not have a eql() 
function.\n"; + } + + if (!@hasDecl(GapValue, "deepClone")) { + compile_error = compile_error ++ @typeName(GapValue) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(GapValue, "parse")) { + compile_error = compile_error ++ @typeName(GapValue) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(GapValue, "toCss")) { + compile_error = compile_error ++ @typeName(GapValue) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(GapValue, "eql")) { + compile_error = compile_error ++ @typeName(GapValue) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(Gap, "deepClone")) { + compile_error = compile_error ++ @typeName(Gap) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(Gap, "parse")) { + compile_error = compile_error ++ @typeName(Gap) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(Gap, "toCss")) { + compile_error = compile_error ++ @typeName(Gap) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(Gap, "eql")) { + compile_error = compile_error ++ @typeName(Gap) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(BoxOrient, "deepClone")) { + compile_error = compile_error ++ @typeName(BoxOrient) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(BoxOrient, "parse")) { + compile_error = compile_error ++ @typeName(BoxOrient) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(BoxOrient, "toCss")) { + compile_error = compile_error ++ @typeName(BoxOrient) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(BoxOrient, "eql")) { + compile_error = compile_error ++ @typeName(BoxOrient) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(BoxDirection, "deepClone")) { + compile_error = compile_error ++ @typeName(BoxDirection) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(BoxDirection, "parse")) { + compile_error = compile_error ++ @typeName(BoxDirection) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(BoxDirection, "toCss")) { + compile_error = compile_error ++ @typeName(BoxDirection) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(BoxDirection, "eql")) { + compile_error = compile_error ++ @typeName(BoxDirection) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(BoxAlign, "deepClone")) { + compile_error = compile_error ++ @typeName(BoxAlign) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(BoxAlign, "parse")) { + compile_error = compile_error ++ @typeName(BoxAlign) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(BoxAlign, "toCss")) { + compile_error = compile_error ++ @typeName(BoxAlign) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(BoxAlign, "eql")) { + compile_error = compile_error ++ @typeName(BoxAlign) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(BoxPack, "deepClone")) { + compile_error = compile_error ++ @typeName(BoxPack) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(BoxPack, "parse")) { + compile_error = compile_error ++ @typeName(BoxPack) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(BoxPack, "toCss")) { + compile_error = compile_error ++ @typeName(BoxPack) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(BoxPack, "eql")) { + compile_error = compile_error ++ @typeName(BoxPack) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(BoxLines, "deepClone")) { + compile_error = compile_error ++ 
@typeName(BoxLines) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(BoxLines, "parse")) { + compile_error = compile_error ++ @typeName(BoxLines) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(BoxLines, "toCss")) { + compile_error = compile_error ++ @typeName(BoxLines) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(BoxLines, "eql")) { + compile_error = compile_error ++ @typeName(BoxLines) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(FlexPack, "deepClone")) { + compile_error = compile_error ++ @typeName(FlexPack) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(FlexPack, "parse")) { + compile_error = compile_error ++ @typeName(FlexPack) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(FlexPack, "toCss")) { + compile_error = compile_error ++ @typeName(FlexPack) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(FlexPack, "eql")) { + compile_error = compile_error ++ @typeName(FlexPack) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(BoxAlign, "deepClone")) { + compile_error = compile_error ++ @typeName(BoxAlign) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(BoxAlign, "parse")) { + compile_error = compile_error ++ @typeName(BoxAlign) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(BoxAlign, "toCss")) { + compile_error = compile_error ++ @typeName(BoxAlign) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(BoxAlign, "eql")) { + compile_error = compile_error ++ @typeName(BoxAlign) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(FlexItemAlign, "deepClone")) { + compile_error = compile_error ++ @typeName(FlexItemAlign) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(FlexItemAlign, "parse")) { + compile_error = compile_error ++ @typeName(FlexItemAlign) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(FlexItemAlign, "toCss")) { + compile_error = compile_error ++ @typeName(FlexItemAlign) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(FlexItemAlign, "eql")) { + compile_error = compile_error ++ @typeName(FlexItemAlign) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(FlexLinePack, "deepClone")) { + compile_error = compile_error ++ @typeName(FlexLinePack) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(FlexLinePack, "parse")) { + compile_error = compile_error ++ @typeName(FlexLinePack) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(FlexLinePack, "toCss")) { + compile_error = compile_error ++ @typeName(FlexLinePack) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(FlexLinePack, "eql")) { + compile_error = compile_error ++ @typeName(FlexLinePack) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "deepClone")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "parse")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "toCss")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "eql")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a eql() 
function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "deepClone")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "parse")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "toCss")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "eql")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "deepClone")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "parse")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "toCss")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "eql")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "deepClone")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "parse")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "toCss")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "eql")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "deepClone")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "parse")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "toCss")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "eql")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "deepClone")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "parse")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "toCss")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "eql")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "deepClone")) { + compile_error = compile_error ++ 
@typeName(LengthPercentageOrAuto) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "parse")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "toCss")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "eql")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "deepClone")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "parse")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "toCss")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "eql")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "deepClone")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "parse")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "toCss")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "eql")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(MarginBlock, "deepClone")) { + compile_error = compile_error ++ @typeName(MarginBlock) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(MarginBlock, "parse")) { + compile_error = compile_error ++ @typeName(MarginBlock) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(MarginBlock, "toCss")) { + compile_error = compile_error ++ @typeName(MarginBlock) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(MarginBlock, "eql")) { + compile_error = compile_error ++ @typeName(MarginBlock) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(MarginInline, "deepClone")) { + compile_error = compile_error ++ @typeName(MarginInline) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(MarginInline, "parse")) { + compile_error = compile_error ++ @typeName(MarginInline) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(MarginInline, "toCss")) { + compile_error = compile_error ++ @typeName(MarginInline) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(MarginInline, "eql")) { + compile_error = compile_error ++ @typeName(MarginInline) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(Margin, "deepClone")) { + compile_error = compile_error ++ @typeName(Margin) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(Margin, "parse")) { + compile_error = compile_error ++ @typeName(Margin) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(Margin, "toCss")) { + compile_error = compile_error ++ 
@typeName(Margin) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(Margin, "eql")) { + compile_error = compile_error ++ @typeName(Margin) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "deepClone")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "parse")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "toCss")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "eql")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "deepClone")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "parse")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "toCss")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "eql")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "deepClone")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "parse")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "toCss")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "eql")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "deepClone")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "parse")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "toCss")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "eql")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "deepClone")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "parse")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "toCss")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "eql")) { + compile_error = 
compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "deepClone")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "parse")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "toCss")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "eql")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "deepClone")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "parse")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "toCss")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "eql")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "deepClone")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "parse")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "toCss")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "eql")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(PaddingBlock, "deepClone")) { + compile_error = compile_error ++ @typeName(PaddingBlock) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(PaddingBlock, "parse")) { + compile_error = compile_error ++ @typeName(PaddingBlock) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(PaddingBlock, "toCss")) { + compile_error = compile_error ++ @typeName(PaddingBlock) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(PaddingBlock, "eql")) { + compile_error = compile_error ++ @typeName(PaddingBlock) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(PaddingInline, "deepClone")) { + compile_error = compile_error ++ @typeName(PaddingInline) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(PaddingInline, "parse")) { + compile_error = compile_error ++ @typeName(PaddingInline) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(PaddingInline, "toCss")) { + compile_error = compile_error ++ @typeName(PaddingInline) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(PaddingInline, "eql")) { + compile_error = compile_error ++ @typeName(PaddingInline) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(Padding, "deepClone")) { + compile_error = compile_error ++ @typeName(Padding) ++ ": does not have a deepClone() function.\n"; + } + + if 
(!@hasDecl(Padding, "parse")) { + compile_error = compile_error ++ @typeName(Padding) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(Padding, "toCss")) { + compile_error = compile_error ++ @typeName(Padding) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(Padding, "eql")) { + compile_error = compile_error ++ @typeName(Padding) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "deepClone")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "parse")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "toCss")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "eql")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "deepClone")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "parse")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "toCss")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "eql")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "deepClone")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "parse")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "toCss")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "eql")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "deepClone")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "parse")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "toCss")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "eql")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "deepClone")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "parse")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a parse() function.\n"; + } + + if 
(!@hasDecl(LengthPercentageOrAuto, "toCss")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "eql")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "deepClone")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "parse")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "toCss")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "eql")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "deepClone")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "parse")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "toCss")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "eql")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "deepClone")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "parse")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "toCss")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "eql")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(ScrollMarginBlock, "deepClone")) { + compile_error = compile_error ++ @typeName(ScrollMarginBlock) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(ScrollMarginBlock, "parse")) { + compile_error = compile_error ++ @typeName(ScrollMarginBlock) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(ScrollMarginBlock, "toCss")) { + compile_error = compile_error ++ @typeName(ScrollMarginBlock) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(ScrollMarginBlock, "eql")) { + compile_error = compile_error ++ @typeName(ScrollMarginBlock) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(ScrollMarginInline, "deepClone")) { + compile_error = compile_error ++ @typeName(ScrollMarginInline) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(ScrollMarginInline, "parse")) { + compile_error = compile_error ++ @typeName(ScrollMarginInline) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(ScrollMarginInline, "toCss")) { + compile_error = compile_error ++ @typeName(ScrollMarginInline) ++ ": does not have a toCss() function.\n"; + } + + if 
(!@hasDecl(ScrollMarginInline, "eql")) { + compile_error = compile_error ++ @typeName(ScrollMarginInline) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(ScrollMargin, "deepClone")) { + compile_error = compile_error ++ @typeName(ScrollMargin) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(ScrollMargin, "parse")) { + compile_error = compile_error ++ @typeName(ScrollMargin) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(ScrollMargin, "toCss")) { + compile_error = compile_error ++ @typeName(ScrollMargin) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(ScrollMargin, "eql")) { + compile_error = compile_error ++ @typeName(ScrollMargin) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "deepClone")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "parse")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "toCss")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "eql")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "deepClone")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "parse")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "toCss")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "eql")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "deepClone")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "parse")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "toCss")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "eql")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "deepClone")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "parse")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "toCss")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "eql")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, 
"deepClone")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "parse")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "toCss")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "eql")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "deepClone")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "parse")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "toCss")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "eql")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "deepClone")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "parse")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "toCss")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "eql")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "deepClone")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "parse")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "toCss")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(LengthPercentageOrAuto, "eql")) { + compile_error = compile_error ++ @typeName(LengthPercentageOrAuto) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(ScrollPaddingBlock, "deepClone")) { + compile_error = compile_error ++ @typeName(ScrollPaddingBlock) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(ScrollPaddingBlock, "parse")) { + compile_error = compile_error ++ @typeName(ScrollPaddingBlock) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(ScrollPaddingBlock, "toCss")) { + compile_error = compile_error ++ @typeName(ScrollPaddingBlock) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(ScrollPaddingBlock, "eql")) { + compile_error = compile_error ++ @typeName(ScrollPaddingBlock) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(ScrollPaddingInline, "deepClone")) { + compile_error = compile_error ++ @typeName(ScrollPaddingInline) ++ ": does not have a deepClone() function.\n"; + } + + if 
(!@hasDecl(ScrollPaddingInline, "parse")) { + compile_error = compile_error ++ @typeName(ScrollPaddingInline) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(ScrollPaddingInline, "toCss")) { + compile_error = compile_error ++ @typeName(ScrollPaddingInline) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(ScrollPaddingInline, "eql")) { + compile_error = compile_error ++ @typeName(ScrollPaddingInline) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(ScrollPadding, "deepClone")) { + compile_error = compile_error ++ @typeName(ScrollPadding) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(ScrollPadding, "parse")) { + compile_error = compile_error ++ @typeName(ScrollPadding) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(ScrollPadding, "toCss")) { + compile_error = compile_error ++ @typeName(ScrollPadding) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(ScrollPadding, "eql")) { + compile_error = compile_error ++ @typeName(ScrollPadding) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(FontWeight, "deepClone")) { + compile_error = compile_error ++ @typeName(FontWeight) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(FontWeight, "parse")) { + compile_error = compile_error ++ @typeName(FontWeight) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(FontWeight, "toCss")) { + compile_error = compile_error ++ @typeName(FontWeight) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(FontWeight, "eql")) { + compile_error = compile_error ++ @typeName(FontWeight) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(FontSize, "deepClone")) { + compile_error = compile_error ++ @typeName(FontSize) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(FontSize, "parse")) { + compile_error = compile_error ++ @typeName(FontSize) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(FontSize, "toCss")) { + compile_error = compile_error ++ @typeName(FontSize) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(FontSize, "eql")) { + compile_error = compile_error ++ @typeName(FontSize) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(FontStretch, "deepClone")) { + compile_error = compile_error ++ @typeName(FontStretch) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(FontStretch, "parse")) { + compile_error = compile_error ++ @typeName(FontStretch) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(FontStretch, "toCss")) { + compile_error = compile_error ++ @typeName(FontStretch) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(FontStretch, "eql")) { + compile_error = compile_error ++ @typeName(FontStretch) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(BabyList(FontFamily), "deepClone")) { + compile_error = compile_error ++ @typeName(BabyList(FontFamily)) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(BabyList(FontFamily), "parse")) { + compile_error = compile_error ++ @typeName(BabyList(FontFamily)) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(BabyList(FontFamily), "toCss")) { + compile_error = compile_error ++ @typeName(BabyList(FontFamily)) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(BabyList(FontFamily), "eql")) { + compile_error = compile_error ++ @typeName(BabyList(FontFamily)) ++ ": does not have a eql() function.\n"; + } + + if 
(!@hasDecl(FontStyle, "deepClone")) { + compile_error = compile_error ++ @typeName(FontStyle) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(FontStyle, "parse")) { + compile_error = compile_error ++ @typeName(FontStyle) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(FontStyle, "toCss")) { + compile_error = compile_error ++ @typeName(FontStyle) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(FontStyle, "eql")) { + compile_error = compile_error ++ @typeName(FontStyle) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(FontVariantCaps, "deepClone")) { + compile_error = compile_error ++ @typeName(FontVariantCaps) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(FontVariantCaps, "parse")) { + compile_error = compile_error ++ @typeName(FontVariantCaps) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(FontVariantCaps, "toCss")) { + compile_error = compile_error ++ @typeName(FontVariantCaps) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(FontVariantCaps, "eql")) { + compile_error = compile_error ++ @typeName(FontVariantCaps) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(LineHeight, "deepClone")) { + compile_error = compile_error ++ @typeName(LineHeight) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(LineHeight, "parse")) { + compile_error = compile_error ++ @typeName(LineHeight) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(LineHeight, "toCss")) { + compile_error = compile_error ++ @typeName(LineHeight) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(LineHeight, "eql")) { + compile_error = compile_error ++ @typeName(LineHeight) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(Font, "deepClone")) { + compile_error = compile_error ++ @typeName(Font) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(Font, "parse")) { + compile_error = compile_error ++ @typeName(Font) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(Font, "toCss")) { + compile_error = compile_error ++ @typeName(Font) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(Font, "eql")) { + compile_error = compile_error ++ @typeName(Font) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(CssColor, "deepClone")) { + compile_error = compile_error ++ @typeName(CssColor) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(CssColor, "parse")) { + compile_error = compile_error ++ @typeName(CssColor) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(CssColor, "toCss")) { + compile_error = compile_error ++ @typeName(CssColor) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(CssColor, "eql")) { + compile_error = compile_error ++ @typeName(CssColor) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(CssColor, "deepClone")) { + compile_error = compile_error ++ @typeName(CssColor) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(CssColor, "parse")) { + compile_error = compile_error ++ @typeName(CssColor) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(CssColor, "toCss")) { + compile_error = compile_error ++ @typeName(CssColor) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(CssColor, "eql")) { + compile_error = compile_error ++ @typeName(CssColor) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(SmallList(TextShadow, 1), "deepClone")) 
{ + compile_error = compile_error ++ @typeName(SmallList(TextShadow, 1)) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(SmallList(TextShadow, 1), "parse")) { + compile_error = compile_error ++ @typeName(SmallList(TextShadow, 1)) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(SmallList(TextShadow, 1), "toCss")) { + compile_error = compile_error ++ @typeName(SmallList(TextShadow, 1)) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(SmallList(TextShadow, 1), "eql")) { + compile_error = compile_error ++ @typeName(SmallList(TextShadow, 1)) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(Direction, "deepClone")) { + compile_error = compile_error ++ @typeName(Direction) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(Direction, "parse")) { + compile_error = compile_error ++ @typeName(Direction) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(Direction, "toCss")) { + compile_error = compile_error ++ @typeName(Direction) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(Direction, "eql")) { + compile_error = compile_error ++ @typeName(Direction) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(Composes, "deepClone")) { + compile_error = compile_error ++ @typeName(Composes) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(Composes, "parse")) { + compile_error = compile_error ++ @typeName(Composes) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(Composes, "toCss")) { + compile_error = compile_error ++ @typeName(Composes) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(Composes, "eql")) { + compile_error = compile_error ++ @typeName(Composes) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(SmallList(Image, 1), "deepClone")) { + compile_error = compile_error ++ @typeName(SmallList(Image, 1)) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(SmallList(Image, 1), "parse")) { + compile_error = compile_error ++ @typeName(SmallList(Image, 1)) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(SmallList(Image, 1), "toCss")) { + compile_error = compile_error ++ @typeName(SmallList(Image, 1)) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(SmallList(Image, 1), "eql")) { + compile_error = compile_error ++ @typeName(SmallList(Image, 1)) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(SmallList(MaskMode, 1), "deepClone")) { + compile_error = compile_error ++ @typeName(SmallList(MaskMode, 1)) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(SmallList(MaskMode, 1), "parse")) { + compile_error = compile_error ++ @typeName(SmallList(MaskMode, 1)) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(SmallList(MaskMode, 1), "toCss")) { + compile_error = compile_error ++ @typeName(SmallList(MaskMode, 1)) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(SmallList(MaskMode, 1), "eql")) { + compile_error = compile_error ++ @typeName(SmallList(MaskMode, 1)) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(SmallList(BackgroundRepeat, 1), "deepClone")) { + compile_error = compile_error ++ @typeName(SmallList(BackgroundRepeat, 1)) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(SmallList(BackgroundRepeat, 1), "parse")) { + compile_error = compile_error ++ @typeName(SmallList(BackgroundRepeat, 1)) ++ ": does not have a parse() function.\n"; + } + + if 
(!@hasDecl(SmallList(BackgroundRepeat, 1), "toCss")) { + compile_error = compile_error ++ @typeName(SmallList(BackgroundRepeat, 1)) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(SmallList(BackgroundRepeat, 1), "eql")) { + compile_error = compile_error ++ @typeName(SmallList(BackgroundRepeat, 1)) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(SmallList(HorizontalPosition, 1), "deepClone")) { + compile_error = compile_error ++ @typeName(SmallList(HorizontalPosition, 1)) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(SmallList(HorizontalPosition, 1), "parse")) { + compile_error = compile_error ++ @typeName(SmallList(HorizontalPosition, 1)) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(SmallList(HorizontalPosition, 1), "toCss")) { + compile_error = compile_error ++ @typeName(SmallList(HorizontalPosition, 1)) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(SmallList(HorizontalPosition, 1), "eql")) { + compile_error = compile_error ++ @typeName(SmallList(HorizontalPosition, 1)) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(SmallList(VerticalPosition, 1), "deepClone")) { + compile_error = compile_error ++ @typeName(SmallList(VerticalPosition, 1)) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(SmallList(VerticalPosition, 1), "parse")) { + compile_error = compile_error ++ @typeName(SmallList(VerticalPosition, 1)) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(SmallList(VerticalPosition, 1), "toCss")) { + compile_error = compile_error ++ @typeName(SmallList(VerticalPosition, 1)) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(SmallList(VerticalPosition, 1), "eql")) { + compile_error = compile_error ++ @typeName(SmallList(VerticalPosition, 1)) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(SmallList(Position, 1), "deepClone")) { + compile_error = compile_error ++ @typeName(SmallList(Position, 1)) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(SmallList(Position, 1), "parse")) { + compile_error = compile_error ++ @typeName(SmallList(Position, 1)) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(SmallList(Position, 1), "toCss")) { + compile_error = compile_error ++ @typeName(SmallList(Position, 1)) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(SmallList(Position, 1), "eql")) { + compile_error = compile_error ++ @typeName(SmallList(Position, 1)) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(SmallList(MaskClip, 1), "deepClone")) { + compile_error = compile_error ++ @typeName(SmallList(MaskClip, 1)) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(SmallList(MaskClip, 1), "parse")) { + compile_error = compile_error ++ @typeName(SmallList(MaskClip, 1)) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(SmallList(MaskClip, 1), "toCss")) { + compile_error = compile_error ++ @typeName(SmallList(MaskClip, 1)) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(SmallList(MaskClip, 1), "eql")) { + compile_error = compile_error ++ @typeName(SmallList(MaskClip, 1)) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(SmallList(GeometryBox, 1), "deepClone")) { + compile_error = compile_error ++ @typeName(SmallList(GeometryBox, 1)) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(SmallList(GeometryBox, 1), "parse")) { + compile_error = compile_error ++ 
@typeName(SmallList(GeometryBox, 1)) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(SmallList(GeometryBox, 1), "toCss")) { + compile_error = compile_error ++ @typeName(SmallList(GeometryBox, 1)) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(SmallList(GeometryBox, 1), "eql")) { + compile_error = compile_error ++ @typeName(SmallList(GeometryBox, 1)) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(SmallList(BackgroundSize, 1), "deepClone")) { + compile_error = compile_error ++ @typeName(SmallList(BackgroundSize, 1)) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(SmallList(BackgroundSize, 1), "parse")) { + compile_error = compile_error ++ @typeName(SmallList(BackgroundSize, 1)) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(SmallList(BackgroundSize, 1), "toCss")) { + compile_error = compile_error ++ @typeName(SmallList(BackgroundSize, 1)) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(SmallList(BackgroundSize, 1), "eql")) { + compile_error = compile_error ++ @typeName(SmallList(BackgroundSize, 1)) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(SmallList(MaskComposite, 1), "deepClone")) { + compile_error = compile_error ++ @typeName(SmallList(MaskComposite, 1)) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(SmallList(MaskComposite, 1), "parse")) { + compile_error = compile_error ++ @typeName(SmallList(MaskComposite, 1)) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(SmallList(MaskComposite, 1), "toCss")) { + compile_error = compile_error ++ @typeName(SmallList(MaskComposite, 1)) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(SmallList(MaskComposite, 1), "eql")) { + compile_error = compile_error ++ @typeName(SmallList(MaskComposite, 1)) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(MaskType, "deepClone")) { + compile_error = compile_error ++ @typeName(MaskType) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(MaskType, "parse")) { + compile_error = compile_error ++ @typeName(MaskType) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(MaskType, "toCss")) { + compile_error = compile_error ++ @typeName(MaskType) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(MaskType, "eql")) { + compile_error = compile_error ++ @typeName(MaskType) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(SmallList(Mask, 1), "deepClone")) { + compile_error = compile_error ++ @typeName(SmallList(Mask, 1)) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(SmallList(Mask, 1), "parse")) { + compile_error = compile_error ++ @typeName(SmallList(Mask, 1)) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(SmallList(Mask, 1), "toCss")) { + compile_error = compile_error ++ @typeName(SmallList(Mask, 1)) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(SmallList(Mask, 1), "eql")) { + compile_error = compile_error ++ @typeName(SmallList(Mask, 1)) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(Image, "deepClone")) { + compile_error = compile_error ++ @typeName(Image) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(Image, "parse")) { + compile_error = compile_error ++ @typeName(Image) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(Image, "toCss")) { + compile_error = compile_error ++ @typeName(Image) ++ ": does not have a toCss() function.\n"; + } + + 
if (!@hasDecl(Image, "eql")) { + compile_error = compile_error ++ @typeName(Image) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(MaskBorderMode, "deepClone")) { + compile_error = compile_error ++ @typeName(MaskBorderMode) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(MaskBorderMode, "parse")) { + compile_error = compile_error ++ @typeName(MaskBorderMode) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(MaskBorderMode, "toCss")) { + compile_error = compile_error ++ @typeName(MaskBorderMode) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(MaskBorderMode, "eql")) { + compile_error = compile_error ++ @typeName(MaskBorderMode) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(BorderImageSlice, "deepClone")) { + compile_error = compile_error ++ @typeName(BorderImageSlice) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(BorderImageSlice, "parse")) { + compile_error = compile_error ++ @typeName(BorderImageSlice) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(BorderImageSlice, "toCss")) { + compile_error = compile_error ++ @typeName(BorderImageSlice) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(BorderImageSlice, "eql")) { + compile_error = compile_error ++ @typeName(BorderImageSlice) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(Rect(BorderImageSideWidth), "deepClone")) { + compile_error = compile_error ++ @typeName(Rect(BorderImageSideWidth)) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(Rect(BorderImageSideWidth), "parse")) { + compile_error = compile_error ++ @typeName(Rect(BorderImageSideWidth)) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(Rect(BorderImageSideWidth), "toCss")) { + compile_error = compile_error ++ @typeName(Rect(BorderImageSideWidth)) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(Rect(BorderImageSideWidth), "eql")) { + compile_error = compile_error ++ @typeName(Rect(BorderImageSideWidth)) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(Rect(LengthOrNumber), "deepClone")) { + compile_error = compile_error ++ @typeName(Rect(LengthOrNumber)) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(Rect(LengthOrNumber), "parse")) { + compile_error = compile_error ++ @typeName(Rect(LengthOrNumber)) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(Rect(LengthOrNumber), "toCss")) { + compile_error = compile_error ++ @typeName(Rect(LengthOrNumber)) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(Rect(LengthOrNumber), "eql")) { + compile_error = compile_error ++ @typeName(Rect(LengthOrNumber)) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(BorderImageRepeat, "deepClone")) { + compile_error = compile_error ++ @typeName(BorderImageRepeat) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(BorderImageRepeat, "parse")) { + compile_error = compile_error ++ @typeName(BorderImageRepeat) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(BorderImageRepeat, "toCss")) { + compile_error = compile_error ++ @typeName(BorderImageRepeat) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(BorderImageRepeat, "eql")) { + compile_error = compile_error ++ @typeName(BorderImageRepeat) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(MaskBorder, "deepClone")) { + compile_error = compile_error ++ @typeName(MaskBorder) ++ ": does not 
have a deepClone() function.\n"; + } + + if (!@hasDecl(MaskBorder, "parse")) { + compile_error = compile_error ++ @typeName(MaskBorder) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(MaskBorder, "toCss")) { + compile_error = compile_error ++ @typeName(MaskBorder) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(MaskBorder, "eql")) { + compile_error = compile_error ++ @typeName(MaskBorder) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(SmallList(WebKitMaskComposite, 1), "deepClone")) { + compile_error = compile_error ++ @typeName(SmallList(WebKitMaskComposite, 1)) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(SmallList(WebKitMaskComposite, 1), "parse")) { + compile_error = compile_error ++ @typeName(SmallList(WebKitMaskComposite, 1)) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(SmallList(WebKitMaskComposite, 1), "toCss")) { + compile_error = compile_error ++ @typeName(SmallList(WebKitMaskComposite, 1)) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(SmallList(WebKitMaskComposite, 1), "eql")) { + compile_error = compile_error ++ @typeName(SmallList(WebKitMaskComposite, 1)) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(SmallList(WebKitMaskSourceType, 1), "deepClone")) { + compile_error = compile_error ++ @typeName(SmallList(WebKitMaskSourceType, 1)) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(SmallList(WebKitMaskSourceType, 1), "parse")) { + compile_error = compile_error ++ @typeName(SmallList(WebKitMaskSourceType, 1)) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(SmallList(WebKitMaskSourceType, 1), "toCss")) { + compile_error = compile_error ++ @typeName(SmallList(WebKitMaskSourceType, 1)) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(SmallList(WebKitMaskSourceType, 1), "eql")) { + compile_error = compile_error ++ @typeName(SmallList(WebKitMaskSourceType, 1)) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(BorderImage, "deepClone")) { + compile_error = compile_error ++ @typeName(BorderImage) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(BorderImage, "parse")) { + compile_error = compile_error ++ @typeName(BorderImage) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(BorderImage, "toCss")) { + compile_error = compile_error ++ @typeName(BorderImage) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(BorderImage, "eql")) { + compile_error = compile_error ++ @typeName(BorderImage) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(Image, "deepClone")) { + compile_error = compile_error ++ @typeName(Image) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(Image, "parse")) { + compile_error = compile_error ++ @typeName(Image) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(Image, "toCss")) { + compile_error = compile_error ++ @typeName(Image) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(Image, "eql")) { + compile_error = compile_error ++ @typeName(Image) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(BorderImageSlice, "deepClone")) { + compile_error = compile_error ++ @typeName(BorderImageSlice) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(BorderImageSlice, "parse")) { + compile_error = compile_error ++ @typeName(BorderImageSlice) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(BorderImageSlice, 
"toCss")) { + compile_error = compile_error ++ @typeName(BorderImageSlice) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(BorderImageSlice, "eql")) { + compile_error = compile_error ++ @typeName(BorderImageSlice) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(Rect(BorderImageSideWidth), "deepClone")) { + compile_error = compile_error ++ @typeName(Rect(BorderImageSideWidth)) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(Rect(BorderImageSideWidth), "parse")) { + compile_error = compile_error ++ @typeName(Rect(BorderImageSideWidth)) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(Rect(BorderImageSideWidth), "toCss")) { + compile_error = compile_error ++ @typeName(Rect(BorderImageSideWidth)) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(Rect(BorderImageSideWidth), "eql")) { + compile_error = compile_error ++ @typeName(Rect(BorderImageSideWidth)) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(Rect(LengthOrNumber), "deepClone")) { + compile_error = compile_error ++ @typeName(Rect(LengthOrNumber)) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(Rect(LengthOrNumber), "parse")) { + compile_error = compile_error ++ @typeName(Rect(LengthOrNumber)) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(Rect(LengthOrNumber), "toCss")) { + compile_error = compile_error ++ @typeName(Rect(LengthOrNumber)) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(Rect(LengthOrNumber), "eql")) { + compile_error = compile_error ++ @typeName(Rect(LengthOrNumber)) ++ ": does not have a eql() function.\n"; + } + + if (!@hasDecl(BorderImageRepeat, "deepClone")) { + compile_error = compile_error ++ @typeName(BorderImageRepeat) ++ ": does not have a deepClone() function.\n"; + } + + if (!@hasDecl(BorderImageRepeat, "parse")) { + compile_error = compile_error ++ @typeName(BorderImageRepeat) ++ ": does not have a parse() function.\n"; + } + + if (!@hasDecl(BorderImageRepeat, "toCss")) { + compile_error = compile_error ++ @typeName(BorderImageRepeat) ++ ": does not have a toCss() function.\n"; + } + + if (!@hasDecl(BorderImageRepeat, "eql")) { + compile_error = compile_error ++ @typeName(BorderImageRepeat) ++ ": does not have a eql() function.\n"; + } + + const final_compile_error = compile_error; + break :compile_error final_compile_error; + }; + if (compile_error.len > 0) { + @compileError(compile_error); + } + } + + /// Parses a CSS property by name. 
+ pub fn parse(property_id: PropertyId, input: *css.Parser, options: *const css.ParserOptions) Result(Property) { + const state = input.state(); + + switch (property_id) { + .@"background-color" => { + if (css.generic.parseWithOptions(CssColor, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"background-color" = c } }; + } + } + }, + .@"background-image" => { + if (css.generic.parseWithOptions(SmallList(Image, 1), input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"background-image" = c } }; + } + } + }, + .@"background-position-x" => { + if (css.generic.parseWithOptions(SmallList(css_values.position.HorizontalPosition, 1), input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"background-position-x" = c } }; + } + } + }, + .@"background-position-y" => { + if (css.generic.parseWithOptions(SmallList(css_values.position.VerticalPosition, 1), input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"background-position-y" = c } }; + } + } + }, + .@"background-position" => { + if (css.generic.parseWithOptions(SmallList(background.BackgroundPosition, 1), input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"background-position" = c } }; + } + } + }, + .@"background-size" => { + if (css.generic.parseWithOptions(SmallList(background.BackgroundSize, 1), input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"background-size" = c } }; + } + } + }, + .@"background-repeat" => { + if (css.generic.parseWithOptions(SmallList(background.BackgroundRepeat, 1), input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"background-repeat" = c } }; + } + } + }, + .@"background-attachment" => { + if (css.generic.parseWithOptions(SmallList(background.BackgroundAttachment, 1), input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"background-attachment" = c } }; + } + } + }, + .@"background-clip" => |pre| { + if (css.generic.parseWithOptions(SmallList(background.BackgroundClip, 1), input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"background-clip" = .{ c, pre } } }; + } + } + }, + .@"background-origin" => { + if (css.generic.parseWithOptions(SmallList(background.BackgroundOrigin, 1), input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"background-origin" = c } }; + } + } + }, + .background => { + if (css.generic.parseWithOptions(SmallList(background.Background, 1), input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .background = c } }; + } + } + }, + .@"box-shadow" => |pre| { + if (css.generic.parseWithOptions(SmallList(box_shadow.BoxShadow, 1), input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"box-shadow" = .{ c, pre } } }; + } + } + }, + .opacity => { + if (css.generic.parseWithOptions(css.css_values.alpha.AlphaValue, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .opacity = c } }; + } + } + }, + .color => { + if (css.generic.parseWithOptions(CssColor, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .color = c } }; + } + } + }, + .display => { + if 
(css.generic.parseWithOptions(display.Display, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .display = c } }; + } + } + }, + .visibility => { + if (css.generic.parseWithOptions(display.Visibility, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .visibility = c } }; + } + } + }, + .width => { + if (css.generic.parseWithOptions(size.Size, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .width = c } }; + } + } + }, + .height => { + if (css.generic.parseWithOptions(size.Size, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .height = c } }; + } + } + }, + .@"min-width" => { + if (css.generic.parseWithOptions(size.Size, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"min-width" = c } }; + } + } + }, + .@"min-height" => { + if (css.generic.parseWithOptions(size.Size, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"min-height" = c } }; + } + } + }, + .@"max-width" => { + if (css.generic.parseWithOptions(size.MaxSize, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"max-width" = c } }; + } + } + }, + .@"max-height" => { + if (css.generic.parseWithOptions(size.MaxSize, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"max-height" = c } }; + } + } + }, + .@"block-size" => { + if (css.generic.parseWithOptions(size.Size, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"block-size" = c } }; + } + } + }, + .@"inline-size" => { + if (css.generic.parseWithOptions(size.Size, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"inline-size" = c } }; + } + } + }, + .@"min-block-size" => { + if (css.generic.parseWithOptions(size.Size, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"min-block-size" = c } }; + } + } + }, + .@"min-inline-size" => { + if (css.generic.parseWithOptions(size.Size, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"min-inline-size" = c } }; + } + } + }, + .@"max-block-size" => { + if (css.generic.parseWithOptions(size.MaxSize, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"max-block-size" = c } }; + } + } + }, + .@"max-inline-size" => { + if (css.generic.parseWithOptions(size.MaxSize, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"max-inline-size" = c } }; + } + } + }, + .@"box-sizing" => |pre| { + if (css.generic.parseWithOptions(size.BoxSizing, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"box-sizing" = .{ c, pre } } }; + } + } + }, + .@"aspect-ratio" => { + if (css.generic.parseWithOptions(size.AspectRatio, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"aspect-ratio" = c } }; + } + } + }, + .overflow => { + if (css.generic.parseWithOptions(overflow.Overflow, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .overflow = c } }; + } + } + }, + .@"overflow-x" => { + if 
(css.generic.parseWithOptions(overflow.OverflowKeyword, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"overflow-x" = c } }; + } + } + }, + .@"overflow-y" => { + if (css.generic.parseWithOptions(overflow.OverflowKeyword, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"overflow-y" = c } }; + } + } + }, + .@"text-overflow" => |pre| { + if (css.generic.parseWithOptions(overflow.TextOverflow, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"text-overflow" = .{ c, pre } } }; + } + } + }, + .position => { + if (css.generic.parseWithOptions(position.Position, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .position = c } }; + } + } + }, + .top => { + if (css.generic.parseWithOptions(LengthPercentageOrAuto, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .top = c } }; + } + } + }, + .bottom => { + if (css.generic.parseWithOptions(LengthPercentageOrAuto, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .bottom = c } }; + } + } + }, + .left => { + if (css.generic.parseWithOptions(LengthPercentageOrAuto, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .left = c } }; + } + } + }, + .right => { + if (css.generic.parseWithOptions(LengthPercentageOrAuto, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .right = c } }; + } + } + }, + .@"inset-block-start" => { + if (css.generic.parseWithOptions(LengthPercentageOrAuto, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"inset-block-start" = c } }; + } + } + }, + .@"inset-block-end" => { + if (css.generic.parseWithOptions(LengthPercentageOrAuto, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"inset-block-end" = c } }; + } + } + }, + .@"inset-inline-start" => { + if (css.generic.parseWithOptions(LengthPercentageOrAuto, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"inset-inline-start" = c } }; + } + } + }, + .@"inset-inline-end" => { + if (css.generic.parseWithOptions(LengthPercentageOrAuto, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"inset-inline-end" = c } }; + } + } + }, + .@"inset-block" => { + if (css.generic.parseWithOptions(margin_padding.InsetBlock, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"inset-block" = c } }; + } + } + }, + .@"inset-inline" => { + if (css.generic.parseWithOptions(margin_padding.InsetInline, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"inset-inline" = c } }; + } + } + }, + .inset => { + if (css.generic.parseWithOptions(margin_padding.Inset, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .inset = c } }; + } + } + }, + .@"border-spacing" => { + if (css.generic.parseWithOptions(css.css_values.size.Size2D(Length), input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-spacing" = c } }; + } + } + }, + .@"border-top-color" => { + if (css.generic.parseWithOptions(CssColor, input, options).asValue()) |c| { + if 
(input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-top-color" = c } }; + } + } + }, + .@"border-bottom-color" => { + if (css.generic.parseWithOptions(CssColor, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-bottom-color" = c } }; + } + } + }, + .@"border-left-color" => { + if (css.generic.parseWithOptions(CssColor, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-left-color" = c } }; + } + } + }, + .@"border-right-color" => { + if (css.generic.parseWithOptions(CssColor, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-right-color" = c } }; + } + } + }, + .@"border-block-start-color" => { + if (css.generic.parseWithOptions(CssColor, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-block-start-color" = c } }; + } + } + }, + .@"border-block-end-color" => { + if (css.generic.parseWithOptions(CssColor, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-block-end-color" = c } }; + } + } + }, + .@"border-inline-start-color" => { + if (css.generic.parseWithOptions(CssColor, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-inline-start-color" = c } }; + } + } + }, + .@"border-inline-end-color" => { + if (css.generic.parseWithOptions(CssColor, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-inline-end-color" = c } }; + } + } + }, + .@"border-top-style" => { + if (css.generic.parseWithOptions(border.LineStyle, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-top-style" = c } }; + } + } + }, + .@"border-bottom-style" => { + if (css.generic.parseWithOptions(border.LineStyle, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-bottom-style" = c } }; + } + } + }, + .@"border-left-style" => { + if (css.generic.parseWithOptions(border.LineStyle, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-left-style" = c } }; + } + } + }, + .@"border-right-style" => { + if (css.generic.parseWithOptions(border.LineStyle, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-right-style" = c } }; + } + } + }, + .@"border-block-start-style" => { + if (css.generic.parseWithOptions(border.LineStyle, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-block-start-style" = c } }; + } + } + }, + .@"border-block-end-style" => { + if (css.generic.parseWithOptions(border.LineStyle, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-block-end-style" = c } }; + } + } + }, + .@"border-inline-start-style" => { + if (css.generic.parseWithOptions(border.LineStyle, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-inline-start-style" = c } }; + } + } + }, + .@"border-inline-end-style" => { + if (css.generic.parseWithOptions(border.LineStyle, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-inline-end-style" = c } }; + } + } + }, + .@"border-top-width" => { + if 
(css.generic.parseWithOptions(BorderSideWidth, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-top-width" = c } }; + } + } + }, + .@"border-bottom-width" => { + if (css.generic.parseWithOptions(BorderSideWidth, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-bottom-width" = c } }; + } + } + }, + .@"border-left-width" => { + if (css.generic.parseWithOptions(BorderSideWidth, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-left-width" = c } }; + } + } + }, + .@"border-right-width" => { + if (css.generic.parseWithOptions(BorderSideWidth, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-right-width" = c } }; + } + } + }, + .@"border-block-start-width" => { + if (css.generic.parseWithOptions(BorderSideWidth, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-block-start-width" = c } }; + } + } + }, + .@"border-block-end-width" => { + if (css.generic.parseWithOptions(BorderSideWidth, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-block-end-width" = c } }; + } + } + }, + .@"border-inline-start-width" => { + if (css.generic.parseWithOptions(BorderSideWidth, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-inline-start-width" = c } }; + } + } + }, + .@"border-inline-end-width" => { + if (css.generic.parseWithOptions(BorderSideWidth, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-inline-end-width" = c } }; + } + } + }, + .@"border-top-left-radius" => |pre| { + if (css.generic.parseWithOptions(Size2D(LengthPercentage), input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-top-left-radius" = .{ c, pre } } }; + } + } + }, + .@"border-top-right-radius" => |pre| { + if (css.generic.parseWithOptions(Size2D(LengthPercentage), input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-top-right-radius" = .{ c, pre } } }; + } + } + }, + .@"border-bottom-left-radius" => |pre| { + if (css.generic.parseWithOptions(Size2D(LengthPercentage), input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-bottom-left-radius" = .{ c, pre } } }; + } + } + }, + .@"border-bottom-right-radius" => |pre| { + if (css.generic.parseWithOptions(Size2D(LengthPercentage), input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-bottom-right-radius" = .{ c, pre } } }; + } + } + }, + .@"border-start-start-radius" => { + if (css.generic.parseWithOptions(Size2D(LengthPercentage), input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-start-start-radius" = c } }; + } + } + }, + .@"border-start-end-radius" => { + if (css.generic.parseWithOptions(Size2D(LengthPercentage), input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-start-end-radius" = c } }; + } + } + }, + .@"border-end-start-radius" => { + if (css.generic.parseWithOptions(Size2D(LengthPercentage), input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ 
.@"border-end-start-radius" = c } }; + } + } + }, + .@"border-end-end-radius" => { + if (css.generic.parseWithOptions(Size2D(LengthPercentage), input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-end-end-radius" = c } }; + } + } + }, + .@"border-radius" => |pre| { + if (css.generic.parseWithOptions(BorderRadius, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-radius" = .{ c, pre } } }; + } + } + }, + .@"border-image-source" => { + if (css.generic.parseWithOptions(Image, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-image-source" = c } }; + } + } + }, + .@"border-image-outset" => { + if (css.generic.parseWithOptions(Rect(LengthOrNumber), input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-image-outset" = c } }; + } + } + }, + .@"border-image-repeat" => { + if (css.generic.parseWithOptions(BorderImageRepeat, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-image-repeat" = c } }; + } + } + }, + .@"border-image-width" => { + if (css.generic.parseWithOptions(Rect(BorderImageSideWidth), input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-image-width" = c } }; + } + } + }, + .@"border-image-slice" => { + if (css.generic.parseWithOptions(BorderImageSlice, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-image-slice" = c } }; + } + } + }, + .@"border-image" => |pre| { + if (css.generic.parseWithOptions(BorderImage, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-image" = .{ c, pre } } }; + } + } + }, + .@"border-color" => { + if (css.generic.parseWithOptions(BorderColor, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-color" = c } }; + } + } + }, + .@"border-style" => { + if (css.generic.parseWithOptions(BorderStyle, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-style" = c } }; + } + } + }, + .@"border-width" => { + if (css.generic.parseWithOptions(BorderWidth, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-width" = c } }; + } + } + }, + .@"border-block-color" => { + if (css.generic.parseWithOptions(BorderBlockColor, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-block-color" = c } }; + } + } + }, + .@"border-block-style" => { + if (css.generic.parseWithOptions(BorderBlockStyle, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-block-style" = c } }; + } + } + }, + .@"border-block-width" => { + if (css.generic.parseWithOptions(BorderBlockWidth, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-block-width" = c } }; + } + } + }, + .@"border-inline-color" => { + if (css.generic.parseWithOptions(BorderInlineColor, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-inline-color" = c } }; + } + } + }, + .@"border-inline-style" => { + if (css.generic.parseWithOptions(BorderInlineStyle, input, options).asValue()) |c| { + if 
(input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-inline-style" = c } }; + } + } + }, + .@"border-inline-width" => { + if (css.generic.parseWithOptions(BorderInlineWidth, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-inline-width" = c } }; + } + } + }, + .border => { + if (css.generic.parseWithOptions(Border, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .border = c } }; + } + } + }, + .@"border-top" => { + if (css.generic.parseWithOptions(BorderTop, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-top" = c } }; + } + } + }, + .@"border-bottom" => { + if (css.generic.parseWithOptions(BorderBottom, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-bottom" = c } }; + } + } + }, + .@"border-left" => { + if (css.generic.parseWithOptions(BorderLeft, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-left" = c } }; + } + } + }, + .@"border-right" => { + if (css.generic.parseWithOptions(BorderRight, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-right" = c } }; + } + } + }, + .@"border-block" => { + if (css.generic.parseWithOptions(BorderBlock, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-block" = c } }; + } + } + }, + .@"border-block-start" => { + if (css.generic.parseWithOptions(BorderBlockStart, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-block-start" = c } }; + } + } + }, + .@"border-block-end" => { + if (css.generic.parseWithOptions(BorderBlockEnd, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-block-end" = c } }; + } + } + }, + .@"border-inline" => { + if (css.generic.parseWithOptions(BorderInline, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-inline" = c } }; + } + } + }, + .@"border-inline-start" => { + if (css.generic.parseWithOptions(BorderInlineStart, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-inline-start" = c } }; + } + } + }, + .@"border-inline-end" => { + if (css.generic.parseWithOptions(BorderInlineEnd, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"border-inline-end" = c } }; + } + } + }, + .outline => { + if (css.generic.parseWithOptions(Outline, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .outline = c } }; + } + } + }, + .@"outline-color" => { + if (css.generic.parseWithOptions(CssColor, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"outline-color" = c } }; + } + } + }, + .@"outline-style" => { + if (css.generic.parseWithOptions(OutlineStyle, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"outline-style" = c } }; + } + } + }, + .@"outline-width" => { + if (css.generic.parseWithOptions(BorderSideWidth, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"outline-width" = c } }; + } + } + }, + .@"flex-direction" => |pre| { + if 
(css.generic.parseWithOptions(FlexDirection, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"flex-direction" = .{ c, pre } } }; + } + } + }, + .@"flex-wrap" => |pre| { + if (css.generic.parseWithOptions(FlexWrap, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"flex-wrap" = .{ c, pre } } }; + } + } + }, + .@"flex-flow" => |pre| { + if (css.generic.parseWithOptions(FlexFlow, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"flex-flow" = .{ c, pre } } }; + } + } + }, + .@"flex-grow" => |pre| { + if (css.generic.parseWithOptions(CSSNumber, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"flex-grow" = .{ c, pre } } }; + } + } + }, + .@"flex-shrink" => |pre| { + if (css.generic.parseWithOptions(CSSNumber, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"flex-shrink" = .{ c, pre } } }; + } + } + }, + .@"flex-basis" => |pre| { + if (css.generic.parseWithOptions(LengthPercentageOrAuto, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"flex-basis" = .{ c, pre } } }; + } + } + }, + .flex => |pre| { + if (css.generic.parseWithOptions(Flex, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .flex = .{ c, pre } } }; + } + } + }, + .order => |pre| { + if (css.generic.parseWithOptions(CSSInteger, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .order = .{ c, pre } } }; + } + } + }, + .@"align-content" => |pre| { + if (css.generic.parseWithOptions(AlignContent, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"align-content" = .{ c, pre } } }; + } + } + }, + .@"justify-content" => |pre| { + if (css.generic.parseWithOptions(JustifyContent, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"justify-content" = .{ c, pre } } }; + } + } + }, + .@"place-content" => { + if (css.generic.parseWithOptions(PlaceContent, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"place-content" = c } }; + } + } + }, + .@"align-self" => |pre| { + if (css.generic.parseWithOptions(AlignSelf, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"align-self" = .{ c, pre } } }; + } + } + }, + .@"justify-self" => { + if (css.generic.parseWithOptions(JustifySelf, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"justify-self" = c } }; + } + } + }, + .@"place-self" => { + if (css.generic.parseWithOptions(PlaceSelf, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"place-self" = c } }; + } + } + }, + .@"align-items" => |pre| { + if (css.generic.parseWithOptions(AlignItems, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"align-items" = .{ c, pre } } }; + } + } + }, + .@"justify-items" => { + if (css.generic.parseWithOptions(JustifyItems, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"justify-items" = c } }; + } + } + }, + .@"place-items" => { + if (css.generic.parseWithOptions(PlaceItems, input, options).asValue()) |c| { + if 
(input.expectExhausted().isOk()) { + return .{ .result = .{ .@"place-items" = c } }; + } + } + }, + .@"row-gap" => { + if (css.generic.parseWithOptions(GapValue, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"row-gap" = c } }; + } + } + }, + .@"column-gap" => { + if (css.generic.parseWithOptions(GapValue, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"column-gap" = c } }; + } + } + }, + .gap => { + if (css.generic.parseWithOptions(Gap, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .gap = c } }; + } + } + }, + .@"box-orient" => |pre| { + if (css.generic.parseWithOptions(BoxOrient, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"box-orient" = .{ c, pre } } }; + } + } + }, + .@"box-direction" => |pre| { + if (css.generic.parseWithOptions(BoxDirection, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"box-direction" = .{ c, pre } } }; + } + } + }, + .@"box-ordinal-group" => |pre| { + if (css.generic.parseWithOptions(CSSInteger, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"box-ordinal-group" = .{ c, pre } } }; + } + } + }, + .@"box-align" => |pre| { + if (css.generic.parseWithOptions(BoxAlign, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"box-align" = .{ c, pre } } }; + } + } + }, + .@"box-flex" => |pre| { + if (css.generic.parseWithOptions(CSSNumber, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"box-flex" = .{ c, pre } } }; + } + } + }, + .@"box-flex-group" => |pre| { + if (css.generic.parseWithOptions(CSSInteger, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"box-flex-group" = .{ c, pre } } }; + } + } + }, + .@"box-pack" => |pre| { + if (css.generic.parseWithOptions(BoxPack, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"box-pack" = .{ c, pre } } }; + } + } + }, + .@"box-lines" => |pre| { + if (css.generic.parseWithOptions(BoxLines, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"box-lines" = .{ c, pre } } }; + } + } + }, + .@"flex-pack" => |pre| { + if (css.generic.parseWithOptions(FlexPack, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"flex-pack" = .{ c, pre } } }; + } + } + }, + .@"flex-order" => |pre| { + if (css.generic.parseWithOptions(CSSInteger, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"flex-order" = .{ c, pre } } }; + } + } + }, + .@"flex-align" => |pre| { + if (css.generic.parseWithOptions(BoxAlign, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"flex-align" = .{ c, pre } } }; + } + } + }, + .@"flex-item-align" => |pre| { + if (css.generic.parseWithOptions(FlexItemAlign, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"flex-item-align" = .{ c, pre } } }; + } + } + }, + .@"flex-line-pack" => |pre| { + if (css.generic.parseWithOptions(FlexLinePack, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"flex-line-pack" = .{ c, pre } } }; + } + } 
+ }, + .@"flex-positive" => |pre| { + if (css.generic.parseWithOptions(CSSNumber, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"flex-positive" = .{ c, pre } } }; + } + } + }, + .@"flex-negative" => |pre| { + if (css.generic.parseWithOptions(CSSNumber, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"flex-negative" = .{ c, pre } } }; + } + } + }, + .@"flex-preferred-size" => |pre| { + if (css.generic.parseWithOptions(LengthPercentageOrAuto, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"flex-preferred-size" = .{ c, pre } } }; + } + } + }, + .@"margin-top" => { + if (css.generic.parseWithOptions(LengthPercentageOrAuto, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"margin-top" = c } }; + } + } + }, + .@"margin-bottom" => { + if (css.generic.parseWithOptions(LengthPercentageOrAuto, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"margin-bottom" = c } }; + } + } + }, + .@"margin-left" => { + if (css.generic.parseWithOptions(LengthPercentageOrAuto, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"margin-left" = c } }; + } + } + }, + .@"margin-right" => { + if (css.generic.parseWithOptions(LengthPercentageOrAuto, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"margin-right" = c } }; + } + } + }, + .@"margin-block-start" => { + @setEvalBranchQuota(5000); + if (css.generic.parseWithOptions(LengthPercentageOrAuto, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"margin-block-start" = c } }; + } + } + }, + .@"margin-block-end" => { + if (css.generic.parseWithOptions(LengthPercentageOrAuto, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"margin-block-end" = c } }; + } + } + }, + .@"margin-inline-start" => { + if (css.generic.parseWithOptions(LengthPercentageOrAuto, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"margin-inline-start" = c } }; + } + } + }, + .@"margin-inline-end" => { + if (css.generic.parseWithOptions(LengthPercentageOrAuto, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"margin-inline-end" = c } }; + } + } + }, + .@"margin-block" => { + if (css.generic.parseWithOptions(MarginBlock, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"margin-block" = c } }; + } + } + }, + .@"margin-inline" => { + if (css.generic.parseWithOptions(MarginInline, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"margin-inline" = c } }; + } + } + }, + .margin => { + @setEvalBranchQuota(5000); + if (css.generic.parseWithOptions(Margin, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .margin = c } }; + } + } + }, + .@"padding-top" => { + if (css.generic.parseWithOptions(LengthPercentageOrAuto, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"padding-top" = c } }; + } + } + }, + .@"padding-bottom" => { + if (css.generic.parseWithOptions(LengthPercentageOrAuto, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return 
.{ .result = .{ .@"padding-bottom" = c } }; + } + } + }, + .@"padding-left" => { + if (css.generic.parseWithOptions(LengthPercentageOrAuto, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"padding-left" = c } }; + } + } + }, + .@"padding-right" => { + if (css.generic.parseWithOptions(LengthPercentageOrAuto, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"padding-right" = c } }; + } + } + }, + .@"padding-block-start" => { + if (css.generic.parseWithOptions(LengthPercentageOrAuto, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"padding-block-start" = c } }; + } + } + }, + .@"padding-block-end" => { + if (css.generic.parseWithOptions(LengthPercentageOrAuto, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"padding-block-end" = c } }; + } + } + }, + .@"padding-inline-start" => { + if (css.generic.parseWithOptions(LengthPercentageOrAuto, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"padding-inline-start" = c } }; + } + } + }, + .@"padding-inline-end" => { + if (css.generic.parseWithOptions(LengthPercentageOrAuto, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"padding-inline-end" = c } }; + } + } + }, + .@"padding-block" => { + if (css.generic.parseWithOptions(PaddingBlock, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"padding-block" = c } }; + } + } + }, + .@"padding-inline" => { + if (css.generic.parseWithOptions(PaddingInline, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"padding-inline" = c } }; + } + } + }, + .padding => { + if (css.generic.parseWithOptions(Padding, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .padding = c } }; + } + } + }, + .@"scroll-margin-top" => { + if (css.generic.parseWithOptions(LengthPercentageOrAuto, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"scroll-margin-top" = c } }; + } + } + }, + .@"scroll-margin-bottom" => { + if (css.generic.parseWithOptions(LengthPercentageOrAuto, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"scroll-margin-bottom" = c } }; + } + } + }, + .@"scroll-margin-left" => { + if (css.generic.parseWithOptions(LengthPercentageOrAuto, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"scroll-margin-left" = c } }; + } + } + }, + .@"scroll-margin-right" => { + if (css.generic.parseWithOptions(LengthPercentageOrAuto, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"scroll-margin-right" = c } }; + } + } + }, + .@"scroll-margin-block-start" => { + if (css.generic.parseWithOptions(LengthPercentageOrAuto, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"scroll-margin-block-start" = c } }; + } + } + }, + .@"scroll-margin-block-end" => { + if (css.generic.parseWithOptions(LengthPercentageOrAuto, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"scroll-margin-block-end" = c } }; + } + } + }, + .@"scroll-margin-inline-start" => { + if 
(css.generic.parseWithOptions(LengthPercentageOrAuto, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"scroll-margin-inline-start" = c } }; + } + } + }, + .@"scroll-margin-inline-end" => { + if (css.generic.parseWithOptions(LengthPercentageOrAuto, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"scroll-margin-inline-end" = c } }; + } + } + }, + .@"scroll-margin-block" => { + if (css.generic.parseWithOptions(ScrollMarginBlock, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"scroll-margin-block" = c } }; + } + } + }, + .@"scroll-margin-inline" => { + if (css.generic.parseWithOptions(ScrollMarginInline, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"scroll-margin-inline" = c } }; + } + } + }, + .@"scroll-margin" => { + if (css.generic.parseWithOptions(ScrollMargin, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"scroll-margin" = c } }; + } + } + }, + .@"scroll-padding-top" => { + if (css.generic.parseWithOptions(LengthPercentageOrAuto, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"scroll-padding-top" = c } }; + } + } + }, + .@"scroll-padding-bottom" => { + if (css.generic.parseWithOptions(LengthPercentageOrAuto, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"scroll-padding-bottom" = c } }; + } + } + }, + .@"scroll-padding-left" => { + if (css.generic.parseWithOptions(LengthPercentageOrAuto, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"scroll-padding-left" = c } }; + } + } + }, + .@"scroll-padding-right" => { + if (css.generic.parseWithOptions(LengthPercentageOrAuto, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"scroll-padding-right" = c } }; + } + } + }, + .@"scroll-padding-block-start" => { + if (css.generic.parseWithOptions(LengthPercentageOrAuto, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"scroll-padding-block-start" = c } }; + } + } + }, + .@"scroll-padding-block-end" => { + if (css.generic.parseWithOptions(LengthPercentageOrAuto, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"scroll-padding-block-end" = c } }; + } + } + }, + .@"scroll-padding-inline-start" => { + if (css.generic.parseWithOptions(LengthPercentageOrAuto, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"scroll-padding-inline-start" = c } }; + } + } + }, + .@"scroll-padding-inline-end" => { + if (css.generic.parseWithOptions(LengthPercentageOrAuto, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"scroll-padding-inline-end" = c } }; + } + } + }, + .@"scroll-padding-block" => { + if (css.generic.parseWithOptions(ScrollPaddingBlock, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"scroll-padding-block" = c } }; + } + } + }, + .@"scroll-padding-inline" => { + if (css.generic.parseWithOptions(ScrollPaddingInline, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"scroll-padding-inline" = c } }; + } + } + }, + .@"scroll-padding" => { + if 
(css.generic.parseWithOptions(ScrollPadding, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"scroll-padding" = c } }; + } + } + }, + .@"font-weight" => { + if (css.generic.parseWithOptions(FontWeight, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"font-weight" = c } }; + } + } + }, + .@"font-size" => { + if (css.generic.parseWithOptions(FontSize, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"font-size" = c } }; + } + } + }, + .@"font-stretch" => { + if (css.generic.parseWithOptions(FontStretch, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"font-stretch" = c } }; + } + } + }, + .@"font-family" => { + if (css.generic.parseWithOptions(BabyList(FontFamily), input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"font-family" = c } }; + } + } + }, + .@"font-style" => { + if (css.generic.parseWithOptions(FontStyle, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"font-style" = c } }; + } + } + }, + .@"font-variant-caps" => { + if (css.generic.parseWithOptions(FontVariantCaps, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"font-variant-caps" = c } }; + } + } + }, + .@"line-height" => { + if (css.generic.parseWithOptions(LineHeight, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"line-height" = c } }; + } + } + }, + .font => { + if (css.generic.parseWithOptions(Font, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .font = c } }; + } + } + }, + .@"text-decoration-color" => |pre| { + if (css.generic.parseWithOptions(CssColor, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"text-decoration-color" = .{ c, pre } } }; + } + } + }, + .@"text-emphasis-color" => |pre| { + if (css.generic.parseWithOptions(CssColor, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"text-emphasis-color" = .{ c, pre } } }; + } + } + }, + .@"text-shadow" => { + if (css.generic.parseWithOptions(SmallList(TextShadow, 1), input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"text-shadow" = c } }; + } + } + }, + .direction => { + if (css.generic.parseWithOptions(Direction, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .direction = c } }; + } + } + }, + .composes => { + if (css.generic.parseWithOptions(Composes, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .composes = c } }; + } + } + }, + .@"mask-image" => |pre| { + if (css.generic.parseWithOptions(SmallList(Image, 1), input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"mask-image" = .{ c, pre } } }; + } + } + }, + .@"mask-mode" => { + if (css.generic.parseWithOptions(SmallList(MaskMode, 1), input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"mask-mode" = c } }; + } + } + }, + .@"mask-repeat" => |pre| { + if (css.generic.parseWithOptions(SmallList(BackgroundRepeat, 1), input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ 
.@"mask-repeat" = .{ c, pre } } }; + } + } + }, + .@"mask-position-x" => { + if (css.generic.parseWithOptions(SmallList(HorizontalPosition, 1), input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"mask-position-x" = c } }; + } + } + }, + .@"mask-position-y" => { + if (css.generic.parseWithOptions(SmallList(VerticalPosition, 1), input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"mask-position-y" = c } }; + } + } + }, + .@"mask-position" => |pre| { + if (css.generic.parseWithOptions(SmallList(Position, 1), input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"mask-position" = .{ c, pre } } }; + } + } + }, + .@"mask-clip" => |pre| { + @setEvalBranchQuota(5000); + if (css.generic.parseWithOptions(SmallList(MaskClip, 1), input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"mask-clip" = .{ c, pre } } }; + } + } + }, + .@"mask-origin" => |pre| { + if (css.generic.parseWithOptions(SmallList(GeometryBox, 1), input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"mask-origin" = .{ c, pre } } }; + } + } + }, + .@"mask-size" => |pre| { + if (css.generic.parseWithOptions(SmallList(BackgroundSize, 1), input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"mask-size" = .{ c, pre } } }; + } + } + }, + .@"mask-composite" => { + if (css.generic.parseWithOptions(SmallList(MaskComposite, 1), input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"mask-composite" = c } }; + } + } + }, + .@"mask-type" => { + if (css.generic.parseWithOptions(MaskType, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"mask-type" = c } }; + } + } + }, + .mask => |pre| { + if (css.generic.parseWithOptions(SmallList(Mask, 1), input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .mask = .{ c, pre } } }; + } + } + }, + .@"mask-border-source" => { + if (css.generic.parseWithOptions(Image, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"mask-border-source" = c } }; + } + } + }, + .@"mask-border-mode" => { + if (css.generic.parseWithOptions(MaskBorderMode, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"mask-border-mode" = c } }; + } + } + }, + .@"mask-border-slice" => { + if (css.generic.parseWithOptions(BorderImageSlice, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"mask-border-slice" = c } }; + } + } + }, + .@"mask-border-width" => { + if (css.generic.parseWithOptions(Rect(BorderImageSideWidth), input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"mask-border-width" = c } }; + } + } + }, + .@"mask-border-outset" => { + if (css.generic.parseWithOptions(Rect(LengthOrNumber), input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"mask-border-outset" = c } }; + } + } + }, + .@"mask-border-repeat" => { + if (css.generic.parseWithOptions(BorderImageRepeat, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"mask-border-repeat" = c } }; + } + } + }, + .@"mask-border" => { + if (css.generic.parseWithOptions(MaskBorder, input, 
options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"mask-border" = c } }; + } + } + }, + .@"-webkit-mask-composite" => { + if (css.generic.parseWithOptions(SmallList(WebKitMaskComposite, 1), input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"-webkit-mask-composite" = c } }; + } + } + }, + .@"mask-source-type" => |pre| { + if (css.generic.parseWithOptions(SmallList(WebKitMaskSourceType, 1), input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"mask-source-type" = .{ c, pre } } }; + } + } + }, + .@"mask-box-image" => |pre| { + if (css.generic.parseWithOptions(BorderImage, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"mask-box-image" = .{ c, pre } } }; + } + } + }, + .@"mask-box-image-source" => |pre| { + if (css.generic.parseWithOptions(Image, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"mask-box-image-source" = .{ c, pre } } }; + } + } + }, + .@"mask-box-image-slice" => |pre| { + if (css.generic.parseWithOptions(BorderImageSlice, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"mask-box-image-slice" = .{ c, pre } } }; + } + } + }, + .@"mask-box-image-width" => |pre| { + if (css.generic.parseWithOptions(Rect(BorderImageSideWidth), input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"mask-box-image-width" = .{ c, pre } } }; + } + } + }, + .@"mask-box-image-outset" => |pre| { + if (css.generic.parseWithOptions(Rect(LengthOrNumber), input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"mask-box-image-outset" = .{ c, pre } } }; + } + } + }, + .@"mask-box-image-repeat" => |pre| { + if (css.generic.parseWithOptions(BorderImageRepeat, input, options).asValue()) |c| { + if (input.expectExhausted().isOk()) { + return .{ .result = .{ .@"mask-box-image-repeat" = .{ c, pre } } }; + } + } + }, + .all => return .{ .result = .{ .all = switch (CSSWideKeyword.parse(input)) { + .result => |v| v, + .err => |e| return .{ .err = e }, + } } }, + .custom => |name| return .{ .result = .{ .custom = switch (CustomProperty.parse(name, input, options)) { + .result => |v| v, + .err => |e| return .{ .err = e }, + } } }, + else => {}, + } + + // If a value was unable to be parsed, treat as an unparsed property. + // This is different from a custom property, handled below, in that the property name is known + // and stored as an enum rather than a string. This lets property handlers more easily deal with it. + // Ideally we'd only do this if var() or env() references were seen, but err on the safe side for now. 
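+ // e.g. a declaration such as `color: var(--brand)` (an illustrative custom property name) fails the typed CssColor parse above, + // so we rewind to the saved state and keep the raw value as an UnparsedProperty that still carries the known PropertyId.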
+ input.reset(&state); + return .{ .result = .{ .unparsed = switch (UnparsedProperty.parse(property_id, input, options)) { + .result => |v| v, + .err => |e| return .{ .err = e }, + } } }; + } + + pub fn propertyId(this: *const Property) PropertyId { + return switch (this.*) { + .@"background-color" => .@"background-color", + .@"background-image" => .@"background-image", + .@"background-position-x" => .@"background-position-x", + .@"background-position-y" => .@"background-position-y", + .@"background-position" => .@"background-position", + .@"background-size" => .@"background-size", + .@"background-repeat" => .@"background-repeat", + .@"background-attachment" => .@"background-attachment", + .@"background-clip" => |*v| PropertyId{ .@"background-clip" = v[1] }, + .@"background-origin" => .@"background-origin", + .background => .background, + .@"box-shadow" => |*v| PropertyId{ .@"box-shadow" = v[1] }, + .opacity => .opacity, + .color => .color, + .display => .display, + .visibility => .visibility, + .width => .width, + .height => .height, + .@"min-width" => .@"min-width", + .@"min-height" => .@"min-height", + .@"max-width" => .@"max-width", + .@"max-height" => .@"max-height", + .@"block-size" => .@"block-size", + .@"inline-size" => .@"inline-size", + .@"min-block-size" => .@"min-block-size", + .@"min-inline-size" => .@"min-inline-size", + .@"max-block-size" => .@"max-block-size", + .@"max-inline-size" => .@"max-inline-size", + .@"box-sizing" => |*v| PropertyId{ .@"box-sizing" = v[1] }, + .@"aspect-ratio" => .@"aspect-ratio", + .overflow => .overflow, + .@"overflow-x" => .@"overflow-x", + .@"overflow-y" => .@"overflow-y", + .@"text-overflow" => |*v| PropertyId{ .@"text-overflow" = v[1] }, + .position => .position, + .top => .top, + .bottom => .bottom, + .left => .left, + .right => .right, + .@"inset-block-start" => .@"inset-block-start", + .@"inset-block-end" => .@"inset-block-end", + .@"inset-inline-start" => .@"inset-inline-start", + .@"inset-inline-end" => .@"inset-inline-end", + .@"inset-block" => .@"inset-block", + .@"inset-inline" => .@"inset-inline", + .inset => .inset, + .@"border-spacing" => .@"border-spacing", + .@"border-top-color" => .@"border-top-color", + .@"border-bottom-color" => .@"border-bottom-color", + .@"border-left-color" => .@"border-left-color", + .@"border-right-color" => .@"border-right-color", + .@"border-block-start-color" => .@"border-block-start-color", + .@"border-block-end-color" => .@"border-block-end-color", + .@"border-inline-start-color" => .@"border-inline-start-color", + .@"border-inline-end-color" => .@"border-inline-end-color", + .@"border-top-style" => .@"border-top-style", + .@"border-bottom-style" => .@"border-bottom-style", + .@"border-left-style" => .@"border-left-style", + .@"border-right-style" => .@"border-right-style", + .@"border-block-start-style" => .@"border-block-start-style", + .@"border-block-end-style" => .@"border-block-end-style", + .@"border-inline-start-style" => .@"border-inline-start-style", + .@"border-inline-end-style" => .@"border-inline-end-style", + .@"border-top-width" => .@"border-top-width", + .@"border-bottom-width" => .@"border-bottom-width", + .@"border-left-width" => .@"border-left-width", + .@"border-right-width" => .@"border-right-width", + .@"border-block-start-width" => .@"border-block-start-width", + .@"border-block-end-width" => .@"border-block-end-width", + .@"border-inline-start-width" => .@"border-inline-start-width", + .@"border-inline-end-width" => .@"border-inline-end-width", + .@"border-top-left-radius" => 
|*v| PropertyId{ .@"border-top-left-radius" = v[1] }, + .@"border-top-right-radius" => |*v| PropertyId{ .@"border-top-right-radius" = v[1] }, + .@"border-bottom-left-radius" => |*v| PropertyId{ .@"border-bottom-left-radius" = v[1] }, + .@"border-bottom-right-radius" => |*v| PropertyId{ .@"border-bottom-right-radius" = v[1] }, + .@"border-start-start-radius" => .@"border-start-start-radius", + .@"border-start-end-radius" => .@"border-start-end-radius", + .@"border-end-start-radius" => .@"border-end-start-radius", + .@"border-end-end-radius" => .@"border-end-end-radius", + .@"border-radius" => |*v| PropertyId{ .@"border-radius" = v[1] }, + .@"border-image-source" => .@"border-image-source", + .@"border-image-outset" => .@"border-image-outset", + .@"border-image-repeat" => .@"border-image-repeat", + .@"border-image-width" => .@"border-image-width", + .@"border-image-slice" => .@"border-image-slice", + .@"border-image" => |*v| PropertyId{ .@"border-image" = v[1] }, + .@"border-color" => .@"border-color", + .@"border-style" => .@"border-style", + .@"border-width" => .@"border-width", + .@"border-block-color" => .@"border-block-color", + .@"border-block-style" => .@"border-block-style", + .@"border-block-width" => .@"border-block-width", + .@"border-inline-color" => .@"border-inline-color", + .@"border-inline-style" => .@"border-inline-style", + .@"border-inline-width" => .@"border-inline-width", + .border => .border, + .@"border-top" => .@"border-top", + .@"border-bottom" => .@"border-bottom", + .@"border-left" => .@"border-left", + .@"border-right" => .@"border-right", + .@"border-block" => .@"border-block", + .@"border-block-start" => .@"border-block-start", + .@"border-block-end" => .@"border-block-end", + .@"border-inline" => .@"border-inline", + .@"border-inline-start" => .@"border-inline-start", + .@"border-inline-end" => .@"border-inline-end", + .outline => .outline, + .@"outline-color" => .@"outline-color", + .@"outline-style" => .@"outline-style", + .@"outline-width" => .@"outline-width", + .@"flex-direction" => |*v| PropertyId{ .@"flex-direction" = v[1] }, + .@"flex-wrap" => |*v| PropertyId{ .@"flex-wrap" = v[1] }, + .@"flex-flow" => |*v| PropertyId{ .@"flex-flow" = v[1] }, + .@"flex-grow" => |*v| PropertyId{ .@"flex-grow" = v[1] }, + .@"flex-shrink" => |*v| PropertyId{ .@"flex-shrink" = v[1] }, + .@"flex-basis" => |*v| PropertyId{ .@"flex-basis" = v[1] }, + .flex => |*v| PropertyId{ .flex = v[1] }, + .order => |*v| PropertyId{ .order = v[1] }, + .@"align-content" => |*v| PropertyId{ .@"align-content" = v[1] }, + .@"justify-content" => |*v| PropertyId{ .@"justify-content" = v[1] }, + .@"place-content" => .@"place-content", + .@"align-self" => |*v| PropertyId{ .@"align-self" = v[1] }, + .@"justify-self" => .@"justify-self", + .@"place-self" => .@"place-self", + .@"align-items" => |*v| PropertyId{ .@"align-items" = v[1] }, + .@"justify-items" => .@"justify-items", + .@"place-items" => .@"place-items", + .@"row-gap" => .@"row-gap", + .@"column-gap" => .@"column-gap", + .gap => .gap, + .@"box-orient" => |*v| PropertyId{ .@"box-orient" = v[1] }, + .@"box-direction" => |*v| PropertyId{ .@"box-direction" = v[1] }, + .@"box-ordinal-group" => |*v| PropertyId{ .@"box-ordinal-group" = v[1] }, + .@"box-align" => |*v| PropertyId{ .@"box-align" = v[1] }, + .@"box-flex" => |*v| PropertyId{ .@"box-flex" = v[1] }, + .@"box-flex-group" => |*v| PropertyId{ .@"box-flex-group" = v[1] }, + .@"box-pack" => |*v| PropertyId{ .@"box-pack" = v[1] }, + .@"box-lines" => |*v| PropertyId{ .@"box-lines" = v[1] }, + 
.@"flex-pack" => |*v| PropertyId{ .@"flex-pack" = v[1] }, + .@"flex-order" => |*v| PropertyId{ .@"flex-order" = v[1] }, + .@"flex-align" => |*v| PropertyId{ .@"flex-align" = v[1] }, + .@"flex-item-align" => |*v| PropertyId{ .@"flex-item-align" = v[1] }, + .@"flex-line-pack" => |*v| PropertyId{ .@"flex-line-pack" = v[1] }, + .@"flex-positive" => |*v| PropertyId{ .@"flex-positive" = v[1] }, + .@"flex-negative" => |*v| PropertyId{ .@"flex-negative" = v[1] }, + .@"flex-preferred-size" => |*v| PropertyId{ .@"flex-preferred-size" = v[1] }, + .@"margin-top" => .@"margin-top", + .@"margin-bottom" => .@"margin-bottom", + .@"margin-left" => .@"margin-left", + .@"margin-right" => .@"margin-right", + .@"margin-block-start" => .@"margin-block-start", + .@"margin-block-end" => .@"margin-block-end", + .@"margin-inline-start" => .@"margin-inline-start", + .@"margin-inline-end" => .@"margin-inline-end", + .@"margin-block" => .@"margin-block", + .@"margin-inline" => .@"margin-inline", + .margin => .margin, + .@"padding-top" => .@"padding-top", + .@"padding-bottom" => .@"padding-bottom", + .@"padding-left" => .@"padding-left", + .@"padding-right" => .@"padding-right", + .@"padding-block-start" => .@"padding-block-start", + .@"padding-block-end" => .@"padding-block-end", + .@"padding-inline-start" => .@"padding-inline-start", + .@"padding-inline-end" => .@"padding-inline-end", + .@"padding-block" => .@"padding-block", + .@"padding-inline" => .@"padding-inline", + .padding => .padding, + .@"scroll-margin-top" => .@"scroll-margin-top", + .@"scroll-margin-bottom" => .@"scroll-margin-bottom", + .@"scroll-margin-left" => .@"scroll-margin-left", + .@"scroll-margin-right" => .@"scroll-margin-right", + .@"scroll-margin-block-start" => .@"scroll-margin-block-start", + .@"scroll-margin-block-end" => .@"scroll-margin-block-end", + .@"scroll-margin-inline-start" => .@"scroll-margin-inline-start", + .@"scroll-margin-inline-end" => .@"scroll-margin-inline-end", + .@"scroll-margin-block" => .@"scroll-margin-block", + .@"scroll-margin-inline" => .@"scroll-margin-inline", + .@"scroll-margin" => .@"scroll-margin", + .@"scroll-padding-top" => .@"scroll-padding-top", + .@"scroll-padding-bottom" => .@"scroll-padding-bottom", + .@"scroll-padding-left" => .@"scroll-padding-left", + .@"scroll-padding-right" => .@"scroll-padding-right", + .@"scroll-padding-block-start" => .@"scroll-padding-block-start", + .@"scroll-padding-block-end" => .@"scroll-padding-block-end", + .@"scroll-padding-inline-start" => .@"scroll-padding-inline-start", + .@"scroll-padding-inline-end" => .@"scroll-padding-inline-end", + .@"scroll-padding-block" => .@"scroll-padding-block", + .@"scroll-padding-inline" => .@"scroll-padding-inline", + .@"scroll-padding" => .@"scroll-padding", + .@"font-weight" => .@"font-weight", + .@"font-size" => .@"font-size", + .@"font-stretch" => .@"font-stretch", + .@"font-family" => .@"font-family", + .@"font-style" => .@"font-style", + .@"font-variant-caps" => .@"font-variant-caps", + .@"line-height" => .@"line-height", + .font => .font, + .@"text-decoration-color" => |*v| PropertyId{ .@"text-decoration-color" = v[1] }, + .@"text-emphasis-color" => |*v| PropertyId{ .@"text-emphasis-color" = v[1] }, + .@"text-shadow" => .@"text-shadow", + .direction => .direction, + .composes => .composes, + .@"mask-image" => |*v| PropertyId{ .@"mask-image" = v[1] }, + .@"mask-mode" => .@"mask-mode", + .@"mask-repeat" => |*v| PropertyId{ .@"mask-repeat" = v[1] }, + .@"mask-position-x" => .@"mask-position-x", + .@"mask-position-y" => 
.@"mask-position-y", + .@"mask-position" => |*v| PropertyId{ .@"mask-position" = v[1] }, + .@"mask-clip" => |*v| PropertyId{ .@"mask-clip" = v[1] }, + .@"mask-origin" => |*v| PropertyId{ .@"mask-origin" = v[1] }, + .@"mask-size" => |*v| PropertyId{ .@"mask-size" = v[1] }, + .@"mask-composite" => .@"mask-composite", + .@"mask-type" => .@"mask-type", + .mask => |*v| PropertyId{ .mask = v[1] }, + .@"mask-border-source" => .@"mask-border-source", + .@"mask-border-mode" => .@"mask-border-mode", + .@"mask-border-slice" => .@"mask-border-slice", + .@"mask-border-width" => .@"mask-border-width", + .@"mask-border-outset" => .@"mask-border-outset", + .@"mask-border-repeat" => .@"mask-border-repeat", + .@"mask-border" => .@"mask-border", + .@"-webkit-mask-composite" => .@"-webkit-mask-composite", + .@"mask-source-type" => |*v| PropertyId{ .@"mask-source-type" = v[1] }, + .@"mask-box-image" => |*v| PropertyId{ .@"mask-box-image" = v[1] }, + .@"mask-box-image-source" => |*v| PropertyId{ .@"mask-box-image-source" = v[1] }, + .@"mask-box-image-slice" => |*v| PropertyId{ .@"mask-box-image-slice" = v[1] }, + .@"mask-box-image-width" => |*v| PropertyId{ .@"mask-box-image-width" = v[1] }, + .@"mask-box-image-outset" => |*v| PropertyId{ .@"mask-box-image-outset" = v[1] }, + .@"mask-box-image-repeat" => |*v| PropertyId{ .@"mask-box-image-repeat" = v[1] }, + .all => PropertyId.all, + .unparsed => |unparsed| unparsed.property_id, + .custom => |c| .{ .custom = c.name }, + }; + } + + pub fn deepClone(this: *const Property, allocator: std.mem.Allocator) Property { + return switch (this.*) { + .@"background-color" => |*v| .{ .@"background-color" = v.deepClone(allocator) }, + .@"background-image" => |*v| .{ .@"background-image" = v.deepClone(allocator) }, + .@"background-position-x" => |*v| .{ .@"background-position-x" = v.deepClone(allocator) }, + .@"background-position-y" => |*v| .{ .@"background-position-y" = v.deepClone(allocator) }, + .@"background-position" => |*v| .{ .@"background-position" = v.deepClone(allocator) }, + .@"background-size" => |*v| .{ .@"background-size" = v.deepClone(allocator) }, + .@"background-repeat" => |*v| .{ .@"background-repeat" = v.deepClone(allocator) }, + .@"background-attachment" => |*v| .{ .@"background-attachment" = v.deepClone(allocator) }, + .@"background-clip" => |*v| .{ .@"background-clip" = .{ v[0].deepClone(allocator), v[1] } }, + .@"background-origin" => |*v| .{ .@"background-origin" = v.deepClone(allocator) }, + .background => |*v| .{ .background = v.deepClone(allocator) }, + .@"box-shadow" => |*v| .{ .@"box-shadow" = .{ v[0].deepClone(allocator), v[1] } }, + .opacity => |*v| .{ .opacity = v.deepClone(allocator) }, + .color => |*v| .{ .color = v.deepClone(allocator) }, + .display => |*v| .{ .display = v.deepClone(allocator) }, + .visibility => |*v| .{ .visibility = v.deepClone(allocator) }, + .width => |*v| .{ .width = v.deepClone(allocator) }, + .height => |*v| .{ .height = v.deepClone(allocator) }, + .@"min-width" => |*v| .{ .@"min-width" = v.deepClone(allocator) }, + .@"min-height" => |*v| .{ .@"min-height" = v.deepClone(allocator) }, + .@"max-width" => |*v| .{ .@"max-width" = v.deepClone(allocator) }, + .@"max-height" => |*v| .{ .@"max-height" = v.deepClone(allocator) }, + .@"block-size" => |*v| .{ .@"block-size" = v.deepClone(allocator) }, + .@"inline-size" => |*v| .{ .@"inline-size" = v.deepClone(allocator) }, + .@"min-block-size" => |*v| .{ .@"min-block-size" = v.deepClone(allocator) }, + .@"min-inline-size" => |*v| .{ .@"min-inline-size" = v.deepClone(allocator) 
}, + .@"max-block-size" => |*v| .{ .@"max-block-size" = v.deepClone(allocator) }, + .@"max-inline-size" => |*v| .{ .@"max-inline-size" = v.deepClone(allocator) }, + .@"box-sizing" => |*v| .{ .@"box-sizing" = .{ v[0].deepClone(allocator), v[1] } }, + .@"aspect-ratio" => |*v| .{ .@"aspect-ratio" = v.deepClone(allocator) }, + .overflow => |*v| .{ .overflow = v.deepClone(allocator) }, + .@"overflow-x" => |*v| .{ .@"overflow-x" = v.deepClone(allocator) }, + .@"overflow-y" => |*v| .{ .@"overflow-y" = v.deepClone(allocator) }, + .@"text-overflow" => |*v| .{ .@"text-overflow" = .{ v[0].deepClone(allocator), v[1] } }, + .position => |*v| .{ .position = v.deepClone(allocator) }, + .top => |*v| .{ .top = v.deepClone(allocator) }, + .bottom => |*v| .{ .bottom = v.deepClone(allocator) }, + .left => |*v| .{ .left = v.deepClone(allocator) }, + .right => |*v| .{ .right = v.deepClone(allocator) }, + .@"inset-block-start" => |*v| .{ .@"inset-block-start" = v.deepClone(allocator) }, + .@"inset-block-end" => |*v| .{ .@"inset-block-end" = v.deepClone(allocator) }, + .@"inset-inline-start" => |*v| .{ .@"inset-inline-start" = v.deepClone(allocator) }, + .@"inset-inline-end" => |*v| .{ .@"inset-inline-end" = v.deepClone(allocator) }, + .@"inset-block" => |*v| .{ .@"inset-block" = v.deepClone(allocator) }, + .@"inset-inline" => |*v| .{ .@"inset-inline" = v.deepClone(allocator) }, + .inset => |*v| .{ .inset = v.deepClone(allocator) }, + .@"border-spacing" => |*v| .{ .@"border-spacing" = v.deepClone(allocator) }, + .@"border-top-color" => |*v| .{ .@"border-top-color" = v.deepClone(allocator) }, + .@"border-bottom-color" => |*v| .{ .@"border-bottom-color" = v.deepClone(allocator) }, + .@"border-left-color" => |*v| .{ .@"border-left-color" = v.deepClone(allocator) }, + .@"border-right-color" => |*v| .{ .@"border-right-color" = v.deepClone(allocator) }, + .@"border-block-start-color" => |*v| .{ .@"border-block-start-color" = v.deepClone(allocator) }, + .@"border-block-end-color" => |*v| .{ .@"border-block-end-color" = v.deepClone(allocator) }, + .@"border-inline-start-color" => |*v| .{ .@"border-inline-start-color" = v.deepClone(allocator) }, + .@"border-inline-end-color" => |*v| .{ .@"border-inline-end-color" = v.deepClone(allocator) }, + .@"border-top-style" => |*v| .{ .@"border-top-style" = v.deepClone(allocator) }, + .@"border-bottom-style" => |*v| .{ .@"border-bottom-style" = v.deepClone(allocator) }, + .@"border-left-style" => |*v| .{ .@"border-left-style" = v.deepClone(allocator) }, + .@"border-right-style" => |*v| .{ .@"border-right-style" = v.deepClone(allocator) }, + .@"border-block-start-style" => |*v| .{ .@"border-block-start-style" = v.deepClone(allocator) }, + .@"border-block-end-style" => |*v| .{ .@"border-block-end-style" = v.deepClone(allocator) }, + .@"border-inline-start-style" => |*v| .{ .@"border-inline-start-style" = v.deepClone(allocator) }, + .@"border-inline-end-style" => |*v| .{ .@"border-inline-end-style" = v.deepClone(allocator) }, + .@"border-top-width" => |*v| .{ .@"border-top-width" = v.deepClone(allocator) }, + .@"border-bottom-width" => |*v| .{ .@"border-bottom-width" = v.deepClone(allocator) }, + .@"border-left-width" => |*v| .{ .@"border-left-width" = v.deepClone(allocator) }, + .@"border-right-width" => |*v| .{ .@"border-right-width" = v.deepClone(allocator) }, + .@"border-block-start-width" => |*v| .{ .@"border-block-start-width" = v.deepClone(allocator) }, + .@"border-block-end-width" => |*v| .{ .@"border-block-end-width" = v.deepClone(allocator) }, + .@"border-inline-start-width" 
=> |*v| .{ .@"border-inline-start-width" = v.deepClone(allocator) }, + .@"border-inline-end-width" => |*v| .{ .@"border-inline-end-width" = v.deepClone(allocator) }, + .@"border-top-left-radius" => |*v| .{ .@"border-top-left-radius" = .{ v[0].deepClone(allocator), v[1] } }, + .@"border-top-right-radius" => |*v| .{ .@"border-top-right-radius" = .{ v[0].deepClone(allocator), v[1] } }, + .@"border-bottom-left-radius" => |*v| .{ .@"border-bottom-left-radius" = .{ v[0].deepClone(allocator), v[1] } }, + .@"border-bottom-right-radius" => |*v| .{ .@"border-bottom-right-radius" = .{ v[0].deepClone(allocator), v[1] } }, + .@"border-start-start-radius" => |*v| .{ .@"border-start-start-radius" = v.deepClone(allocator) }, + .@"border-start-end-radius" => |*v| .{ .@"border-start-end-radius" = v.deepClone(allocator) }, + .@"border-end-start-radius" => |*v| .{ .@"border-end-start-radius" = v.deepClone(allocator) }, + .@"border-end-end-radius" => |*v| .{ .@"border-end-end-radius" = v.deepClone(allocator) }, + .@"border-radius" => |*v| .{ .@"border-radius" = .{ v[0].deepClone(allocator), v[1] } }, + .@"border-image-source" => |*v| .{ .@"border-image-source" = v.deepClone(allocator) }, + .@"border-image-outset" => |*v| .{ .@"border-image-outset" = v.deepClone(allocator) }, + .@"border-image-repeat" => |*v| .{ .@"border-image-repeat" = v.deepClone(allocator) }, + .@"border-image-width" => |*v| .{ .@"border-image-width" = v.deepClone(allocator) }, + .@"border-image-slice" => |*v| .{ .@"border-image-slice" = v.deepClone(allocator) }, + .@"border-image" => |*v| .{ .@"border-image" = .{ v[0].deepClone(allocator), v[1] } }, + .@"border-color" => |*v| .{ .@"border-color" = v.deepClone(allocator) }, + .@"border-style" => |*v| .{ .@"border-style" = v.deepClone(allocator) }, + .@"border-width" => |*v| .{ .@"border-width" = v.deepClone(allocator) }, + .@"border-block-color" => |*v| .{ .@"border-block-color" = v.deepClone(allocator) }, + .@"border-block-style" => |*v| .{ .@"border-block-style" = v.deepClone(allocator) }, + .@"border-block-width" => |*v| .{ .@"border-block-width" = v.deepClone(allocator) }, + .@"border-inline-color" => |*v| .{ .@"border-inline-color" = v.deepClone(allocator) }, + .@"border-inline-style" => |*v| .{ .@"border-inline-style" = v.deepClone(allocator) }, + .@"border-inline-width" => |*v| .{ .@"border-inline-width" = v.deepClone(allocator) }, + .border => |*v| .{ .border = v.deepClone(allocator) }, + .@"border-top" => |*v| .{ .@"border-top" = v.deepClone(allocator) }, + .@"border-bottom" => |*v| .{ .@"border-bottom" = v.deepClone(allocator) }, + .@"border-left" => |*v| .{ .@"border-left" = v.deepClone(allocator) }, + .@"border-right" => |*v| .{ .@"border-right" = v.deepClone(allocator) }, + .@"border-block" => |*v| .{ .@"border-block" = v.deepClone(allocator) }, + .@"border-block-start" => |*v| .{ .@"border-block-start" = v.deepClone(allocator) }, + .@"border-block-end" => |*v| .{ .@"border-block-end" = v.deepClone(allocator) }, + .@"border-inline" => |*v| .{ .@"border-inline" = v.deepClone(allocator) }, + .@"border-inline-start" => |*v| .{ .@"border-inline-start" = v.deepClone(allocator) }, + .@"border-inline-end" => |*v| .{ .@"border-inline-end" = v.deepClone(allocator) }, + .outline => |*v| .{ .outline = v.deepClone(allocator) }, + .@"outline-color" => |*v| .{ .@"outline-color" = v.deepClone(allocator) }, + .@"outline-style" => |*v| .{ .@"outline-style" = v.deepClone(allocator) }, + .@"outline-width" => |*v| .{ .@"outline-width" = v.deepClone(allocator) }, + .@"flex-direction" => |*v| .{ 
.@"flex-direction" = .{ v[0].deepClone(allocator), v[1] } }, + .@"flex-wrap" => |*v| .{ .@"flex-wrap" = .{ v[0].deepClone(allocator), v[1] } }, + .@"flex-flow" => |*v| .{ .@"flex-flow" = .{ v[0].deepClone(allocator), v[1] } }, + .@"flex-grow" => |*v| .{ .@"flex-grow" = .{ v[0], v[1] } }, + .@"flex-shrink" => |*v| .{ .@"flex-shrink" = .{ v[0], v[1] } }, + .@"flex-basis" => |*v| .{ .@"flex-basis" = .{ v[0].deepClone(allocator), v[1] } }, + .flex => |*v| .{ .flex = .{ v[0].deepClone(allocator), v[1] } }, + .order => |*v| .{ .order = .{ v[0], v[1] } }, + .@"align-content" => |*v| .{ .@"align-content" = .{ v[0].deepClone(allocator), v[1] } }, + .@"justify-content" => |*v| .{ .@"justify-content" = .{ v[0].deepClone(allocator), v[1] } }, + .@"place-content" => |*v| .{ .@"place-content" = v.deepClone(allocator) }, + .@"align-self" => |*v| .{ .@"align-self" = .{ v[0].deepClone(allocator), v[1] } }, + .@"justify-self" => |*v| .{ .@"justify-self" = v.deepClone(allocator) }, + .@"place-self" => |*v| .{ .@"place-self" = v.deepClone(allocator) }, + .@"align-items" => |*v| .{ .@"align-items" = .{ v[0].deepClone(allocator), v[1] } }, + .@"justify-items" => |*v| .{ .@"justify-items" = v.deepClone(allocator) }, + .@"place-items" => |*v| .{ .@"place-items" = v.deepClone(allocator) }, + .@"row-gap" => |*v| .{ .@"row-gap" = v.deepClone(allocator) }, + .@"column-gap" => |*v| .{ .@"column-gap" = v.deepClone(allocator) }, + .gap => |*v| .{ .gap = v.deepClone(allocator) }, + .@"box-orient" => |*v| .{ .@"box-orient" = .{ v[0].deepClone(allocator), v[1] } }, + .@"box-direction" => |*v| .{ .@"box-direction" = .{ v[0].deepClone(allocator), v[1] } }, + .@"box-ordinal-group" => |*v| .{ .@"box-ordinal-group" = .{ v[0], v[1] } }, + .@"box-align" => |*v| .{ .@"box-align" = .{ v[0].deepClone(allocator), v[1] } }, + .@"box-flex" => |*v| .{ .@"box-flex" = .{ v[0], v[1] } }, + .@"box-flex-group" => |*v| .{ .@"box-flex-group" = .{ v[0], v[1] } }, + .@"box-pack" => |*v| .{ .@"box-pack" = .{ v[0].deepClone(allocator), v[1] } }, + .@"box-lines" => |*v| .{ .@"box-lines" = .{ v[0].deepClone(allocator), v[1] } }, + .@"flex-pack" => |*v| .{ .@"flex-pack" = .{ v[0].deepClone(allocator), v[1] } }, + .@"flex-order" => |*v| .{ .@"flex-order" = .{ v[0], v[1] } }, + .@"flex-align" => |*v| .{ .@"flex-align" = .{ v[0].deepClone(allocator), v[1] } }, + .@"flex-item-align" => |*v| .{ .@"flex-item-align" = .{ v[0].deepClone(allocator), v[1] } }, + .@"flex-line-pack" => |*v| .{ .@"flex-line-pack" = .{ v[0].deepClone(allocator), v[1] } }, + .@"flex-positive" => |*v| .{ .@"flex-positive" = .{ v[0], v[1] } }, + .@"flex-negative" => |*v| .{ .@"flex-negative" = .{ v[0], v[1] } }, + .@"flex-preferred-size" => |*v| .{ .@"flex-preferred-size" = .{ v[0].deepClone(allocator), v[1] } }, + .@"margin-top" => |*v| .{ .@"margin-top" = v.deepClone(allocator) }, + .@"margin-bottom" => |*v| .{ .@"margin-bottom" = v.deepClone(allocator) }, + .@"margin-left" => |*v| .{ .@"margin-left" = v.deepClone(allocator) }, + .@"margin-right" => |*v| .{ .@"margin-right" = v.deepClone(allocator) }, + .@"margin-block-start" => |*v| .{ .@"margin-block-start" = v.deepClone(allocator) }, + .@"margin-block-end" => |*v| .{ .@"margin-block-end" = v.deepClone(allocator) }, + .@"margin-inline-start" => |*v| .{ .@"margin-inline-start" = v.deepClone(allocator) }, + .@"margin-inline-end" => |*v| .{ .@"margin-inline-end" = v.deepClone(allocator) }, + .@"margin-block" => |*v| .{ .@"margin-block" = v.deepClone(allocator) }, + .@"margin-inline" => |*v| .{ .@"margin-inline" = 
v.deepClone(allocator) }, + .margin => |*v| .{ .margin = v.deepClone(allocator) }, + .@"padding-top" => |*v| .{ .@"padding-top" = v.deepClone(allocator) }, + .@"padding-bottom" => |*v| .{ .@"padding-bottom" = v.deepClone(allocator) }, + .@"padding-left" => |*v| .{ .@"padding-left" = v.deepClone(allocator) }, + .@"padding-right" => |*v| .{ .@"padding-right" = v.deepClone(allocator) }, + .@"padding-block-start" => |*v| .{ .@"padding-block-start" = v.deepClone(allocator) }, + .@"padding-block-end" => |*v| .{ .@"padding-block-end" = v.deepClone(allocator) }, + .@"padding-inline-start" => |*v| .{ .@"padding-inline-start" = v.deepClone(allocator) }, + .@"padding-inline-end" => |*v| .{ .@"padding-inline-end" = v.deepClone(allocator) }, + .@"padding-block" => |*v| .{ .@"padding-block" = v.deepClone(allocator) }, + .@"padding-inline" => |*v| .{ .@"padding-inline" = v.deepClone(allocator) }, + .padding => |*v| .{ .padding = v.deepClone(allocator) }, + .@"scroll-margin-top" => |*v| .{ .@"scroll-margin-top" = v.deepClone(allocator) }, + .@"scroll-margin-bottom" => |*v| .{ .@"scroll-margin-bottom" = v.deepClone(allocator) }, + .@"scroll-margin-left" => |*v| .{ .@"scroll-margin-left" = v.deepClone(allocator) }, + .@"scroll-margin-right" => |*v| .{ .@"scroll-margin-right" = v.deepClone(allocator) }, + .@"scroll-margin-block-start" => |*v| .{ .@"scroll-margin-block-start" = v.deepClone(allocator) }, + .@"scroll-margin-block-end" => |*v| .{ .@"scroll-margin-block-end" = v.deepClone(allocator) }, + .@"scroll-margin-inline-start" => |*v| .{ .@"scroll-margin-inline-start" = v.deepClone(allocator) }, + .@"scroll-margin-inline-end" => |*v| .{ .@"scroll-margin-inline-end" = v.deepClone(allocator) }, + .@"scroll-margin-block" => |*v| .{ .@"scroll-margin-block" = v.deepClone(allocator) }, + .@"scroll-margin-inline" => |*v| .{ .@"scroll-margin-inline" = v.deepClone(allocator) }, + .@"scroll-margin" => |*v| .{ .@"scroll-margin" = v.deepClone(allocator) }, + .@"scroll-padding-top" => |*v| .{ .@"scroll-padding-top" = v.deepClone(allocator) }, + .@"scroll-padding-bottom" => |*v| .{ .@"scroll-padding-bottom" = v.deepClone(allocator) }, + .@"scroll-padding-left" => |*v| .{ .@"scroll-padding-left" = v.deepClone(allocator) }, + .@"scroll-padding-right" => |*v| .{ .@"scroll-padding-right" = v.deepClone(allocator) }, + .@"scroll-padding-block-start" => |*v| .{ .@"scroll-padding-block-start" = v.deepClone(allocator) }, + .@"scroll-padding-block-end" => |*v| .{ .@"scroll-padding-block-end" = v.deepClone(allocator) }, + .@"scroll-padding-inline-start" => |*v| .{ .@"scroll-padding-inline-start" = v.deepClone(allocator) }, + .@"scroll-padding-inline-end" => |*v| .{ .@"scroll-padding-inline-end" = v.deepClone(allocator) }, + .@"scroll-padding-block" => |*v| .{ .@"scroll-padding-block" = v.deepClone(allocator) }, + .@"scroll-padding-inline" => |*v| .{ .@"scroll-padding-inline" = v.deepClone(allocator) }, + .@"scroll-padding" => |*v| .{ .@"scroll-padding" = v.deepClone(allocator) }, + .@"font-weight" => |*v| .{ .@"font-weight" = v.deepClone(allocator) }, + .@"font-size" => |*v| .{ .@"font-size" = v.deepClone(allocator) }, + .@"font-stretch" => |*v| .{ .@"font-stretch" = v.deepClone(allocator) }, + .@"font-family" => |*v| .{ .@"font-family" = css.generic.deepClone(BabyList(FontFamily), v, allocator) }, + .@"font-style" => |*v| .{ .@"font-style" = v.deepClone(allocator) }, + .@"font-variant-caps" => |*v| .{ .@"font-variant-caps" = v.deepClone(allocator) }, + .@"line-height" => |*v| .{ .@"line-height" = v.deepClone(allocator) }, + 
.font => |*v| .{ .font = v.deepClone(allocator) }, + .@"text-decoration-color" => |*v| .{ .@"text-decoration-color" = .{ v[0].deepClone(allocator), v[1] } }, + .@"text-emphasis-color" => |*v| .{ .@"text-emphasis-color" = .{ v[0].deepClone(allocator), v[1] } }, + .@"text-shadow" => |*v| .{ .@"text-shadow" = v.deepClone(allocator) }, + .direction => |*v| .{ .direction = v.deepClone(allocator) }, + .composes => |*v| .{ .composes = v.deepClone(allocator) }, + .@"mask-image" => |*v| .{ .@"mask-image" = .{ v[0].deepClone(allocator), v[1] } }, + .@"mask-mode" => |*v| .{ .@"mask-mode" = v.deepClone(allocator) }, + .@"mask-repeat" => |*v| .{ .@"mask-repeat" = .{ v[0].deepClone(allocator), v[1] } }, + .@"mask-position-x" => |*v| .{ .@"mask-position-x" = v.deepClone(allocator) }, + .@"mask-position-y" => |*v| .{ .@"mask-position-y" = v.deepClone(allocator) }, + .@"mask-position" => |*v| .{ .@"mask-position" = .{ v[0].deepClone(allocator), v[1] } }, + .@"mask-clip" => |*v| .{ .@"mask-clip" = .{ v[0].deepClone(allocator), v[1] } }, + .@"mask-origin" => |*v| .{ .@"mask-origin" = .{ v[0].deepClone(allocator), v[1] } }, + .@"mask-size" => |*v| .{ .@"mask-size" = .{ v[0].deepClone(allocator), v[1] } }, + .@"mask-composite" => |*v| .{ .@"mask-composite" = v.deepClone(allocator) }, + .@"mask-type" => |*v| .{ .@"mask-type" = v.deepClone(allocator) }, + .mask => |*v| .{ .mask = .{ v[0].deepClone(allocator), v[1] } }, + .@"mask-border-source" => |*v| .{ .@"mask-border-source" = v.deepClone(allocator) }, + .@"mask-border-mode" => |*v| .{ .@"mask-border-mode" = v.deepClone(allocator) }, + .@"mask-border-slice" => |*v| .{ .@"mask-border-slice" = v.deepClone(allocator) }, + .@"mask-border-width" => |*v| .{ .@"mask-border-width" = v.deepClone(allocator) }, + .@"mask-border-outset" => |*v| .{ .@"mask-border-outset" = v.deepClone(allocator) }, + .@"mask-border-repeat" => |*v| .{ .@"mask-border-repeat" = v.deepClone(allocator) }, + .@"mask-border" => |*v| .{ .@"mask-border" = v.deepClone(allocator) }, + .@"-webkit-mask-composite" => |*v| .{ .@"-webkit-mask-composite" = v.deepClone(allocator) }, + .@"mask-source-type" => |*v| .{ .@"mask-source-type" = .{ v[0].deepClone(allocator), v[1] } }, + .@"mask-box-image" => |*v| .{ .@"mask-box-image" = .{ v[0].deepClone(allocator), v[1] } }, + .@"mask-box-image-source" => |*v| .{ .@"mask-box-image-source" = .{ v[0].deepClone(allocator), v[1] } }, + .@"mask-box-image-slice" => |*v| .{ .@"mask-box-image-slice" = .{ v[0].deepClone(allocator), v[1] } }, + .@"mask-box-image-width" => |*v| .{ .@"mask-box-image-width" = .{ v[0].deepClone(allocator), v[1] } }, + .@"mask-box-image-outset" => |*v| .{ .@"mask-box-image-outset" = .{ v[0].deepClone(allocator), v[1] } }, + .@"mask-box-image-repeat" => |*v| .{ .@"mask-box-image-repeat" = .{ v[0].deepClone(allocator), v[1] } }, + .all => |*a| return .{ .all = a.deepClone(allocator) }, + .unparsed => |*u| return .{ .unparsed = u.deepClone(allocator) }, + .custom => |*c| return .{ .custom = c.deepClone(allocator) }, + }; + } + + /// This is left empty for now, since not every property has a deinit function. + /// Freeing is not strictly necessary anyway, because all allocations go into an arena. + /// It is mostly intended as a performance optimization when the mimalloc arena is used, + /// since the arena can then reclaim the memory and reuse it for subsequent allocations. + /// That has not been benchmarked, so the actual speedup is unknown.
+ pub fn deinit(this: *@This(), allocator: std.mem.Allocator) void { + _ = this; + _ = allocator; + } + + pub inline fn __toCssHelper(this: *const Property) struct { []const u8, VendorPrefix } { + return switch (this.*) { + .@"background-color" => .{ "background-color", VendorPrefix{ .none = true } }, + .@"background-image" => .{ "background-image", VendorPrefix{ .none = true } }, + .@"background-position-x" => .{ "background-position-x", VendorPrefix{ .none = true } }, + .@"background-position-y" => .{ "background-position-y", VendorPrefix{ .none = true } }, + .@"background-position" => .{ "background-position", VendorPrefix{ .none = true } }, + .@"background-size" => .{ "background-size", VendorPrefix{ .none = true } }, + .@"background-repeat" => .{ "background-repeat", VendorPrefix{ .none = true } }, + .@"background-attachment" => .{ "background-attachment", VendorPrefix{ .none = true } }, + .@"background-clip" => |*x| .{ "background-clip", x.@"1" }, + .@"background-origin" => .{ "background-origin", VendorPrefix{ .none = true } }, + .background => .{ "background", VendorPrefix{ .none = true } }, + .@"box-shadow" => |*x| .{ "box-shadow", x.@"1" }, + .opacity => .{ "opacity", VendorPrefix{ .none = true } }, + .color => .{ "color", VendorPrefix{ .none = true } }, + .display => .{ "display", VendorPrefix{ .none = true } }, + .visibility => .{ "visibility", VendorPrefix{ .none = true } }, + .width => .{ "width", VendorPrefix{ .none = true } }, + .height => .{ "height", VendorPrefix{ .none = true } }, + .@"min-width" => .{ "min-width", VendorPrefix{ .none = true } }, + .@"min-height" => .{ "min-height", VendorPrefix{ .none = true } }, + .@"max-width" => .{ "max-width", VendorPrefix{ .none = true } }, + .@"max-height" => .{ "max-height", VendorPrefix{ .none = true } }, + .@"block-size" => .{ "block-size", VendorPrefix{ .none = true } }, + .@"inline-size" => .{ "inline-size", VendorPrefix{ .none = true } }, + .@"min-block-size" => .{ "min-block-size", VendorPrefix{ .none = true } }, + .@"min-inline-size" => .{ "min-inline-size", VendorPrefix{ .none = true } }, + .@"max-block-size" => .{ "max-block-size", VendorPrefix{ .none = true } }, + .@"max-inline-size" => .{ "max-inline-size", VendorPrefix{ .none = true } }, + .@"box-sizing" => |*x| .{ "box-sizing", x.@"1" }, + .@"aspect-ratio" => .{ "aspect-ratio", VendorPrefix{ .none = true } }, + .overflow => .{ "overflow", VendorPrefix{ .none = true } }, + .@"overflow-x" => .{ "overflow-x", VendorPrefix{ .none = true } }, + .@"overflow-y" => .{ "overflow-y", VendorPrefix{ .none = true } }, + .@"text-overflow" => |*x| .{ "text-overflow", x.@"1" }, + .position => .{ "position", VendorPrefix{ .none = true } }, + .top => .{ "top", VendorPrefix{ .none = true } }, + .bottom => .{ "bottom", VendorPrefix{ .none = true } }, + .left => .{ "left", VendorPrefix{ .none = true } }, + .right => .{ "right", VendorPrefix{ .none = true } }, + .@"inset-block-start" => .{ "inset-block-start", VendorPrefix{ .none = true } }, + .@"inset-block-end" => .{ "inset-block-end", VendorPrefix{ .none = true } }, + .@"inset-inline-start" => .{ "inset-inline-start", VendorPrefix{ .none = true } }, + .@"inset-inline-end" => .{ "inset-inline-end", VendorPrefix{ .none = true } }, + .@"inset-block" => .{ "inset-block", VendorPrefix{ .none = true } }, + .@"inset-inline" => .{ "inset-inline", VendorPrefix{ .none = true } }, + .inset => .{ "inset", VendorPrefix{ .none = true } }, + .@"border-spacing" => .{ "border-spacing", VendorPrefix{ .none = true } }, + .@"border-top-color" => .{ 
"border-top-color", VendorPrefix{ .none = true } }, + .@"border-bottom-color" => .{ "border-bottom-color", VendorPrefix{ .none = true } }, + .@"border-left-color" => .{ "border-left-color", VendorPrefix{ .none = true } }, + .@"border-right-color" => .{ "border-right-color", VendorPrefix{ .none = true } }, + .@"border-block-start-color" => .{ "border-block-start-color", VendorPrefix{ .none = true } }, + .@"border-block-end-color" => .{ "border-block-end-color", VendorPrefix{ .none = true } }, + .@"border-inline-start-color" => .{ "border-inline-start-color", VendorPrefix{ .none = true } }, + .@"border-inline-end-color" => .{ "border-inline-end-color", VendorPrefix{ .none = true } }, + .@"border-top-style" => .{ "border-top-style", VendorPrefix{ .none = true } }, + .@"border-bottom-style" => .{ "border-bottom-style", VendorPrefix{ .none = true } }, + .@"border-left-style" => .{ "border-left-style", VendorPrefix{ .none = true } }, + .@"border-right-style" => .{ "border-right-style", VendorPrefix{ .none = true } }, + .@"border-block-start-style" => .{ "border-block-start-style", VendorPrefix{ .none = true } }, + .@"border-block-end-style" => .{ "border-block-end-style", VendorPrefix{ .none = true } }, + .@"border-inline-start-style" => .{ "border-inline-start-style", VendorPrefix{ .none = true } }, + .@"border-inline-end-style" => .{ "border-inline-end-style", VendorPrefix{ .none = true } }, + .@"border-top-width" => .{ "border-top-width", VendorPrefix{ .none = true } }, + .@"border-bottom-width" => .{ "border-bottom-width", VendorPrefix{ .none = true } }, + .@"border-left-width" => .{ "border-left-width", VendorPrefix{ .none = true } }, + .@"border-right-width" => .{ "border-right-width", VendorPrefix{ .none = true } }, + .@"border-block-start-width" => .{ "border-block-start-width", VendorPrefix{ .none = true } }, + .@"border-block-end-width" => .{ "border-block-end-width", VendorPrefix{ .none = true } }, + .@"border-inline-start-width" => .{ "border-inline-start-width", VendorPrefix{ .none = true } }, + .@"border-inline-end-width" => .{ "border-inline-end-width", VendorPrefix{ .none = true } }, + .@"border-top-left-radius" => |*x| .{ "border-top-left-radius", x.@"1" }, + .@"border-top-right-radius" => |*x| .{ "border-top-right-radius", x.@"1" }, + .@"border-bottom-left-radius" => |*x| .{ "border-bottom-left-radius", x.@"1" }, + .@"border-bottom-right-radius" => |*x| .{ "border-bottom-right-radius", x.@"1" }, + .@"border-start-start-radius" => .{ "border-start-start-radius", VendorPrefix{ .none = true } }, + .@"border-start-end-radius" => .{ "border-start-end-radius", VendorPrefix{ .none = true } }, + .@"border-end-start-radius" => .{ "border-end-start-radius", VendorPrefix{ .none = true } }, + .@"border-end-end-radius" => .{ "border-end-end-radius", VendorPrefix{ .none = true } }, + .@"border-radius" => |*x| .{ "border-radius", x.@"1" }, + .@"border-image-source" => .{ "border-image-source", VendorPrefix{ .none = true } }, + .@"border-image-outset" => .{ "border-image-outset", VendorPrefix{ .none = true } }, + .@"border-image-repeat" => .{ "border-image-repeat", VendorPrefix{ .none = true } }, + .@"border-image-width" => .{ "border-image-width", VendorPrefix{ .none = true } }, + .@"border-image-slice" => .{ "border-image-slice", VendorPrefix{ .none = true } }, + .@"border-image" => |*x| .{ "border-image", x.@"1" }, + .@"border-color" => .{ "border-color", VendorPrefix{ .none = true } }, + .@"border-style" => .{ "border-style", VendorPrefix{ .none = true } }, + .@"border-width" => .{ 
"border-width", VendorPrefix{ .none = true } }, + .@"border-block-color" => .{ "border-block-color", VendorPrefix{ .none = true } }, + .@"border-block-style" => .{ "border-block-style", VendorPrefix{ .none = true } }, + .@"border-block-width" => .{ "border-block-width", VendorPrefix{ .none = true } }, + .@"border-inline-color" => .{ "border-inline-color", VendorPrefix{ .none = true } }, + .@"border-inline-style" => .{ "border-inline-style", VendorPrefix{ .none = true } }, + .@"border-inline-width" => .{ "border-inline-width", VendorPrefix{ .none = true } }, + .border => .{ "border", VendorPrefix{ .none = true } }, + .@"border-top" => .{ "border-top", VendorPrefix{ .none = true } }, + .@"border-bottom" => .{ "border-bottom", VendorPrefix{ .none = true } }, + .@"border-left" => .{ "border-left", VendorPrefix{ .none = true } }, + .@"border-right" => .{ "border-right", VendorPrefix{ .none = true } }, + .@"border-block" => .{ "border-block", VendorPrefix{ .none = true } }, + .@"border-block-start" => .{ "border-block-start", VendorPrefix{ .none = true } }, + .@"border-block-end" => .{ "border-block-end", VendorPrefix{ .none = true } }, + .@"border-inline" => .{ "border-inline", VendorPrefix{ .none = true } }, + .@"border-inline-start" => .{ "border-inline-start", VendorPrefix{ .none = true } }, + .@"border-inline-end" => .{ "border-inline-end", VendorPrefix{ .none = true } }, + .outline => .{ "outline", VendorPrefix{ .none = true } }, + .@"outline-color" => .{ "outline-color", VendorPrefix{ .none = true } }, + .@"outline-style" => .{ "outline-style", VendorPrefix{ .none = true } }, + .@"outline-width" => .{ "outline-width", VendorPrefix{ .none = true } }, + .@"flex-direction" => |*x| .{ "flex-direction", x.@"1" }, + .@"flex-wrap" => |*x| .{ "flex-wrap", x.@"1" }, + .@"flex-flow" => |*x| .{ "flex-flow", x.@"1" }, + .@"flex-grow" => |*x| .{ "flex-grow", x.@"1" }, + .@"flex-shrink" => |*x| .{ "flex-shrink", x.@"1" }, + .@"flex-basis" => |*x| .{ "flex-basis", x.@"1" }, + .flex => |*x| .{ "flex", x.@"1" }, + .order => |*x| .{ "order", x.@"1" }, + .@"align-content" => |*x| .{ "align-content", x.@"1" }, + .@"justify-content" => |*x| .{ "justify-content", x.@"1" }, + .@"place-content" => .{ "place-content", VendorPrefix{ .none = true } }, + .@"align-self" => |*x| .{ "align-self", x.@"1" }, + .@"justify-self" => .{ "justify-self", VendorPrefix{ .none = true } }, + .@"place-self" => .{ "place-self", VendorPrefix{ .none = true } }, + .@"align-items" => |*x| .{ "align-items", x.@"1" }, + .@"justify-items" => .{ "justify-items", VendorPrefix{ .none = true } }, + .@"place-items" => .{ "place-items", VendorPrefix{ .none = true } }, + .@"row-gap" => .{ "row-gap", VendorPrefix{ .none = true } }, + .@"column-gap" => .{ "column-gap", VendorPrefix{ .none = true } }, + .gap => .{ "gap", VendorPrefix{ .none = true } }, + .@"box-orient" => |*x| .{ "box-orient", x.@"1" }, + .@"box-direction" => |*x| .{ "box-direction", x.@"1" }, + .@"box-ordinal-group" => |*x| .{ "box-ordinal-group", x.@"1" }, + .@"box-align" => |*x| .{ "box-align", x.@"1" }, + .@"box-flex" => |*x| .{ "box-flex", x.@"1" }, + .@"box-flex-group" => |*x| .{ "box-flex-group", x.@"1" }, + .@"box-pack" => |*x| .{ "box-pack", x.@"1" }, + .@"box-lines" => |*x| .{ "box-lines", x.@"1" }, + .@"flex-pack" => |*x| .{ "flex-pack", x.@"1" }, + .@"flex-order" => |*x| .{ "flex-order", x.@"1" }, + .@"flex-align" => |*x| .{ "flex-align", x.@"1" }, + .@"flex-item-align" => |*x| .{ "flex-item-align", x.@"1" }, + .@"flex-line-pack" => |*x| .{ "flex-line-pack", x.@"1" }, + 
.@"flex-positive" => |*x| .{ "flex-positive", x.@"1" }, + .@"flex-negative" => |*x| .{ "flex-negative", x.@"1" }, + .@"flex-preferred-size" => |*x| .{ "flex-preferred-size", x.@"1" }, + .@"margin-top" => .{ "margin-top", VendorPrefix{ .none = true } }, + .@"margin-bottom" => .{ "margin-bottom", VendorPrefix{ .none = true } }, + .@"margin-left" => .{ "margin-left", VendorPrefix{ .none = true } }, + .@"margin-right" => .{ "margin-right", VendorPrefix{ .none = true } }, + .@"margin-block-start" => .{ "margin-block-start", VendorPrefix{ .none = true } }, + .@"margin-block-end" => .{ "margin-block-end", VendorPrefix{ .none = true } }, + .@"margin-inline-start" => .{ "margin-inline-start", VendorPrefix{ .none = true } }, + .@"margin-inline-end" => .{ "margin-inline-end", VendorPrefix{ .none = true } }, + .@"margin-block" => .{ "margin-block", VendorPrefix{ .none = true } }, + .@"margin-inline" => .{ "margin-inline", VendorPrefix{ .none = true } }, + .margin => .{ "margin", VendorPrefix{ .none = true } }, + .@"padding-top" => .{ "padding-top", VendorPrefix{ .none = true } }, + .@"padding-bottom" => .{ "padding-bottom", VendorPrefix{ .none = true } }, + .@"padding-left" => .{ "padding-left", VendorPrefix{ .none = true } }, + .@"padding-right" => .{ "padding-right", VendorPrefix{ .none = true } }, + .@"padding-block-start" => .{ "padding-block-start", VendorPrefix{ .none = true } }, + .@"padding-block-end" => .{ "padding-block-end", VendorPrefix{ .none = true } }, + .@"padding-inline-start" => .{ "padding-inline-start", VendorPrefix{ .none = true } }, + .@"padding-inline-end" => .{ "padding-inline-end", VendorPrefix{ .none = true } }, + .@"padding-block" => .{ "padding-block", VendorPrefix{ .none = true } }, + .@"padding-inline" => .{ "padding-inline", VendorPrefix{ .none = true } }, + .padding => .{ "padding", VendorPrefix{ .none = true } }, + .@"scroll-margin-top" => .{ "scroll-margin-top", VendorPrefix{ .none = true } }, + .@"scroll-margin-bottom" => .{ "scroll-margin-bottom", VendorPrefix{ .none = true } }, + .@"scroll-margin-left" => .{ "scroll-margin-left", VendorPrefix{ .none = true } }, + .@"scroll-margin-right" => .{ "scroll-margin-right", VendorPrefix{ .none = true } }, + .@"scroll-margin-block-start" => .{ "scroll-margin-block-start", VendorPrefix{ .none = true } }, + .@"scroll-margin-block-end" => .{ "scroll-margin-block-end", VendorPrefix{ .none = true } }, + .@"scroll-margin-inline-start" => .{ "scroll-margin-inline-start", VendorPrefix{ .none = true } }, + .@"scroll-margin-inline-end" => .{ "scroll-margin-inline-end", VendorPrefix{ .none = true } }, + .@"scroll-margin-block" => .{ "scroll-margin-block", VendorPrefix{ .none = true } }, + .@"scroll-margin-inline" => .{ "scroll-margin-inline", VendorPrefix{ .none = true } }, + .@"scroll-margin" => .{ "scroll-margin", VendorPrefix{ .none = true } }, + .@"scroll-padding-top" => .{ "scroll-padding-top", VendorPrefix{ .none = true } }, + .@"scroll-padding-bottom" => .{ "scroll-padding-bottom", VendorPrefix{ .none = true } }, + .@"scroll-padding-left" => .{ "scroll-padding-left", VendorPrefix{ .none = true } }, + .@"scroll-padding-right" => .{ "scroll-padding-right", VendorPrefix{ .none = true } }, + .@"scroll-padding-block-start" => .{ "scroll-padding-block-start", VendorPrefix{ .none = true } }, + .@"scroll-padding-block-end" => .{ "scroll-padding-block-end", VendorPrefix{ .none = true } }, + .@"scroll-padding-inline-start" => .{ "scroll-padding-inline-start", VendorPrefix{ .none = true } }, + .@"scroll-padding-inline-end" => .{ 
"scroll-padding-inline-end", VendorPrefix{ .none = true } }, + .@"scroll-padding-block" => .{ "scroll-padding-block", VendorPrefix{ .none = true } }, + .@"scroll-padding-inline" => .{ "scroll-padding-inline", VendorPrefix{ .none = true } }, + .@"scroll-padding" => .{ "scroll-padding", VendorPrefix{ .none = true } }, + .@"font-weight" => .{ "font-weight", VendorPrefix{ .none = true } }, + .@"font-size" => .{ "font-size", VendorPrefix{ .none = true } }, + .@"font-stretch" => .{ "font-stretch", VendorPrefix{ .none = true } }, + .@"font-family" => .{ "font-family", VendorPrefix{ .none = true } }, + .@"font-style" => .{ "font-style", VendorPrefix{ .none = true } }, + .@"font-variant-caps" => .{ "font-variant-caps", VendorPrefix{ .none = true } }, + .@"line-height" => .{ "line-height", VendorPrefix{ .none = true } }, + .font => .{ "font", VendorPrefix{ .none = true } }, + .@"text-decoration-color" => |*x| .{ "text-decoration-color", x.@"1" }, + .@"text-emphasis-color" => |*x| .{ "text-emphasis-color", x.@"1" }, + .@"text-shadow" => .{ "text-shadow", VendorPrefix{ .none = true } }, + .direction => .{ "direction", VendorPrefix{ .none = true } }, + .composes => .{ "composes", VendorPrefix{ .none = true } }, + .@"mask-image" => |*x| .{ "mask-image", x.@"1" }, + .@"mask-mode" => .{ "mask-mode", VendorPrefix{ .none = true } }, + .@"mask-repeat" => |*x| .{ "mask-repeat", x.@"1" }, + .@"mask-position-x" => .{ "mask-position-x", VendorPrefix{ .none = true } }, + .@"mask-position-y" => .{ "mask-position-y", VendorPrefix{ .none = true } }, + .@"mask-position" => |*x| .{ "mask-position", x.@"1" }, + .@"mask-clip" => |*x| .{ "mask-clip", x.@"1" }, + .@"mask-origin" => |*x| .{ "mask-origin", x.@"1" }, + .@"mask-size" => |*x| .{ "mask-size", x.@"1" }, + .@"mask-composite" => .{ "mask-composite", VendorPrefix{ .none = true } }, + .@"mask-type" => .{ "mask-type", VendorPrefix{ .none = true } }, + .mask => |*x| .{ "mask", x.@"1" }, + .@"mask-border-source" => .{ "mask-border-source", VendorPrefix{ .none = true } }, + .@"mask-border-mode" => .{ "mask-border-mode", VendorPrefix{ .none = true } }, + .@"mask-border-slice" => .{ "mask-border-slice", VendorPrefix{ .none = true } }, + .@"mask-border-width" => .{ "mask-border-width", VendorPrefix{ .none = true } }, + .@"mask-border-outset" => .{ "mask-border-outset", VendorPrefix{ .none = true } }, + .@"mask-border-repeat" => .{ "mask-border-repeat", VendorPrefix{ .none = true } }, + .@"mask-border" => .{ "mask-border", VendorPrefix{ .none = true } }, + .@"-webkit-mask-composite" => .{ "-webkit-mask-composite", VendorPrefix{ .none = true } }, + .@"mask-source-type" => |*x| .{ "mask-source-type", x.@"1" }, + .@"mask-box-image" => |*x| .{ "mask-box-image", x.@"1" }, + .@"mask-box-image-source" => |*x| .{ "mask-box-image-source", x.@"1" }, + .@"mask-box-image-slice" => |*x| .{ "mask-box-image-slice", x.@"1" }, + .@"mask-box-image-width" => |*x| .{ "mask-box-image-width", x.@"1" }, + .@"mask-box-image-outset" => |*x| .{ "mask-box-image-outset", x.@"1" }, + .@"mask-box-image-repeat" => |*x| .{ "mask-box-image-repeat", x.@"1" }, + .all => .{ "all", VendorPrefix{ .none = true } }, + .unparsed => |*unparsed| brk: { + var prefix = unparsed.property_id.prefix(); + if (prefix.isEmpty()) { + prefix = VendorPrefix{ .none = true }; + } + break :brk .{ unparsed.property_id.name(), prefix }; + }, + .custom => unreachable, + }; + } + + /// Serializes the value of a CSS property without its name or `!important` flag. 
+ pub fn valueToCss(this: *const Property, comptime W: type, dest: *css.Printer(W)) PrintErr!void { + return switch (this.*) { + .@"background-color" => |*value| value.toCss(W, dest), + .@"background-image" => |*value| value.toCss(W, dest), + .@"background-position-x" => |*value| value.toCss(W, dest), + .@"background-position-y" => |*value| value.toCss(W, dest), + .@"background-position" => |*value| value.toCss(W, dest), + .@"background-size" => |*value| value.toCss(W, dest), + .@"background-repeat" => |*value| value.toCss(W, dest), + .@"background-attachment" => |*value| value.toCss(W, dest), + .@"background-clip" => |*value| value[0].toCss(W, dest), + .@"background-origin" => |*value| value.toCss(W, dest), + .background => |*value| value.toCss(W, dest), + .@"box-shadow" => |*value| value[0].toCss(W, dest), + .opacity => |*value| value.toCss(W, dest), + .color => |*value| value.toCss(W, dest), + .display => |*value| value.toCss(W, dest), + .visibility => |*value| value.toCss(W, dest), + .width => |*value| value.toCss(W, dest), + .height => |*value| value.toCss(W, dest), + .@"min-width" => |*value| value.toCss(W, dest), + .@"min-height" => |*value| value.toCss(W, dest), + .@"max-width" => |*value| value.toCss(W, dest), + .@"max-height" => |*value| value.toCss(W, dest), + .@"block-size" => |*value| value.toCss(W, dest), + .@"inline-size" => |*value| value.toCss(W, dest), + .@"min-block-size" => |*value| value.toCss(W, dest), + .@"min-inline-size" => |*value| value.toCss(W, dest), + .@"max-block-size" => |*value| value.toCss(W, dest), + .@"max-inline-size" => |*value| value.toCss(W, dest), + .@"box-sizing" => |*value| value[0].toCss(W, dest), + .@"aspect-ratio" => |*value| value.toCss(W, dest), + .overflow => |*value| value.toCss(W, dest), + .@"overflow-x" => |*value| value.toCss(W, dest), + .@"overflow-y" => |*value| value.toCss(W, dest), + .@"text-overflow" => |*value| value[0].toCss(W, dest), + .position => |*value| value.toCss(W, dest), + .top => |*value| value.toCss(W, dest), + .bottom => |*value| value.toCss(W, dest), + .left => |*value| value.toCss(W, dest), + .right => |*value| value.toCss(W, dest), + .@"inset-block-start" => |*value| value.toCss(W, dest), + .@"inset-block-end" => |*value| value.toCss(W, dest), + .@"inset-inline-start" => |*value| value.toCss(W, dest), + .@"inset-inline-end" => |*value| value.toCss(W, dest), + .@"inset-block" => |*value| value.toCss(W, dest), + .@"inset-inline" => |*value| value.toCss(W, dest), + .inset => |*value| value.toCss(W, dest), + .@"border-spacing" => |*value| value.toCss(W, dest), + .@"border-top-color" => |*value| value.toCss(W, dest), + .@"border-bottom-color" => |*value| value.toCss(W, dest), + .@"border-left-color" => |*value| value.toCss(W, dest), + .@"border-right-color" => |*value| value.toCss(W, dest), + .@"border-block-start-color" => |*value| value.toCss(W, dest), + .@"border-block-end-color" => |*value| value.toCss(W, dest), + .@"border-inline-start-color" => |*value| value.toCss(W, dest), + .@"border-inline-end-color" => |*value| value.toCss(W, dest), + .@"border-top-style" => |*value| value.toCss(W, dest), + .@"border-bottom-style" => |*value| value.toCss(W, dest), + .@"border-left-style" => |*value| value.toCss(W, dest), + .@"border-right-style" => |*value| value.toCss(W, dest), + .@"border-block-start-style" => |*value| value.toCss(W, dest), + .@"border-block-end-style" => |*value| value.toCss(W, dest), + .@"border-inline-start-style" => |*value| value.toCss(W, dest), + .@"border-inline-end-style" => |*value| value.toCss(W, 
dest), + .@"border-top-width" => |*value| value.toCss(W, dest), + .@"border-bottom-width" => |*value| value.toCss(W, dest), + .@"border-left-width" => |*value| value.toCss(W, dest), + .@"border-right-width" => |*value| value.toCss(W, dest), + .@"border-block-start-width" => |*value| value.toCss(W, dest), + .@"border-block-end-width" => |*value| value.toCss(W, dest), + .@"border-inline-start-width" => |*value| value.toCss(W, dest), + .@"border-inline-end-width" => |*value| value.toCss(W, dest), + .@"border-top-left-radius" => |*value| value[0].toCss(W, dest), + .@"border-top-right-radius" => |*value| value[0].toCss(W, dest), + .@"border-bottom-left-radius" => |*value| value[0].toCss(W, dest), + .@"border-bottom-right-radius" => |*value| value[0].toCss(W, dest), + .@"border-start-start-radius" => |*value| value.toCss(W, dest), + .@"border-start-end-radius" => |*value| value.toCss(W, dest), + .@"border-end-start-radius" => |*value| value.toCss(W, dest), + .@"border-end-end-radius" => |*value| value.toCss(W, dest), + .@"border-radius" => |*value| value[0].toCss(W, dest), + .@"border-image-source" => |*value| value.toCss(W, dest), + .@"border-image-outset" => |*value| value.toCss(W, dest), + .@"border-image-repeat" => |*value| value.toCss(W, dest), + .@"border-image-width" => |*value| value.toCss(W, dest), + .@"border-image-slice" => |*value| value.toCss(W, dest), + .@"border-image" => |*value| value[0].toCss(W, dest), + .@"border-color" => |*value| value.toCss(W, dest), + .@"border-style" => |*value| value.toCss(W, dest), + .@"border-width" => |*value| value.toCss(W, dest), + .@"border-block-color" => |*value| value.toCss(W, dest), + .@"border-block-style" => |*value| value.toCss(W, dest), + .@"border-block-width" => |*value| value.toCss(W, dest), + .@"border-inline-color" => |*value| value.toCss(W, dest), + .@"border-inline-style" => |*value| value.toCss(W, dest), + .@"border-inline-width" => |*value| value.toCss(W, dest), + .border => |*value| value.toCss(W, dest), + .@"border-top" => |*value| value.toCss(W, dest), + .@"border-bottom" => |*value| value.toCss(W, dest), + .@"border-left" => |*value| value.toCss(W, dest), + .@"border-right" => |*value| value.toCss(W, dest), + .@"border-block" => |*value| value.toCss(W, dest), + .@"border-block-start" => |*value| value.toCss(W, dest), + .@"border-block-end" => |*value| value.toCss(W, dest), + .@"border-inline" => |*value| value.toCss(W, dest), + .@"border-inline-start" => |*value| value.toCss(W, dest), + .@"border-inline-end" => |*value| value.toCss(W, dest), + .outline => |*value| value.toCss(W, dest), + .@"outline-color" => |*value| value.toCss(W, dest), + .@"outline-style" => |*value| value.toCss(W, dest), + .@"outline-width" => |*value| value.toCss(W, dest), + .@"flex-direction" => |*value| value[0].toCss(W, dest), + .@"flex-wrap" => |*value| value[0].toCss(W, dest), + .@"flex-flow" => |*value| value[0].toCss(W, dest), + .@"flex-grow" => |*value| CSSNumberFns.toCss(&value[0], W, dest), + .@"flex-shrink" => |*value| CSSNumberFns.toCss(&value[0], W, dest), + .@"flex-basis" => |*value| value[0].toCss(W, dest), + .flex => |*value| value[0].toCss(W, dest), + .order => |*value| CSSIntegerFns.toCss(&value[0], W, dest), + .@"align-content" => |*value| value[0].toCss(W, dest), + .@"justify-content" => |*value| value[0].toCss(W, dest), + .@"place-content" => |*value| value.toCss(W, dest), + .@"align-self" => |*value| value[0].toCss(W, dest), + .@"justify-self" => |*value| value.toCss(W, dest), + .@"place-self" => |*value| value.toCss(W, dest), + 
.@"align-items" => |*value| value[0].toCss(W, dest), + .@"justify-items" => |*value| value.toCss(W, dest), + .@"place-items" => |*value| value.toCss(W, dest), + .@"row-gap" => |*value| value.toCss(W, dest), + .@"column-gap" => |*value| value.toCss(W, dest), + .gap => |*value| value.toCss(W, dest), + .@"box-orient" => |*value| value[0].toCss(W, dest), + .@"box-direction" => |*value| value[0].toCss(W, dest), + .@"box-ordinal-group" => |*value| CSSIntegerFns.toCss(&value[0], W, dest), + .@"box-align" => |*value| value[0].toCss(W, dest), + .@"box-flex" => |*value| CSSNumberFns.toCss(&value[0], W, dest), + .@"box-flex-group" => |*value| CSSIntegerFns.toCss(&value[0], W, dest), + .@"box-pack" => |*value| value[0].toCss(W, dest), + .@"box-lines" => |*value| value[0].toCss(W, dest), + .@"flex-pack" => |*value| value[0].toCss(W, dest), + .@"flex-order" => |*value| CSSIntegerFns.toCss(&value[0], W, dest), + .@"flex-align" => |*value| value[0].toCss(W, dest), + .@"flex-item-align" => |*value| value[0].toCss(W, dest), + .@"flex-line-pack" => |*value| value[0].toCss(W, dest), + .@"flex-positive" => |*value| CSSNumberFns.toCss(&value[0], W, dest), + .@"flex-negative" => |*value| CSSNumberFns.toCss(&value[0], W, dest), + .@"flex-preferred-size" => |*value| value[0].toCss(W, dest), + .@"margin-top" => |*value| value.toCss(W, dest), + .@"margin-bottom" => |*value| value.toCss(W, dest), + .@"margin-left" => |*value| value.toCss(W, dest), + .@"margin-right" => |*value| value.toCss(W, dest), + .@"margin-block-start" => |*value| value.toCss(W, dest), + .@"margin-block-end" => |*value| value.toCss(W, dest), + .@"margin-inline-start" => |*value| value.toCss(W, dest), + .@"margin-inline-end" => |*value| value.toCss(W, dest), + .@"margin-block" => |*value| value.toCss(W, dest), + .@"margin-inline" => |*value| value.toCss(W, dest), + .margin => |*value| value.toCss(W, dest), + .@"padding-top" => |*value| value.toCss(W, dest), + .@"padding-bottom" => |*value| value.toCss(W, dest), + .@"padding-left" => |*value| value.toCss(W, dest), + .@"padding-right" => |*value| value.toCss(W, dest), + .@"padding-block-start" => |*value| value.toCss(W, dest), + .@"padding-block-end" => |*value| value.toCss(W, dest), + .@"padding-inline-start" => |*value| value.toCss(W, dest), + .@"padding-inline-end" => |*value| value.toCss(W, dest), + .@"padding-block" => |*value| value.toCss(W, dest), + .@"padding-inline" => |*value| value.toCss(W, dest), + .padding => |*value| value.toCss(W, dest), + .@"scroll-margin-top" => |*value| value.toCss(W, dest), + .@"scroll-margin-bottom" => |*value| value.toCss(W, dest), + .@"scroll-margin-left" => |*value| value.toCss(W, dest), + .@"scroll-margin-right" => |*value| value.toCss(W, dest), + .@"scroll-margin-block-start" => |*value| value.toCss(W, dest), + .@"scroll-margin-block-end" => |*value| value.toCss(W, dest), + .@"scroll-margin-inline-start" => |*value| value.toCss(W, dest), + .@"scroll-margin-inline-end" => |*value| value.toCss(W, dest), + .@"scroll-margin-block" => |*value| value.toCss(W, dest), + .@"scroll-margin-inline" => |*value| value.toCss(W, dest), + .@"scroll-margin" => |*value| value.toCss(W, dest), + .@"scroll-padding-top" => |*value| value.toCss(W, dest), + .@"scroll-padding-bottom" => |*value| value.toCss(W, dest), + .@"scroll-padding-left" => |*value| value.toCss(W, dest), + .@"scroll-padding-right" => |*value| value.toCss(W, dest), + .@"scroll-padding-block-start" => |*value| value.toCss(W, dest), + .@"scroll-padding-block-end" => |*value| value.toCss(W, dest), + 
.@"scroll-padding-inline-start" => |*value| value.toCss(W, dest), + .@"scroll-padding-inline-end" => |*value| value.toCss(W, dest), + .@"scroll-padding-block" => |*value| value.toCss(W, dest), + .@"scroll-padding-inline" => |*value| value.toCss(W, dest), + .@"scroll-padding" => |*value| value.toCss(W, dest), + .@"font-weight" => |*value| value.toCss(W, dest), + .@"font-size" => |*value| value.toCss(W, dest), + .@"font-stretch" => |*value| value.toCss(W, dest), + .@"font-family" => |*value| value.toCss(W, dest), + .@"font-style" => |*value| value.toCss(W, dest), + .@"font-variant-caps" => |*value| value.toCss(W, dest), + .@"line-height" => |*value| value.toCss(W, dest), + .font => |*value| value.toCss(W, dest), + .@"text-decoration-color" => |*value| value[0].toCss(W, dest), + .@"text-emphasis-color" => |*value| value[0].toCss(W, dest), + .@"text-shadow" => |*value| value.toCss(W, dest), + .direction => |*value| value.toCss(W, dest), + .composes => |*value| value.toCss(W, dest), + .@"mask-image" => |*value| value[0].toCss(W, dest), + .@"mask-mode" => |*value| value.toCss(W, dest), + .@"mask-repeat" => |*value| value[0].toCss(W, dest), + .@"mask-position-x" => |*value| value.toCss(W, dest), + .@"mask-position-y" => |*value| value.toCss(W, dest), + .@"mask-position" => |*value| value[0].toCss(W, dest), + .@"mask-clip" => |*value| value[0].toCss(W, dest), + .@"mask-origin" => |*value| value[0].toCss(W, dest), + .@"mask-size" => |*value| value[0].toCss(W, dest), + .@"mask-composite" => |*value| value.toCss(W, dest), + .@"mask-type" => |*value| value.toCss(W, dest), + .mask => |*value| value[0].toCss(W, dest), + .@"mask-border-source" => |*value| value.toCss(W, dest), + .@"mask-border-mode" => |*value| value.toCss(W, dest), + .@"mask-border-slice" => |*value| value.toCss(W, dest), + .@"mask-border-width" => |*value| value.toCss(W, dest), + .@"mask-border-outset" => |*value| value.toCss(W, dest), + .@"mask-border-repeat" => |*value| value.toCss(W, dest), + .@"mask-border" => |*value| value.toCss(W, dest), + .@"-webkit-mask-composite" => |*value| value.toCss(W, dest), + .@"mask-source-type" => |*value| value[0].toCss(W, dest), + .@"mask-box-image" => |*value| value[0].toCss(W, dest), + .@"mask-box-image-source" => |*value| value[0].toCss(W, dest), + .@"mask-box-image-slice" => |*value| value[0].toCss(W, dest), + .@"mask-box-image-width" => |*value| value[0].toCss(W, dest), + .@"mask-box-image-outset" => |*value| value[0].toCss(W, dest), + .@"mask-box-image-repeat" => |*value| value[0].toCss(W, dest), + .all => |*keyword| keyword.toCss(W, dest), + .unparsed => |*unparsed| unparsed.value.toCss(W, dest, false), + .custom => |*c| c.value.toCss(W, dest, c.name == .custom), + }; + } + + /// Returns the given longhand property for a shorthand. 
+ pub fn longhand(this: *const Property, property_id: *const PropertyId) ?Property { + switch (this.*) { + .@"background-position" => |*v| return v.longhand(property_id), + .overflow => |*v| return v.longhand(property_id), + .@"inset-block" => |*v| return v.longhand(property_id), + .@"inset-inline" => |*v| return v.longhand(property_id), + .inset => |*v| return v.longhand(property_id), + .@"border-radius" => |*v| { + if (!v[1].eq(property_id.prefix())) return null; + return v[0].longhand(property_id); + }, + .@"border-image" => |*v| { + if (!v[1].eq(property_id.prefix())) return null; + return v[0].longhand(property_id); + }, + .@"border-color" => |*v| return v.longhand(property_id), + .@"border-style" => |*v| return v.longhand(property_id), + .@"border-width" => |*v| return v.longhand(property_id), + .@"border-block-color" => |*v| return v.longhand(property_id), + .@"border-block-style" => |*v| return v.longhand(property_id), + .@"border-block-width" => |*v| return v.longhand(property_id), + .@"border-inline-color" => |*v| return v.longhand(property_id), + .@"border-inline-style" => |*v| return v.longhand(property_id), + .@"border-inline-width" => |*v| return v.longhand(property_id), + .border => |*v| return v.longhand(property_id), + .@"border-top" => |*v| return v.longhand(property_id), + .@"border-bottom" => |*v| return v.longhand(property_id), + .@"border-left" => |*v| return v.longhand(property_id), + .@"border-right" => |*v| return v.longhand(property_id), + .@"border-block" => |*v| return v.longhand(property_id), + .@"border-block-start" => |*v| return v.longhand(property_id), + .@"border-block-end" => |*v| return v.longhand(property_id), + .@"border-inline" => |*v| return v.longhand(property_id), + .@"border-inline-start" => |*v| return v.longhand(property_id), + .@"border-inline-end" => |*v| return v.longhand(property_id), + .outline => |*v| return v.longhand(property_id), + .@"flex-flow" => |*v| { + if (!v[1].eq(property_id.prefix())) return null; + return v[0].longhand(property_id); + }, + .flex => |*v| { + if (!v[1].eq(property_id.prefix())) return null; + return v[0].longhand(property_id); + }, + .@"place-content" => |*v| return v.longhand(property_id), + .@"place-self" => |*v| return v.longhand(property_id), + .@"place-items" => |*v| return v.longhand(property_id), + .gap => |*v| return v.longhand(property_id), + .@"margin-block" => |*v| return v.longhand(property_id), + .@"margin-inline" => |*v| return v.longhand(property_id), + .margin => |*v| return v.longhand(property_id), + .@"padding-block" => |*v| return v.longhand(property_id), + .@"padding-inline" => |*v| return v.longhand(property_id), + .padding => |*v| return v.longhand(property_id), + .@"scroll-margin-block" => |*v| return v.longhand(property_id), + .@"scroll-margin-inline" => |*v| return v.longhand(property_id), + .@"scroll-margin" => |*v| return v.longhand(property_id), + .@"scroll-padding-block" => |*v| return v.longhand(property_id), + .@"scroll-padding-inline" => |*v| return v.longhand(property_id), + .@"scroll-padding" => |*v| return v.longhand(property_id), + .font => |*v| return v.longhand(property_id), + .mask => |*v| { + if (!v[1].eq(property_id.prefix())) return null; + return v[0].longhand(property_id); + }, + .@"mask-border" => |*v| return v.longhand(property_id), + else => {}, + } + return null; + } + + pub fn eql(lhs: *const Property, rhs: *const Property) bool { + if (@intFromEnum(lhs.*) != @intFromEnum(rhs.*)) return false; + return switch (lhs.*) { + .@"background-color" => |*v| 
css.generic.eql(CssColor, v, &rhs.@"background-color"), + .@"background-image" => |*v| css.generic.eql(SmallList(Image, 1), v, &rhs.@"background-image"), + .@"background-position-x" => |*v| css.generic.eql(SmallList(css_values.position.HorizontalPosition, 1), v, &rhs.@"background-position-x"), + .@"background-position-y" => |*v| css.generic.eql(SmallList(css_values.position.VerticalPosition, 1), v, &rhs.@"background-position-y"), + .@"background-position" => |*v| css.generic.eql(SmallList(background.BackgroundPosition, 1), v, &rhs.@"background-position"), + .@"background-size" => |*v| css.generic.eql(SmallList(background.BackgroundSize, 1), v, &rhs.@"background-size"), + .@"background-repeat" => |*v| css.generic.eql(SmallList(background.BackgroundRepeat, 1), v, &rhs.@"background-repeat"), + .@"background-attachment" => |*v| css.generic.eql(SmallList(background.BackgroundAttachment, 1), v, &rhs.@"background-attachment"), + .@"background-clip" => |*v| css.generic.eql(SmallList(background.BackgroundClip, 1), &v[0], &rhs.@"background-clip"[0]) and v[1].eq(rhs.@"background-clip"[1]), + .@"background-origin" => |*v| css.generic.eql(SmallList(background.BackgroundOrigin, 1), v, &rhs.@"background-origin"), + .background => |*v| css.generic.eql(SmallList(background.Background, 1), v, &rhs.background), + .@"box-shadow" => |*v| css.generic.eql(SmallList(box_shadow.BoxShadow, 1), &v[0], &rhs.@"box-shadow"[0]) and v[1].eq(rhs.@"box-shadow"[1]), + .opacity => |*v| css.generic.eql(css.css_values.alpha.AlphaValue, v, &rhs.opacity), + .color => |*v| css.generic.eql(CssColor, v, &rhs.color), + .display => |*v| css.generic.eql(display.Display, v, &rhs.display), + .visibility => |*v| css.generic.eql(display.Visibility, v, &rhs.visibility), + .width => |*v| css.generic.eql(size.Size, v, &rhs.width), + .height => |*v| css.generic.eql(size.Size, v, &rhs.height), + .@"min-width" => |*v| css.generic.eql(size.Size, v, &rhs.@"min-width"), + .@"min-height" => |*v| css.generic.eql(size.Size, v, &rhs.@"min-height"), + .@"max-width" => |*v| css.generic.eql(size.MaxSize, v, &rhs.@"max-width"), + .@"max-height" => |*v| css.generic.eql(size.MaxSize, v, &rhs.@"max-height"), + .@"block-size" => |*v| css.generic.eql(size.Size, v, &rhs.@"block-size"), + .@"inline-size" => |*v| css.generic.eql(size.Size, v, &rhs.@"inline-size"), + .@"min-block-size" => |*v| css.generic.eql(size.Size, v, &rhs.@"min-block-size"), + .@"min-inline-size" => |*v| css.generic.eql(size.Size, v, &rhs.@"min-inline-size"), + .@"max-block-size" => |*v| css.generic.eql(size.MaxSize, v, &rhs.@"max-block-size"), + .@"max-inline-size" => |*v| css.generic.eql(size.MaxSize, v, &rhs.@"max-inline-size"), + .@"box-sizing" => |*v| css.generic.eql(size.BoxSizing, &v[0], &rhs.@"box-sizing"[0]) and v[1].eq(rhs.@"box-sizing"[1]), + .@"aspect-ratio" => |*v| css.generic.eql(size.AspectRatio, v, &rhs.@"aspect-ratio"), + .overflow => |*v| css.generic.eql(overflow.Overflow, v, &rhs.overflow), + .@"overflow-x" => |*v| css.generic.eql(overflow.OverflowKeyword, v, &rhs.@"overflow-x"), + .@"overflow-y" => |*v| css.generic.eql(overflow.OverflowKeyword, v, &rhs.@"overflow-y"), + .@"text-overflow" => |*v| css.generic.eql(overflow.TextOverflow, &v[0], &rhs.@"text-overflow"[0]) and v[1].eq(rhs.@"text-overflow"[1]), + .position => |*v| css.generic.eql(position.Position, v, &rhs.position), + .top => |*v| css.generic.eql(LengthPercentageOrAuto, v, &rhs.top), + .bottom => |*v| css.generic.eql(LengthPercentageOrAuto, v, &rhs.bottom), + .left => |*v| css.generic.eql(LengthPercentageOrAuto, v, &rhs.left), + .right => |*v| css.generic.eql(LengthPercentageOrAuto, v, &rhs.right),
+ .@"inset-block-start" => |*v| css.generic.eql(LengthPercentageOrAuto, v, &rhs.@"inset-block-start"), + .@"inset-block-end" => |*v| css.generic.eql(LengthPercentageOrAuto, v, &rhs.@"inset-block-end"), + .@"inset-inline-start" => |*v| css.generic.eql(LengthPercentageOrAuto, v, &rhs.@"inset-inline-start"), + .@"inset-inline-end" => |*v| css.generic.eql(LengthPercentageOrAuto, v, &rhs.@"inset-inline-end"), + .@"inset-block" => |*v| css.generic.eql(margin_padding.InsetBlock, v, &rhs.@"inset-block"), + .@"inset-inline" => |*v| css.generic.eql(margin_padding.InsetInline, v, &rhs.@"inset-inline"), + .inset => |*v| css.generic.eql(margin_padding.Inset, v, &rhs.inset), + .@"border-spacing" => |*v| css.generic.eql(css.css_values.size.Size2D(Length), v, &rhs.@"border-spacing"), + .@"border-top-color" => |*v| css.generic.eql(CssColor, v, &rhs.@"border-top-color"), + .@"border-bottom-color" => |*v| css.generic.eql(CssColor, v, &rhs.@"border-bottom-color"), + .@"border-left-color" => |*v| css.generic.eql(CssColor, v, &rhs.@"border-left-color"), + .@"border-right-color" => |*v| css.generic.eql(CssColor, v, &rhs.@"border-right-color"), + .@"border-block-start-color" => |*v| css.generic.eql(CssColor, v, &rhs.@"border-block-start-color"), + .@"border-block-end-color" => |*v| css.generic.eql(CssColor, v, &rhs.@"border-block-end-color"), + .@"border-inline-start-color" => |*v| css.generic.eql(CssColor, v, &rhs.@"border-inline-start-color"), + .@"border-inline-end-color" => |*v| css.generic.eql(CssColor, v, &rhs.@"border-inline-end-color"), + .@"border-top-style" => |*v| css.generic.eql(border.LineStyle, v, &rhs.@"border-top-style"), + .@"border-bottom-style" => |*v| css.generic.eql(border.LineStyle, v, &rhs.@"border-bottom-style"), + .@"border-left-style" => |*v| css.generic.eql(border.LineStyle, v, &rhs.@"border-left-style"), + .@"border-right-style" => |*v| css.generic.eql(border.LineStyle, v, &rhs.@"border-right-style"), + .@"border-block-start-style" => |*v| css.generic.eql(border.LineStyle, v, &rhs.@"border-block-start-style"), + .@"border-block-end-style" => |*v| css.generic.eql(border.LineStyle, v, &rhs.@"border-block-end-style"), + .@"border-inline-start-style" => |*v| css.generic.eql(border.LineStyle, v, &rhs.@"border-inline-start-style"), + .@"border-inline-end-style" => |*v| css.generic.eql(border.LineStyle, v, &rhs.@"border-inline-end-style"), + .@"border-top-width" => |*v| css.generic.eql(BorderSideWidth, v, &rhs.@"border-top-width"), + .@"border-bottom-width" => |*v| css.generic.eql(BorderSideWidth, v, &rhs.@"border-bottom-width"), + .@"border-left-width" => |*v| css.generic.eql(BorderSideWidth, v, &rhs.@"border-left-width"), + .@"border-right-width" => |*v| css.generic.eql(BorderSideWidth, v, &rhs.@"border-right-width"), + .@"border-block-start-width" => |*v| css.generic.eql(BorderSideWidth, v, &rhs.@"border-block-start-width"), + .@"border-block-end-width" => |*v| css.generic.eql(BorderSideWidth, v, &rhs.@"border-block-end-width"), + .@"border-inline-start-width" => |*v| css.generic.eql(BorderSideWidth, v, &rhs.@"border-inline-start-width"), + .@"border-inline-end-width" => |*v| css.generic.eql(BorderSideWidth, v, &rhs.@"border-inline-end-width"), + .@"border-top-left-radius" => |*v| css.generic.eql(Size2D(LengthPercentage), &v[0], &rhs.@"border-top-left-radius"[0]) and v[1].eq(rhs.@"border-top-left-radius"[1]), + .@"border-top-right-radius" => |*v| css.generic.eql(Size2D(LengthPercentage), &v[0], &rhs.@"border-top-right-radius"[0]) and v[1].eq(rhs.@"border-top-right-radius"[1]), + .@"border-bottom-left-radius" => |*v| css.generic.eql(Size2D(LengthPercentage), &v[0], &rhs.@"border-bottom-left-radius"[0]) and v[1].eq(rhs.@"border-bottom-left-radius"[1]),
+ .@"border-bottom-right-radius" => |*v| css.generic.eql(Size2D(LengthPercentage), &v[0], &rhs.@"border-bottom-right-radius"[0]) and v[1].eq(rhs.@"border-bottom-right-radius"[1]), + .@"border-start-start-radius" => |*v| css.generic.eql(Size2D(LengthPercentage), v, &rhs.@"border-start-start-radius"), + .@"border-start-end-radius" => |*v| css.generic.eql(Size2D(LengthPercentage), v, &rhs.@"border-start-end-radius"), + .@"border-end-start-radius" => |*v| css.generic.eql(Size2D(LengthPercentage), v, &rhs.@"border-end-start-radius"), + .@"border-end-end-radius" => |*v| css.generic.eql(Size2D(LengthPercentage), v, &rhs.@"border-end-end-radius"), + .@"border-radius" => |*v| css.generic.eql(BorderRadius, &v[0], &rhs.@"border-radius"[0]) and v[1].eq(rhs.@"border-radius"[1]), + .@"border-image-source" => |*v| css.generic.eql(Image, v, &rhs.@"border-image-source"), + .@"border-image-outset" => |*v| css.generic.eql(Rect(LengthOrNumber), v, &rhs.@"border-image-outset"), + .@"border-image-repeat" => |*v| css.generic.eql(BorderImageRepeat, v, &rhs.@"border-image-repeat"), + .@"border-image-width" => |*v| css.generic.eql(Rect(BorderImageSideWidth), v, &rhs.@"border-image-width"), + .@"border-image-slice" => |*v| css.generic.eql(BorderImageSlice, v, &rhs.@"border-image-slice"), + .@"border-image" => |*v| css.generic.eql(BorderImage, &v[0], &rhs.@"border-image"[0]) and v[1].eq(rhs.@"border-image"[1]), + .@"border-color" => |*v| css.generic.eql(BorderColor, v, &rhs.@"border-color"), + .@"border-style" => |*v| css.generic.eql(BorderStyle, v, &rhs.@"border-style"), + .@"border-width" => |*v| css.generic.eql(BorderWidth, v, &rhs.@"border-width"), + .@"border-block-color" => |*v| css.generic.eql(BorderBlockColor, v, &rhs.@"border-block-color"), + .@"border-block-style" => |*v| css.generic.eql(BorderBlockStyle, v, &rhs.@"border-block-style"), + .@"border-block-width" => |*v| css.generic.eql(BorderBlockWidth, v, &rhs.@"border-block-width"), + .@"border-inline-color" => |*v| css.generic.eql(BorderInlineColor, v, &rhs.@"border-inline-color"), + .@"border-inline-style" => |*v| css.generic.eql(BorderInlineStyle, v, &rhs.@"border-inline-style"), + .@"border-inline-width" => |*v| css.generic.eql(BorderInlineWidth, v, &rhs.@"border-inline-width"), + .border => |*v| css.generic.eql(Border, v, &rhs.border), + .@"border-top" => |*v| css.generic.eql(BorderTop, v, &rhs.@"border-top"), + .@"border-bottom" => |*v| css.generic.eql(BorderBottom, v, &rhs.@"border-bottom"), + .@"border-left" => |*v| css.generic.eql(BorderLeft, v, &rhs.@"border-left"), + .@"border-right" => |*v| css.generic.eql(BorderRight, v, &rhs.@"border-right"), + .@"border-block" => |*v| css.generic.eql(BorderBlock, v, &rhs.@"border-block"), + .@"border-block-start" => |*v| css.generic.eql(BorderBlockStart, v, &rhs.@"border-block-start"), + .@"border-block-end" => |*v| css.generic.eql(BorderBlockEnd, v, &rhs.@"border-block-end"), + .@"border-inline" => |*v| css.generic.eql(BorderInline, v, &rhs.@"border-inline"), + .@"border-inline-start" => |*v| css.generic.eql(BorderInlineStart, v, &rhs.@"border-inline-start"), + .@"border-inline-end" => |*v| css.generic.eql(BorderInlineEnd, v, &rhs.@"border-inline-end"), + .outline => |*v| css.generic.eql(Outline, v, &rhs.outline), + .@"outline-color" => |*v| css.generic.eql(CssColor, v, &rhs.@"outline-color"), + .@"outline-style" => |*v| css.generic.eql(OutlineStyle, v, &rhs.@"outline-style"), + .@"outline-width" => |*v| css.generic.eql(BorderSideWidth, v, &rhs.@"outline-width"),
+ .@"flex-direction" => |*v| css.generic.eql(FlexDirection, &v[0], &rhs.@"flex-direction"[0]) and v[1].eq(rhs.@"flex-direction"[1]), + .@"flex-wrap" => |*v| css.generic.eql(FlexWrap, &v[0], &rhs.@"flex-wrap"[0]) and v[1].eq(rhs.@"flex-wrap"[1]), + .@"flex-flow" => |*v| css.generic.eql(FlexFlow, &v[0], &rhs.@"flex-flow"[0]) and v[1].eq(rhs.@"flex-flow"[1]), + .@"flex-grow" => |*v| css.generic.eql(CSSNumber, &v[0], &rhs.@"flex-grow"[0]) and v[1].eq(rhs.@"flex-grow"[1]), + .@"flex-shrink" => |*v| css.generic.eql(CSSNumber, &v[0], &rhs.@"flex-shrink"[0]) and v[1].eq(rhs.@"flex-shrink"[1]), + .@"flex-basis" => |*v| css.generic.eql(LengthPercentageOrAuto, &v[0], &rhs.@"flex-basis"[0]) and v[1].eq(rhs.@"flex-basis"[1]), + .flex => |*v| css.generic.eql(Flex, &v[0], &rhs.flex[0]) and v[1].eq(rhs.flex[1]), + .order => |*v| css.generic.eql(CSSInteger, &v[0], &rhs.order[0]) and v[1].eq(rhs.order[1]), + .@"align-content" => |*v| css.generic.eql(AlignContent, &v[0], &rhs.@"align-content"[0]) and v[1].eq(rhs.@"align-content"[1]), + .@"justify-content" => |*v| css.generic.eql(JustifyContent, &v[0], &rhs.@"justify-content"[0]) and v[1].eq(rhs.@"justify-content"[1]), + .@"place-content" => |*v| css.generic.eql(PlaceContent, v, &rhs.@"place-content"), + .@"align-self" => |*v| css.generic.eql(AlignSelf, &v[0], &rhs.@"align-self"[0]) and v[1].eq(rhs.@"align-self"[1]), + .@"justify-self" => |*v| css.generic.eql(JustifySelf, v, &rhs.@"justify-self"), + .@"place-self" => |*v| css.generic.eql(PlaceSelf, v, &rhs.@"place-self"), + .@"align-items" => |*v| css.generic.eql(AlignItems, &v[0], &rhs.@"align-items"[0]) and v[1].eq(rhs.@"align-items"[1]), + .@"justify-items" => |*v| css.generic.eql(JustifyItems, v, &rhs.@"justify-items"), + .@"place-items" => |*v| css.generic.eql(PlaceItems, v, &rhs.@"place-items"), + .@"row-gap" => |*v| css.generic.eql(GapValue, v, &rhs.@"row-gap"), + .@"column-gap" => |*v| css.generic.eql(GapValue, v, &rhs.@"column-gap"), + .gap => |*v| css.generic.eql(Gap, v, &rhs.gap), + .@"box-orient" => |*v| css.generic.eql(BoxOrient, &v[0], &rhs.@"box-orient"[0]) and v[1].eq(rhs.@"box-orient"[1]), + .@"box-direction" => |*v| css.generic.eql(BoxDirection, &v[0], &rhs.@"box-direction"[0]) and v[1].eq(rhs.@"box-direction"[1]), + .@"box-ordinal-group" => |*v| css.generic.eql(CSSInteger, &v[0], &rhs.@"box-ordinal-group"[0]) and v[1].eq(rhs.@"box-ordinal-group"[1]), + .@"box-align" => |*v| css.generic.eql(BoxAlign, &v[0], &rhs.@"box-align"[0]) and v[1].eq(rhs.@"box-align"[1]), + .@"box-flex" => |*v| css.generic.eql(CSSNumber, &v[0], &rhs.@"box-flex"[0]) and v[1].eq(rhs.@"box-flex"[1]), + .@"box-flex-group" => |*v| css.generic.eql(CSSInteger, &v[0], &rhs.@"box-flex-group"[0]) and v[1].eq(rhs.@"box-flex-group"[1]), + .@"box-pack" => |*v| css.generic.eql(BoxPack, &v[0], &rhs.@"box-pack"[0]) and v[1].eq(rhs.@"box-pack"[1]), + .@"box-lines" => |*v| css.generic.eql(BoxLines, &v[0], &rhs.@"box-lines"[0]) and v[1].eq(rhs.@"box-lines"[1]), + .@"flex-pack" => |*v| css.generic.eql(FlexPack, &v[0], &rhs.@"flex-pack"[0]) and v[1].eq(rhs.@"flex-pack"[1]), + .@"flex-order" => |*v| css.generic.eql(CSSInteger, &v[0], &rhs.@"flex-order"[0]) and v[1].eq(rhs.@"flex-order"[1]), + .@"flex-align" => |*v| css.generic.eql(BoxAlign, &v[0], &rhs.@"flex-align"[0]) and v[1].eq(rhs.@"flex-align"[1]), + .@"flex-item-align" => |*v| css.generic.eql(FlexItemAlign, &v[0], &rhs.@"flex-item-align"[0]) and v[1].eq(rhs.@"flex-item-align"[1]), + .@"flex-line-pack" => |*v| css.generic.eql(FlexLinePack, &v[0], &rhs.@"flex-line-pack"[0]) and v[1].eq(rhs.@"flex-line-pack"[1]), + .@"flex-positive" => |*v| css.generic.eql(CSSNumber, &v[0], &rhs.@"flex-positive"[0]) and v[1].eq(rhs.@"flex-positive"[1]), + .@"flex-negative" => |*v| css.generic.eql(CSSNumber, &v[0], &rhs.@"flex-negative"[0]) and v[1].eq(rhs.@"flex-negative"[1]), + .@"flex-preferred-size" => |*v| css.generic.eql(LengthPercentageOrAuto, &v[0], &rhs.@"flex-preferred-size"[0]) and v[1].eq(rhs.@"flex-preferred-size"[1]), + .@"margin-top" => |*v|
css.generic.eql(LengthPercentageOrAuto, v, &rhs.@"margin-top"), + .@"margin-bottom" => |*v| css.generic.eql(LengthPercentageOrAuto, v, &rhs.@"margin-bottom"), + .@"margin-left" => |*v| css.generic.eql(LengthPercentageOrAuto, v, &rhs.@"margin-left"), + .@"margin-right" => |*v| css.generic.eql(LengthPercentageOrAuto, v, &rhs.@"margin-right"), + .@"margin-block-start" => |*v| css.generic.eql(LengthPercentageOrAuto, v, &rhs.@"margin-block-start"), + .@"margin-block-end" => |*v| css.generic.eql(LengthPercentageOrAuto, v, &rhs.@"margin-block-end"), + .@"margin-inline-start" => |*v| css.generic.eql(LengthPercentageOrAuto, v, &rhs.@"margin-inline-start"), + .@"margin-inline-end" => |*v| css.generic.eql(LengthPercentageOrAuto, v, &rhs.@"margin-inline-end"), + .@"margin-block" => |*v| css.generic.eql(MarginBlock, v, &rhs.@"margin-block"), + .@"margin-inline" => |*v| css.generic.eql(MarginInline, v, &rhs.@"margin-inline"), + .margin => |*v| css.generic.eql(Margin, v, &rhs.margin), + .@"padding-top" => |*v| css.generic.eql(LengthPercentageOrAuto, v, &rhs.@"padding-top"), + .@"padding-bottom" => |*v| css.generic.eql(LengthPercentageOrAuto, v, &rhs.@"padding-bottom"), + .@"padding-left" => |*v| css.generic.eql(LengthPercentageOrAuto, v, &rhs.@"padding-left"), + .@"padding-right" => |*v| css.generic.eql(LengthPercentageOrAuto, v, &rhs.@"padding-right"), + .@"padding-block-start" => |*v| css.generic.eql(LengthPercentageOrAuto, v, &rhs.@"padding-block-start"), + .@"padding-block-end" => |*v| css.generic.eql(LengthPercentageOrAuto, v, &rhs.@"padding-block-end"), + .@"padding-inline-start" => |*v| css.generic.eql(LengthPercentageOrAuto, v, &rhs.@"padding-inline-start"), + .@"padding-inline-end" => |*v| css.generic.eql(LengthPercentageOrAuto, v, &rhs.@"padding-inline-end"), + .@"padding-block" => |*v| css.generic.eql(PaddingBlock, v, &rhs.@"padding-block"), + .@"padding-inline" => |*v| css.generic.eql(PaddingInline, v, &rhs.@"padding-inline"), + .padding => |*v| css.generic.eql(Padding, v, &rhs.padding), + .@"scroll-margin-top" => |*v| css.generic.eql(LengthPercentageOrAuto, v, &rhs.@"scroll-margin-top"), + .@"scroll-margin-bottom" => |*v| css.generic.eql(LengthPercentageOrAuto, v, &rhs.@"scroll-margin-bottom"), + .@"scroll-margin-left" => |*v| css.generic.eql(LengthPercentageOrAuto, v, &rhs.@"scroll-margin-left"), + .@"scroll-margin-right" => |*v| css.generic.eql(LengthPercentageOrAuto, v, &rhs.@"scroll-margin-right"), + .@"scroll-margin-block-start" => |*v| css.generic.eql(LengthPercentageOrAuto, v, &rhs.@"scroll-margin-block-start"), + .@"scroll-margin-block-end" => |*v| css.generic.eql(LengthPercentageOrAuto, v, &rhs.@"scroll-margin-block-end"), + .@"scroll-margin-inline-start" => |*v| css.generic.eql(LengthPercentageOrAuto, v, &rhs.@"scroll-margin-inline-start"), + .@"scroll-margin-inline-end" => |*v| css.generic.eql(LengthPercentageOrAuto, v, &rhs.@"scroll-margin-inline-end"), + .@"scroll-margin-block" => |*v| css.generic.eql(ScrollMarginBlock, v, &rhs.@"scroll-margin-block"), + .@"scroll-margin-inline" => |*v| css.generic.eql(ScrollMarginInline, v, &rhs.@"scroll-margin-inline"), + .@"scroll-margin" => |*v| css.generic.eql(ScrollMargin, v, &rhs.@"scroll-margin"), + .@"scroll-padding-top" => |*v| css.generic.eql(LengthPercentageOrAuto, v, &rhs.@"scroll-padding-top"), + .@"scroll-padding-bottom" => |*v| css.generic.eql(LengthPercentageOrAuto, v, &rhs.@"scroll-padding-bottom"), + .@"scroll-padding-left" => |*v| css.generic.eql(LengthPercentageOrAuto, v, &rhs.@"scroll-padding-left"), + 
.@"scroll-padding-right" => |*v| css.generic.eql(LengthPercentageOrAuto, v, &rhs.@"scroll-padding-right"), + .@"scroll-padding-block-start" => |*v| css.generic.eql(LengthPercentageOrAuto, v, &rhs.@"scroll-padding-block-start"), + .@"scroll-padding-block-end" => |*v| css.generic.eql(LengthPercentageOrAuto, v, &rhs.@"scroll-padding-block-end"), + .@"scroll-padding-inline-start" => |*v| css.generic.eql(LengthPercentageOrAuto, v, &rhs.@"scroll-padding-inline-start"), + .@"scroll-padding-inline-end" => |*v| css.generic.eql(LengthPercentageOrAuto, v, &rhs.@"scroll-padding-inline-end"), + .@"scroll-padding-block" => |*v| css.generic.eql(ScrollPaddingBlock, v, &rhs.@"scroll-padding-block"), + .@"scroll-padding-inline" => |*v| css.generic.eql(ScrollPaddingInline, v, &rhs.@"scroll-padding-inline"), + .@"scroll-padding" => |*v| css.generic.eql(ScrollPadding, v, &rhs.@"scroll-padding"), + .@"font-weight" => |*v| css.generic.eql(FontWeight, v, &rhs.@"font-weight"), + .@"font-size" => |*v| css.generic.eql(FontSize, v, &rhs.@"font-size"), + .@"font-stretch" => |*v| css.generic.eql(FontStretch, v, &rhs.@"font-stretch"), + .@"font-family" => |*v| css.generic.eql(BabyList(FontFamily), v, &rhs.@"font-family"), + .@"font-style" => |*v| css.generic.eql(FontStyle, v, &rhs.@"font-style"), + .@"font-variant-caps" => |*v| css.generic.eql(FontVariantCaps, v, &rhs.@"font-variant-caps"), + .@"line-height" => |*v| css.generic.eql(LineHeight, v, &rhs.@"line-height"), + .font => |*v| css.generic.eql(Font, v, &rhs.font), + .@"text-decoration-color" => |*v| css.generic.eql(CssColor, &v[0], &v[0]) and v[1].eq(rhs.@"text-decoration-color"[1]), + .@"text-emphasis-color" => |*v| css.generic.eql(CssColor, &v[0], &v[0]) and v[1].eq(rhs.@"text-emphasis-color"[1]), + .@"text-shadow" => |*v| css.generic.eql(SmallList(TextShadow, 1), v, &rhs.@"text-shadow"), + .direction => |*v| css.generic.eql(Direction, v, &rhs.direction), + .composes => |*v| css.generic.eql(Composes, v, &rhs.composes), + .@"mask-image" => |*v| css.generic.eql(SmallList(Image, 1), &v[0], &v[0]) and v[1].eq(rhs.@"mask-image"[1]), + .@"mask-mode" => |*v| css.generic.eql(SmallList(MaskMode, 1), v, &rhs.@"mask-mode"), + .@"mask-repeat" => |*v| css.generic.eql(SmallList(BackgroundRepeat, 1), &v[0], &v[0]) and v[1].eq(rhs.@"mask-repeat"[1]), + .@"mask-position-x" => |*v| css.generic.eql(SmallList(HorizontalPosition, 1), v, &rhs.@"mask-position-x"), + .@"mask-position-y" => |*v| css.generic.eql(SmallList(VerticalPosition, 1), v, &rhs.@"mask-position-y"), + .@"mask-position" => |*v| css.generic.eql(SmallList(Position, 1), &v[0], &v[0]) and v[1].eq(rhs.@"mask-position"[1]), + .@"mask-clip" => |*v| css.generic.eql(SmallList(MaskClip, 1), &v[0], &v[0]) and v[1].eq(rhs.@"mask-clip"[1]), + .@"mask-origin" => |*v| css.generic.eql(SmallList(GeometryBox, 1), &v[0], &v[0]) and v[1].eq(rhs.@"mask-origin"[1]), + .@"mask-size" => |*v| css.generic.eql(SmallList(BackgroundSize, 1), &v[0], &v[0]) and v[1].eq(rhs.@"mask-size"[1]), + .@"mask-composite" => |*v| css.generic.eql(SmallList(MaskComposite, 1), v, &rhs.@"mask-composite"), + .@"mask-type" => |*v| css.generic.eql(MaskType, v, &rhs.@"mask-type"), + .mask => |*v| css.generic.eql(SmallList(Mask, 1), &v[0], &v[0]) and v[1].eq(rhs.mask[1]), + .@"mask-border-source" => |*v| css.generic.eql(Image, v, &rhs.@"mask-border-source"), + .@"mask-border-mode" => |*v| css.generic.eql(MaskBorderMode, v, &rhs.@"mask-border-mode"), + .@"mask-border-slice" => |*v| css.generic.eql(BorderImageSlice, v, &rhs.@"mask-border-slice"), + 
.@"mask-border-width" => |*v| css.generic.eql(Rect(BorderImageSideWidth), v, &rhs.@"mask-border-width"), + .@"mask-border-outset" => |*v| css.generic.eql(Rect(LengthOrNumber), v, &rhs.@"mask-border-outset"), + .@"mask-border-repeat" => |*v| css.generic.eql(BorderImageRepeat, v, &rhs.@"mask-border-repeat"), + .@"mask-border" => |*v| css.generic.eql(MaskBorder, v, &rhs.@"mask-border"), + .@"-webkit-mask-composite" => |*v| css.generic.eql(SmallList(WebKitMaskComposite, 1), v, &rhs.@"-webkit-mask-composite"), + .@"mask-source-type" => |*v| css.generic.eql(SmallList(WebKitMaskSourceType, 1), &v[0], &v[0]) and v[1].eq(rhs.@"mask-source-type"[1]), + .@"mask-box-image" => |*v| css.generic.eql(BorderImage, &v[0], &v[0]) and v[1].eq(rhs.@"mask-box-image"[1]), + .@"mask-box-image-source" => |*v| css.generic.eql(Image, &v[0], &v[0]) and v[1].eq(rhs.@"mask-box-image-source"[1]), + .@"mask-box-image-slice" => |*v| css.generic.eql(BorderImageSlice, &v[0], &v[0]) and v[1].eq(rhs.@"mask-box-image-slice"[1]), + .@"mask-box-image-width" => |*v| css.generic.eql(Rect(BorderImageSideWidth), &v[0], &v[0]) and v[1].eq(rhs.@"mask-box-image-width"[1]), + .@"mask-box-image-outset" => |*v| css.generic.eql(Rect(LengthOrNumber), &v[0], &v[0]) and v[1].eq(rhs.@"mask-box-image-outset"[1]), + .@"mask-box-image-repeat" => |*v| css.generic.eql(BorderImageRepeat, &v[0], &v[0]) and v[1].eq(rhs.@"mask-box-image-repeat"[1]), + .all, .unparsed => true, + .custom => |*c| c.eql(&rhs.custom), + }; + } +}; +pub const PropertyId = union(PropertyIdTag) { + @"background-color", + @"background-image", + @"background-position-x", + @"background-position-y", + @"background-position", + @"background-size", + @"background-repeat", + @"background-attachment", + @"background-clip": VendorPrefix, + @"background-origin", + background, + @"box-shadow": VendorPrefix, + opacity, + color, + display, + visibility, + width, + height, + @"min-width", + @"min-height", + @"max-width", + @"max-height", + @"block-size", + @"inline-size", + @"min-block-size", + @"min-inline-size", + @"max-block-size", + @"max-inline-size", + @"box-sizing": VendorPrefix, + @"aspect-ratio", + overflow, + @"overflow-x", + @"overflow-y", + @"text-overflow": VendorPrefix, + position, + top, + bottom, + left, + right, + @"inset-block-start", + @"inset-block-end", + @"inset-inline-start", + @"inset-inline-end", + @"inset-block", + @"inset-inline", + inset, + @"border-spacing", + @"border-top-color", + @"border-bottom-color", + @"border-left-color", + @"border-right-color", + @"border-block-start-color", + @"border-block-end-color", + @"border-inline-start-color", + @"border-inline-end-color", + @"border-top-style", + @"border-bottom-style", + @"border-left-style", + @"border-right-style", + @"border-block-start-style", + @"border-block-end-style", + @"border-inline-start-style", + @"border-inline-end-style", + @"border-top-width", + @"border-bottom-width", + @"border-left-width", + @"border-right-width", + @"border-block-start-width", + @"border-block-end-width", + @"border-inline-start-width", + @"border-inline-end-width", + @"border-top-left-radius": VendorPrefix, + @"border-top-right-radius": VendorPrefix, + @"border-bottom-left-radius": VendorPrefix, + @"border-bottom-right-radius": VendorPrefix, + @"border-start-start-radius", + @"border-start-end-radius", + @"border-end-start-radius", + @"border-end-end-radius", + @"border-radius": VendorPrefix, + @"border-image-source", + @"border-image-outset", + @"border-image-repeat", + @"border-image-width", + @"border-image-slice", + 
@"border-image": VendorPrefix, + @"border-color", + @"border-style", + @"border-width", + @"border-block-color", + @"border-block-style", + @"border-block-width", + @"border-inline-color", + @"border-inline-style", + @"border-inline-width", + border, + @"border-top", + @"border-bottom", + @"border-left", + @"border-right", + @"border-block", + @"border-block-start", + @"border-block-end", + @"border-inline", + @"border-inline-start", + @"border-inline-end", + outline, + @"outline-color", + @"outline-style", + @"outline-width", + @"flex-direction": VendorPrefix, + @"flex-wrap": VendorPrefix, + @"flex-flow": VendorPrefix, + @"flex-grow": VendorPrefix, + @"flex-shrink": VendorPrefix, + @"flex-basis": VendorPrefix, + flex: VendorPrefix, + order: VendorPrefix, + @"align-content": VendorPrefix, + @"justify-content": VendorPrefix, + @"place-content", + @"align-self": VendorPrefix, + @"justify-self", + @"place-self", + @"align-items": VendorPrefix, + @"justify-items", + @"place-items", + @"row-gap", + @"column-gap", + gap, + @"box-orient": VendorPrefix, + @"box-direction": VendorPrefix, + @"box-ordinal-group": VendorPrefix, + @"box-align": VendorPrefix, + @"box-flex": VendorPrefix, + @"box-flex-group": VendorPrefix, + @"box-pack": VendorPrefix, + @"box-lines": VendorPrefix, + @"flex-pack": VendorPrefix, + @"flex-order": VendorPrefix, + @"flex-align": VendorPrefix, + @"flex-item-align": VendorPrefix, + @"flex-line-pack": VendorPrefix, + @"flex-positive": VendorPrefix, + @"flex-negative": VendorPrefix, + @"flex-preferred-size": VendorPrefix, + @"margin-top", + @"margin-bottom", + @"margin-left", + @"margin-right", + @"margin-block-start", + @"margin-block-end", + @"margin-inline-start", + @"margin-inline-end", + @"margin-block", + @"margin-inline", + margin, + @"padding-top", + @"padding-bottom", + @"padding-left", + @"padding-right", + @"padding-block-start", + @"padding-block-end", + @"padding-inline-start", + @"padding-inline-end", + @"padding-block", + @"padding-inline", + padding, + @"scroll-margin-top", + @"scroll-margin-bottom", + @"scroll-margin-left", + @"scroll-margin-right", + @"scroll-margin-block-start", + @"scroll-margin-block-end", + @"scroll-margin-inline-start", + @"scroll-margin-inline-end", + @"scroll-margin-block", + @"scroll-margin-inline", + @"scroll-margin", + @"scroll-padding-top", + @"scroll-padding-bottom", + @"scroll-padding-left", + @"scroll-padding-right", + @"scroll-padding-block-start", + @"scroll-padding-block-end", + @"scroll-padding-inline-start", + @"scroll-padding-inline-end", + @"scroll-padding-block", + @"scroll-padding-inline", + @"scroll-padding", + @"font-weight", + @"font-size", + @"font-stretch", + @"font-family", + @"font-style", + @"font-variant-caps", + @"line-height", + font, + @"text-decoration-color": VendorPrefix, + @"text-emphasis-color": VendorPrefix, + @"text-shadow", + direction, + composes, + @"mask-image": VendorPrefix, + @"mask-mode", + @"mask-repeat": VendorPrefix, + @"mask-position-x", + @"mask-position-y", + @"mask-position": VendorPrefix, + @"mask-clip": VendorPrefix, + @"mask-origin": VendorPrefix, + @"mask-size": VendorPrefix, + @"mask-composite", + @"mask-type", + mask: VendorPrefix, + @"mask-border-source", + @"mask-border-mode", + @"mask-border-slice", + @"mask-border-width", + @"mask-border-outset", + @"mask-border-repeat", + @"mask-border", + @"-webkit-mask-composite", + @"mask-source-type": VendorPrefix, + @"mask-box-image": VendorPrefix, + @"mask-box-image-source": VendorPrefix, + @"mask-box-image-slice": VendorPrefix, + 
@"mask-box-image-width": VendorPrefix, + @"mask-box-image-outset": VendorPrefix, + @"mask-box-image-repeat": VendorPrefix, + all, + unparsed, + custom: CustomPropertyName, + + pub usingnamespace PropertyIdImpl(); + + /// Returns the property name, without any vendor prefixes. + pub inline fn name(this: *const PropertyId) []const u8 { + return @tagName(this.*); + } + + /// Returns the vendor prefix for this property id. + pub fn prefix(this: *const PropertyId) VendorPrefix { + return switch (this.*) { + .@"background-color" => VendorPrefix.empty(), + .@"background-image" => VendorPrefix.empty(), + .@"background-position-x" => VendorPrefix.empty(), + .@"background-position-y" => VendorPrefix.empty(), + .@"background-position" => VendorPrefix.empty(), + .@"background-size" => VendorPrefix.empty(), + .@"background-repeat" => VendorPrefix.empty(), + .@"background-attachment" => VendorPrefix.empty(), + .@"background-clip" => |p| p, + .@"background-origin" => VendorPrefix.empty(), + .background => VendorPrefix.empty(), + .@"box-shadow" => |p| p, + .opacity => VendorPrefix.empty(), + .color => VendorPrefix.empty(), + .display => VendorPrefix.empty(), + .visibility => VendorPrefix.empty(), + .width => VendorPrefix.empty(), + .height => VendorPrefix.empty(), + .@"min-width" => VendorPrefix.empty(), + .@"min-height" => VendorPrefix.empty(), + .@"max-width" => VendorPrefix.empty(), + .@"max-height" => VendorPrefix.empty(), + .@"block-size" => VendorPrefix.empty(), + .@"inline-size" => VendorPrefix.empty(), + .@"min-block-size" => VendorPrefix.empty(), + .@"min-inline-size" => VendorPrefix.empty(), + .@"max-block-size" => VendorPrefix.empty(), + .@"max-inline-size" => VendorPrefix.empty(), + .@"box-sizing" => |p| p, + .@"aspect-ratio" => VendorPrefix.empty(), + .overflow => VendorPrefix.empty(), + .@"overflow-x" => VendorPrefix.empty(), + .@"overflow-y" => VendorPrefix.empty(), + .@"text-overflow" => |p| p, + .position => VendorPrefix.empty(), + .top => VendorPrefix.empty(), + .bottom => VendorPrefix.empty(), + .left => VendorPrefix.empty(), + .right => VendorPrefix.empty(), + .@"inset-block-start" => VendorPrefix.empty(), + .@"inset-block-end" => VendorPrefix.empty(), + .@"inset-inline-start" => VendorPrefix.empty(), + .@"inset-inline-end" => VendorPrefix.empty(), + .@"inset-block" => VendorPrefix.empty(), + .@"inset-inline" => VendorPrefix.empty(), + .inset => VendorPrefix.empty(), + .@"border-spacing" => VendorPrefix.empty(), + .@"border-top-color" => VendorPrefix.empty(), + .@"border-bottom-color" => VendorPrefix.empty(), + .@"border-left-color" => VendorPrefix.empty(), + .@"border-right-color" => VendorPrefix.empty(), + .@"border-block-start-color" => VendorPrefix.empty(), + .@"border-block-end-color" => VendorPrefix.empty(), + .@"border-inline-start-color" => VendorPrefix.empty(), + .@"border-inline-end-color" => VendorPrefix.empty(), + .@"border-top-style" => VendorPrefix.empty(), + .@"border-bottom-style" => VendorPrefix.empty(), + .@"border-left-style" => VendorPrefix.empty(), + .@"border-right-style" => VendorPrefix.empty(), + .@"border-block-start-style" => VendorPrefix.empty(), + .@"border-block-end-style" => VendorPrefix.empty(), + .@"border-inline-start-style" => VendorPrefix.empty(), + .@"border-inline-end-style" => VendorPrefix.empty(), + .@"border-top-width" => VendorPrefix.empty(), + .@"border-bottom-width" => VendorPrefix.empty(), + .@"border-left-width" => VendorPrefix.empty(), + .@"border-right-width" => VendorPrefix.empty(), + .@"border-block-start-width" => VendorPrefix.empty(), 
+ .@"border-block-end-width" => VendorPrefix.empty(), + .@"border-inline-start-width" => VendorPrefix.empty(), + .@"border-inline-end-width" => VendorPrefix.empty(), + .@"border-top-left-radius" => |p| p, + .@"border-top-right-radius" => |p| p, + .@"border-bottom-left-radius" => |p| p, + .@"border-bottom-right-radius" => |p| p, + .@"border-start-start-radius" => VendorPrefix.empty(), + .@"border-start-end-radius" => VendorPrefix.empty(), + .@"border-end-start-radius" => VendorPrefix.empty(), + .@"border-end-end-radius" => VendorPrefix.empty(), + .@"border-radius" => |p| p, + .@"border-image-source" => VendorPrefix.empty(), + .@"border-image-outset" => VendorPrefix.empty(), + .@"border-image-repeat" => VendorPrefix.empty(), + .@"border-image-width" => VendorPrefix.empty(), + .@"border-image-slice" => VendorPrefix.empty(), + .@"border-image" => |p| p, + .@"border-color" => VendorPrefix.empty(), + .@"border-style" => VendorPrefix.empty(), + .@"border-width" => VendorPrefix.empty(), + .@"border-block-color" => VendorPrefix.empty(), + .@"border-block-style" => VendorPrefix.empty(), + .@"border-block-width" => VendorPrefix.empty(), + .@"border-inline-color" => VendorPrefix.empty(), + .@"border-inline-style" => VendorPrefix.empty(), + .@"border-inline-width" => VendorPrefix.empty(), + .border => VendorPrefix.empty(), + .@"border-top" => VendorPrefix.empty(), + .@"border-bottom" => VendorPrefix.empty(), + .@"border-left" => VendorPrefix.empty(), + .@"border-right" => VendorPrefix.empty(), + .@"border-block" => VendorPrefix.empty(), + .@"border-block-start" => VendorPrefix.empty(), + .@"border-block-end" => VendorPrefix.empty(), + .@"border-inline" => VendorPrefix.empty(), + .@"border-inline-start" => VendorPrefix.empty(), + .@"border-inline-end" => VendorPrefix.empty(), + .outline => VendorPrefix.empty(), + .@"outline-color" => VendorPrefix.empty(), + .@"outline-style" => VendorPrefix.empty(), + .@"outline-width" => VendorPrefix.empty(), + .@"flex-direction" => |p| p, + .@"flex-wrap" => |p| p, + .@"flex-flow" => |p| p, + .@"flex-grow" => |p| p, + .@"flex-shrink" => |p| p, + .@"flex-basis" => |p| p, + .flex => |p| p, + .order => |p| p, + .@"align-content" => |p| p, + .@"justify-content" => |p| p, + .@"place-content" => VendorPrefix.empty(), + .@"align-self" => |p| p, + .@"justify-self" => VendorPrefix.empty(), + .@"place-self" => VendorPrefix.empty(), + .@"align-items" => |p| p, + .@"justify-items" => VendorPrefix.empty(), + .@"place-items" => VendorPrefix.empty(), + .@"row-gap" => VendorPrefix.empty(), + .@"column-gap" => VendorPrefix.empty(), + .gap => VendorPrefix.empty(), + .@"box-orient" => |p| p, + .@"box-direction" => |p| p, + .@"box-ordinal-group" => |p| p, + .@"box-align" => |p| p, + .@"box-flex" => |p| p, + .@"box-flex-group" => |p| p, + .@"box-pack" => |p| p, + .@"box-lines" => |p| p, + .@"flex-pack" => |p| p, + .@"flex-order" => |p| p, + .@"flex-align" => |p| p, + .@"flex-item-align" => |p| p, + .@"flex-line-pack" => |p| p, + .@"flex-positive" => |p| p, + .@"flex-negative" => |p| p, + .@"flex-preferred-size" => |p| p, + .@"margin-top" => VendorPrefix.empty(), + .@"margin-bottom" => VendorPrefix.empty(), + .@"margin-left" => VendorPrefix.empty(), + .@"margin-right" => VendorPrefix.empty(), + .@"margin-block-start" => VendorPrefix.empty(), + .@"margin-block-end" => VendorPrefix.empty(), + .@"margin-inline-start" => VendorPrefix.empty(), + .@"margin-inline-end" => VendorPrefix.empty(), + .@"margin-block" => VendorPrefix.empty(), + .@"margin-inline" => VendorPrefix.empty(), + .margin => 
VendorPrefix.empty(), + .@"padding-top" => VendorPrefix.empty(), + .@"padding-bottom" => VendorPrefix.empty(), + .@"padding-left" => VendorPrefix.empty(), + .@"padding-right" => VendorPrefix.empty(), + .@"padding-block-start" => VendorPrefix.empty(), + .@"padding-block-end" => VendorPrefix.empty(), + .@"padding-inline-start" => VendorPrefix.empty(), + .@"padding-inline-end" => VendorPrefix.empty(), + .@"padding-block" => VendorPrefix.empty(), + .@"padding-inline" => VendorPrefix.empty(), + .padding => VendorPrefix.empty(), + .@"scroll-margin-top" => VendorPrefix.empty(), + .@"scroll-margin-bottom" => VendorPrefix.empty(), + .@"scroll-margin-left" => VendorPrefix.empty(), + .@"scroll-margin-right" => VendorPrefix.empty(), + .@"scroll-margin-block-start" => VendorPrefix.empty(), + .@"scroll-margin-block-end" => VendorPrefix.empty(), + .@"scroll-margin-inline-start" => VendorPrefix.empty(), + .@"scroll-margin-inline-end" => VendorPrefix.empty(), + .@"scroll-margin-block" => VendorPrefix.empty(), + .@"scroll-margin-inline" => VendorPrefix.empty(), + .@"scroll-margin" => VendorPrefix.empty(), + .@"scroll-padding-top" => VendorPrefix.empty(), + .@"scroll-padding-bottom" => VendorPrefix.empty(), + .@"scroll-padding-left" => VendorPrefix.empty(), + .@"scroll-padding-right" => VendorPrefix.empty(), + .@"scroll-padding-block-start" => VendorPrefix.empty(), + .@"scroll-padding-block-end" => VendorPrefix.empty(), + .@"scroll-padding-inline-start" => VendorPrefix.empty(), + .@"scroll-padding-inline-end" => VendorPrefix.empty(), + .@"scroll-padding-block" => VendorPrefix.empty(), + .@"scroll-padding-inline" => VendorPrefix.empty(), + .@"scroll-padding" => VendorPrefix.empty(), + .@"font-weight" => VendorPrefix.empty(), + .@"font-size" => VendorPrefix.empty(), + .@"font-stretch" => VendorPrefix.empty(), + .@"font-family" => VendorPrefix.empty(), + .@"font-style" => VendorPrefix.empty(), + .@"font-variant-caps" => VendorPrefix.empty(), + .@"line-height" => VendorPrefix.empty(), + .font => VendorPrefix.empty(), + .@"text-decoration-color" => |p| p, + .@"text-emphasis-color" => |p| p, + .@"text-shadow" => VendorPrefix.empty(), + .direction => VendorPrefix.empty(), + .composes => VendorPrefix.empty(), + .@"mask-image" => |p| p, + .@"mask-mode" => VendorPrefix.empty(), + .@"mask-repeat" => |p| p, + .@"mask-position-x" => VendorPrefix.empty(), + .@"mask-position-y" => VendorPrefix.empty(), + .@"mask-position" => |p| p, + .@"mask-clip" => |p| p, + .@"mask-origin" => |p| p, + .@"mask-size" => |p| p, + .@"mask-composite" => VendorPrefix.empty(), + .@"mask-type" => VendorPrefix.empty(), + .mask => |p| p, + .@"mask-border-source" => VendorPrefix.empty(), + .@"mask-border-mode" => VendorPrefix.empty(), + .@"mask-border-slice" => VendorPrefix.empty(), + .@"mask-border-width" => VendorPrefix.empty(), + .@"mask-border-outset" => VendorPrefix.empty(), + .@"mask-border-repeat" => VendorPrefix.empty(), + .@"mask-border" => VendorPrefix.empty(), + .@"-webkit-mask-composite" => VendorPrefix.empty(), + .@"mask-source-type" => |p| p, + .@"mask-box-image" => |p| p, + .@"mask-box-image-source" => |p| p, + .@"mask-box-image-slice" => |p| p, + .@"mask-box-image-width" => |p| p, + .@"mask-box-image-outset" => |p| p, + .@"mask-box-image-repeat" => |p| p, + .all, .custom, .unparsed => VendorPrefix.empty(), + }; + } + + pub fn fromNameAndPrefix(name1: []const u8, pre: VendorPrefix) ?PropertyId { + const Enum = enum { @"background-color", @"background-image", @"background-position-x", @"background-position-y", @"background-position", 
@"background-size", @"background-repeat", @"background-attachment", @"background-clip", @"background-origin", background, @"box-shadow", opacity, color, display, visibility, width, height, @"min-width", @"min-height", @"max-width", @"max-height", @"block-size", @"inline-size", @"min-block-size", @"min-inline-size", @"max-block-size", @"max-inline-size", @"box-sizing", @"aspect-ratio", overflow, @"overflow-x", @"overflow-y", @"text-overflow", position, top, bottom, left, right, @"inset-block-start", @"inset-block-end", @"inset-inline-start", @"inset-inline-end", @"inset-block", @"inset-inline", inset, @"border-spacing", @"border-top-color", @"border-bottom-color", @"border-left-color", @"border-right-color", @"border-block-start-color", @"border-block-end-color", @"border-inline-start-color", @"border-inline-end-color", @"border-top-style", @"border-bottom-style", @"border-left-style", @"border-right-style", @"border-block-start-style", @"border-block-end-style", @"border-inline-start-style", @"border-inline-end-style", @"border-top-width", @"border-bottom-width", @"border-left-width", @"border-right-width", @"border-block-start-width", @"border-block-end-width", @"border-inline-start-width", @"border-inline-end-width", @"border-top-left-radius", @"border-top-right-radius", @"border-bottom-left-radius", @"border-bottom-right-radius", @"border-start-start-radius", @"border-start-end-radius", @"border-end-start-radius", @"border-end-end-radius", @"border-radius", @"border-image-source", @"border-image-outset", @"border-image-repeat", @"border-image-width", @"border-image-slice", @"border-image", @"border-color", @"border-style", @"border-width", @"border-block-color", @"border-block-style", @"border-block-width", @"border-inline-color", @"border-inline-style", @"border-inline-width", border, @"border-top", @"border-bottom", @"border-left", @"border-right", @"border-block", @"border-block-start", @"border-block-end", @"border-inline", @"border-inline-start", @"border-inline-end", outline, @"outline-color", @"outline-style", @"outline-width", @"flex-direction", @"flex-wrap", @"flex-flow", @"flex-grow", @"flex-shrink", @"flex-basis", flex, order, @"align-content", @"justify-content", @"place-content", @"align-self", @"justify-self", @"place-self", @"align-items", @"justify-items", @"place-items", @"row-gap", @"column-gap", gap, @"box-orient", @"box-direction", @"box-ordinal-group", @"box-align", @"box-flex", @"box-flex-group", @"box-pack", @"box-lines", @"flex-pack", @"flex-order", @"flex-align", @"flex-item-align", @"flex-line-pack", @"flex-positive", @"flex-negative", @"flex-preferred-size", @"margin-top", @"margin-bottom", @"margin-left", @"margin-right", @"margin-block-start", @"margin-block-end", @"margin-inline-start", @"margin-inline-end", @"margin-block", @"margin-inline", margin, @"padding-top", @"padding-bottom", @"padding-left", @"padding-right", @"padding-block-start", @"padding-block-end", @"padding-inline-start", @"padding-inline-end", @"padding-block", @"padding-inline", padding, @"scroll-margin-top", @"scroll-margin-bottom", @"scroll-margin-left", @"scroll-margin-right", @"scroll-margin-block-start", @"scroll-margin-block-end", @"scroll-margin-inline-start", @"scroll-margin-inline-end", @"scroll-margin-block", @"scroll-margin-inline", @"scroll-margin", @"scroll-padding-top", @"scroll-padding-bottom", @"scroll-padding-left", @"scroll-padding-right", @"scroll-padding-block-start", @"scroll-padding-block-end", @"scroll-padding-inline-start", @"scroll-padding-inline-end", 
@"scroll-padding-block", @"scroll-padding-inline", @"scroll-padding", @"font-weight", @"font-size", @"font-stretch", @"font-family", @"font-style", @"font-variant-caps", @"line-height", font, @"text-decoration-color", @"text-emphasis-color", @"text-shadow", direction, composes, @"mask-image", @"mask-mode", @"mask-repeat", @"mask-position-x", @"mask-position-y", @"mask-position", @"mask-clip", @"mask-origin", @"mask-size", @"mask-composite", @"mask-type", mask, @"mask-border-source", @"mask-border-mode", @"mask-border-slice", @"mask-border-width", @"mask-border-outset", @"mask-border-repeat", @"mask-border", @"-webkit-mask-composite", @"mask-source-type", @"mask-box-image", @"mask-box-image-source", @"mask-box-image-slice", @"mask-box-image-width", @"mask-box-image-outset", @"mask-box-image-repeat" }; + const Map = comptime bun.ComptimeEnumMap(Enum); + if (Map.getASCIIICaseInsensitive(name1)) |prop| { + switch (prop) { + .@"background-color" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"background-color"; + }, + .@"background-image" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"background-image"; + }, + .@"background-position-x" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"background-position-x"; + }, + .@"background-position-y" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"background-position-y"; + }, + .@"background-position" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"background-position"; + }, + .@"background-size" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"background-size"; + }, + .@"background-repeat" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"background-repeat"; + }, + .@"background-attachment" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"background-attachment"; + }, + .@"background-clip" => { + const allowed_prefixes = VendorPrefix{ .none = true, .webkit = true, .moz = true }; + if (allowed_prefixes.contains(pre)) return .{ .@"background-clip" = pre }; + }, + .@"background-origin" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"background-origin"; + }, + .background => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .background; + }, + .@"box-shadow" => { + const allowed_prefixes = VendorPrefix{ .none = true, .webkit = true, .moz = true }; + if (allowed_prefixes.contains(pre)) return .{ .@"box-shadow" = pre }; + }, + .opacity => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .opacity; + }, + .color => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .color; + }, + .display => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .display; + }, + .visibility => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .visibility; + }, + .width => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return 
.width; + }, + .height => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .height; + }, + .@"min-width" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"min-width"; + }, + .@"min-height" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"min-height"; + }, + .@"max-width" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"max-width"; + }, + .@"max-height" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"max-height"; + }, + .@"block-size" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"block-size"; + }, + .@"inline-size" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"inline-size"; + }, + .@"min-block-size" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"min-block-size"; + }, + .@"min-inline-size" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"min-inline-size"; + }, + .@"max-block-size" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"max-block-size"; + }, + .@"max-inline-size" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"max-inline-size"; + }, + .@"box-sizing" => { + const allowed_prefixes = VendorPrefix{ .none = true, .webkit = true, .moz = true }; + if (allowed_prefixes.contains(pre)) return .{ .@"box-sizing" = pre }; + }, + .@"aspect-ratio" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"aspect-ratio"; + }, + .overflow => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .overflow; + }, + .@"overflow-x" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"overflow-x"; + }, + .@"overflow-y" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"overflow-y"; + }, + .@"text-overflow" => { + const allowed_prefixes = VendorPrefix{ .none = true, .o = true }; + if (allowed_prefixes.contains(pre)) return .{ .@"text-overflow" = pre }; + }, + .position => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .position; + }, + .top => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .top; + }, + .bottom => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .bottom; + }, + .left => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .left; + }, + .right => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .right; + }, + .@"inset-block-start" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"inset-block-start"; + }, + .@"inset-block-end" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"inset-block-end"; + }, + .@"inset-inline-start" => { + 
const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"inset-inline-start"; + }, + .@"inset-inline-end" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"inset-inline-end"; + }, + .@"inset-block" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"inset-block"; + }, + .@"inset-inline" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"inset-inline"; + }, + .inset => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .inset; + }, + .@"border-spacing" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"border-spacing"; + }, + .@"border-top-color" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"border-top-color"; + }, + .@"border-bottom-color" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"border-bottom-color"; + }, + .@"border-left-color" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"border-left-color"; + }, + .@"border-right-color" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"border-right-color"; + }, + .@"border-block-start-color" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"border-block-start-color"; + }, + .@"border-block-end-color" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"border-block-end-color"; + }, + .@"border-inline-start-color" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"border-inline-start-color"; + }, + .@"border-inline-end-color" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"border-inline-end-color"; + }, + .@"border-top-style" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"border-top-style"; + }, + .@"border-bottom-style" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"border-bottom-style"; + }, + .@"border-left-style" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"border-left-style"; + }, + .@"border-right-style" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"border-right-style"; + }, + .@"border-block-start-style" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"border-block-start-style"; + }, + .@"border-block-end-style" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"border-block-end-style"; + }, + .@"border-inline-start-style" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"border-inline-start-style"; + }, + .@"border-inline-end-style" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"border-inline-end-style"; + }, + .@"border-top-width" => { + 
const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"border-top-width"; + }, + .@"border-bottom-width" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"border-bottom-width"; + }, + .@"border-left-width" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"border-left-width"; + }, + .@"border-right-width" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"border-right-width"; + }, + .@"border-block-start-width" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"border-block-start-width"; + }, + .@"border-block-end-width" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"border-block-end-width"; + }, + .@"border-inline-start-width" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"border-inline-start-width"; + }, + .@"border-inline-end-width" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"border-inline-end-width"; + }, + .@"border-top-left-radius" => { + const allowed_prefixes = VendorPrefix{ .none = true, .webkit = true, .moz = true }; + if (allowed_prefixes.contains(pre)) return .{ .@"border-top-left-radius" = pre }; + }, + .@"border-top-right-radius" => { + const allowed_prefixes = VendorPrefix{ .none = true, .webkit = true, .moz = true }; + if (allowed_prefixes.contains(pre)) return .{ .@"border-top-right-radius" = pre }; + }, + .@"border-bottom-left-radius" => { + const allowed_prefixes = VendorPrefix{ .none = true, .webkit = true, .moz = true }; + if (allowed_prefixes.contains(pre)) return .{ .@"border-bottom-left-radius" = pre }; + }, + .@"border-bottom-right-radius" => { + const allowed_prefixes = VendorPrefix{ .none = true, .webkit = true, .moz = true }; + if (allowed_prefixes.contains(pre)) return .{ .@"border-bottom-right-radius" = pre }; + }, + .@"border-start-start-radius" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"border-start-start-radius"; + }, + .@"border-start-end-radius" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"border-start-end-radius"; + }, + .@"border-end-start-radius" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"border-end-start-radius"; + }, + .@"border-end-end-radius" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"border-end-end-radius"; + }, + .@"border-radius" => { + const allowed_prefixes = VendorPrefix{ .none = true, .webkit = true, .moz = true }; + if (allowed_prefixes.contains(pre)) return .{ .@"border-radius" = pre }; + }, + .@"border-image-source" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"border-image-source"; + }, + .@"border-image-outset" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"border-image-outset"; + }, + .@"border-image-repeat" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"border-image-repeat"; + }, + .@"border-image-width" => { + const 
allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"border-image-width"; + }, + .@"border-image-slice" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"border-image-slice"; + }, + .@"border-image" => { + const allowed_prefixes = VendorPrefix{ .none = true, .webkit = true, .moz = true, .o = true }; + if (allowed_prefixes.contains(pre)) return .{ .@"border-image" = pre }; + }, + .@"border-color" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"border-color"; + }, + .@"border-style" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"border-style"; + }, + .@"border-width" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"border-width"; + }, + .@"border-block-color" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"border-block-color"; + }, + .@"border-block-style" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"border-block-style"; + }, + .@"border-block-width" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"border-block-width"; + }, + .@"border-inline-color" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"border-inline-color"; + }, + .@"border-inline-style" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"border-inline-style"; + }, + .@"border-inline-width" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"border-inline-width"; + }, + .border => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .border; + }, + .@"border-top" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"border-top"; + }, + .@"border-bottom" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"border-bottom"; + }, + .@"border-left" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"border-left"; + }, + .@"border-right" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"border-right"; + }, + .@"border-block" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"border-block"; + }, + .@"border-block-start" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"border-block-start"; + }, + .@"border-block-end" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"border-block-end"; + }, + .@"border-inline" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"border-inline"; + }, + .@"border-inline-start" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"border-inline-start"; + }, + .@"border-inline-end" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return 
.@"border-inline-end"; + }, + .outline => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .outline; + }, + .@"outline-color" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"outline-color"; + }, + .@"outline-style" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"outline-style"; + }, + .@"outline-width" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"outline-width"; + }, + .@"flex-direction" => { + const allowed_prefixes = VendorPrefix{ .none = true, .webkit = true, .ms = true }; + if (allowed_prefixes.contains(pre)) return .{ .@"flex-direction" = pre }; + }, + .@"flex-wrap" => { + const allowed_prefixes = VendorPrefix{ .none = true, .webkit = true, .ms = true }; + if (allowed_prefixes.contains(pre)) return .{ .@"flex-wrap" = pre }; + }, + .@"flex-flow" => { + const allowed_prefixes = VendorPrefix{ .none = true, .webkit = true, .ms = true }; + if (allowed_prefixes.contains(pre)) return .{ .@"flex-flow" = pre }; + }, + .@"flex-grow" => { + const allowed_prefixes = VendorPrefix{ .none = true, .webkit = true }; + if (allowed_prefixes.contains(pre)) return .{ .@"flex-grow" = pre }; + }, + .@"flex-shrink" => { + const allowed_prefixes = VendorPrefix{ .none = true, .webkit = true }; + if (allowed_prefixes.contains(pre)) return .{ .@"flex-shrink" = pre }; + }, + .@"flex-basis" => { + const allowed_prefixes = VendorPrefix{ .none = true, .webkit = true }; + if (allowed_prefixes.contains(pre)) return .{ .@"flex-basis" = pre }; + }, + .flex => { + const allowed_prefixes = VendorPrefix{ .none = true, .webkit = true, .ms = true }; + if (allowed_prefixes.contains(pre)) return .{ .flex = pre }; + }, + .order => { + const allowed_prefixes = VendorPrefix{ .none = true, .webkit = true }; + if (allowed_prefixes.contains(pre)) return .{ .order = pre }; + }, + .@"align-content" => { + const allowed_prefixes = VendorPrefix{ .none = true, .webkit = true }; + if (allowed_prefixes.contains(pre)) return .{ .@"align-content" = pre }; + }, + .@"justify-content" => { + const allowed_prefixes = VendorPrefix{ .none = true, .webkit = true }; + if (allowed_prefixes.contains(pre)) return .{ .@"justify-content" = pre }; + }, + .@"place-content" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"place-content"; + }, + .@"align-self" => { + const allowed_prefixes = VendorPrefix{ .none = true, .webkit = true }; + if (allowed_prefixes.contains(pre)) return .{ .@"align-self" = pre }; + }, + .@"justify-self" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"justify-self"; + }, + .@"place-self" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"place-self"; + }, + .@"align-items" => { + const allowed_prefixes = VendorPrefix{ .none = true, .webkit = true }; + if (allowed_prefixes.contains(pre)) return .{ .@"align-items" = pre }; + }, + .@"justify-items" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"justify-items"; + }, + .@"place-items" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"place-items"; + }, + .@"row-gap" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if 
(allowed_prefixes.contains(pre)) return .@"row-gap"; + }, + .@"column-gap" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"column-gap"; + }, + .gap => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .gap; + }, + .@"box-orient" => { + const allowed_prefixes = VendorPrefix{ .none = true, .webkit = true, .moz = true }; + if (allowed_prefixes.contains(pre)) return .{ .@"box-orient" = pre }; + }, + .@"box-direction" => { + const allowed_prefixes = VendorPrefix{ .none = true, .webkit = true, .moz = true }; + if (allowed_prefixes.contains(pre)) return .{ .@"box-direction" = pre }; + }, + .@"box-ordinal-group" => { + const allowed_prefixes = VendorPrefix{ .none = true, .webkit = true, .moz = true }; + if (allowed_prefixes.contains(pre)) return .{ .@"box-ordinal-group" = pre }; + }, + .@"box-align" => { + const allowed_prefixes = VendorPrefix{ .none = true, .webkit = true, .moz = true }; + if (allowed_prefixes.contains(pre)) return .{ .@"box-align" = pre }; + }, + .@"box-flex" => { + const allowed_prefixes = VendorPrefix{ .none = true, .webkit = true, .moz = true }; + if (allowed_prefixes.contains(pre)) return .{ .@"box-flex" = pre }; + }, + .@"box-flex-group" => { + const allowed_prefixes = VendorPrefix{ .none = true, .webkit = true }; + if (allowed_prefixes.contains(pre)) return .{ .@"box-flex-group" = pre }; + }, + .@"box-pack" => { + const allowed_prefixes = VendorPrefix{ .none = true, .webkit = true, .moz = true }; + if (allowed_prefixes.contains(pre)) return .{ .@"box-pack" = pre }; + }, + .@"box-lines" => { + const allowed_prefixes = VendorPrefix{ .none = true, .webkit = true, .moz = true }; + if (allowed_prefixes.contains(pre)) return .{ .@"box-lines" = pre }; + }, + .@"flex-pack" => { + const allowed_prefixes = VendorPrefix{ .none = true, .ms = true }; + if (allowed_prefixes.contains(pre)) return .{ .@"flex-pack" = pre }; + }, + .@"flex-order" => { + const allowed_prefixes = VendorPrefix{ .none = true, .ms = true }; + if (allowed_prefixes.contains(pre)) return .{ .@"flex-order" = pre }; + }, + .@"flex-align" => { + const allowed_prefixes = VendorPrefix{ .none = true, .ms = true }; + if (allowed_prefixes.contains(pre)) return .{ .@"flex-align" = pre }; + }, + .@"flex-item-align" => { + const allowed_prefixes = VendorPrefix{ .none = true, .ms = true }; + if (allowed_prefixes.contains(pre)) return .{ .@"flex-item-align" = pre }; + }, + .@"flex-line-pack" => { + const allowed_prefixes = VendorPrefix{ .none = true, .ms = true }; + if (allowed_prefixes.contains(pre)) return .{ .@"flex-line-pack" = pre }; + }, + .@"flex-positive" => { + const allowed_prefixes = VendorPrefix{ .none = true, .ms = true }; + if (allowed_prefixes.contains(pre)) return .{ .@"flex-positive" = pre }; + }, + .@"flex-negative" => { + const allowed_prefixes = VendorPrefix{ .none = true, .ms = true }; + if (allowed_prefixes.contains(pre)) return .{ .@"flex-negative" = pre }; + }, + .@"flex-preferred-size" => { + const allowed_prefixes = VendorPrefix{ .none = true, .ms = true }; + if (allowed_prefixes.contains(pre)) return .{ .@"flex-preferred-size" = pre }; + }, + .@"margin-top" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"margin-top"; + }, + .@"margin-bottom" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"margin-bottom"; + }, + .@"margin-left" => { + const allowed_prefixes = 
VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"margin-left"; + }, + .@"margin-right" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"margin-right"; + }, + .@"margin-block-start" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"margin-block-start"; + }, + .@"margin-block-end" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"margin-block-end"; + }, + .@"margin-inline-start" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"margin-inline-start"; + }, + .@"margin-inline-end" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"margin-inline-end"; + }, + .@"margin-block" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"margin-block"; + }, + .@"margin-inline" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"margin-inline"; + }, + .margin => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .margin; + }, + .@"padding-top" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"padding-top"; + }, + .@"padding-bottom" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"padding-bottom"; + }, + .@"padding-left" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"padding-left"; + }, + .@"padding-right" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"padding-right"; + }, + .@"padding-block-start" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"padding-block-start"; + }, + .@"padding-block-end" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"padding-block-end"; + }, + .@"padding-inline-start" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"padding-inline-start"; + }, + .@"padding-inline-end" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"padding-inline-end"; + }, + .@"padding-block" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"padding-block"; + }, + .@"padding-inline" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"padding-inline"; + }, + .padding => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .padding; + }, + .@"scroll-margin-top" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"scroll-margin-top"; + }, + .@"scroll-margin-bottom" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"scroll-margin-bottom"; + }, + .@"scroll-margin-left" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"scroll-margin-left"; + }, + .@"scroll-margin-right" => { + const allowed_prefixes = 
VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"scroll-margin-right"; + }, + .@"scroll-margin-block-start" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"scroll-margin-block-start"; + }, + .@"scroll-margin-block-end" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"scroll-margin-block-end"; + }, + .@"scroll-margin-inline-start" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"scroll-margin-inline-start"; + }, + .@"scroll-margin-inline-end" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"scroll-margin-inline-end"; + }, + .@"scroll-margin-block" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"scroll-margin-block"; + }, + .@"scroll-margin-inline" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"scroll-margin-inline"; + }, + .@"scroll-margin" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"scroll-margin"; + }, + .@"scroll-padding-top" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"scroll-padding-top"; + }, + .@"scroll-padding-bottom" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"scroll-padding-bottom"; + }, + .@"scroll-padding-left" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"scroll-padding-left"; + }, + .@"scroll-padding-right" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"scroll-padding-right"; + }, + .@"scroll-padding-block-start" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"scroll-padding-block-start"; + }, + .@"scroll-padding-block-end" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"scroll-padding-block-end"; + }, + .@"scroll-padding-inline-start" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"scroll-padding-inline-start"; + }, + .@"scroll-padding-inline-end" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"scroll-padding-inline-end"; + }, + .@"scroll-padding-block" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"scroll-padding-block"; + }, + .@"scroll-padding-inline" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"scroll-padding-inline"; + }, + .@"scroll-padding" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"scroll-padding"; + }, + .@"font-weight" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"font-weight"; + }, + .@"font-size" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"font-size"; + }, + .@"font-stretch" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return 
.@"font-stretch"; + }, + .@"font-family" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"font-family"; + }, + .@"font-style" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"font-style"; + }, + .@"font-variant-caps" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"font-variant-caps"; + }, + .@"line-height" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"line-height"; + }, + .font => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .font; + }, + .@"text-decoration-color" => { + const allowed_prefixes = VendorPrefix{ .none = true, .webkit = true, .moz = true }; + if (allowed_prefixes.contains(pre)) return .{ .@"text-decoration-color" = pre }; + }, + .@"text-emphasis-color" => { + const allowed_prefixes = VendorPrefix{ .none = true, .webkit = true }; + if (allowed_prefixes.contains(pre)) return .{ .@"text-emphasis-color" = pre }; + }, + .@"text-shadow" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"text-shadow"; + }, + .direction => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .direction; + }, + .composes => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .composes; + }, + .@"mask-image" => { + const allowed_prefixes = VendorPrefix{ .none = true, .webkit = true }; + if (allowed_prefixes.contains(pre)) return .{ .@"mask-image" = pre }; + }, + .@"mask-mode" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"mask-mode"; + }, + .@"mask-repeat" => { + const allowed_prefixes = VendorPrefix{ .none = true, .webkit = true }; + if (allowed_prefixes.contains(pre)) return .{ .@"mask-repeat" = pre }; + }, + .@"mask-position-x" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"mask-position-x"; + }, + .@"mask-position-y" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"mask-position-y"; + }, + .@"mask-position" => { + const allowed_prefixes = VendorPrefix{ .none = true, .webkit = true }; + if (allowed_prefixes.contains(pre)) return .{ .@"mask-position" = pre }; + }, + .@"mask-clip" => { + const allowed_prefixes = VendorPrefix{ .none = true, .webkit = true }; + if (allowed_prefixes.contains(pre)) return .{ .@"mask-clip" = pre }; + }, + .@"mask-origin" => { + const allowed_prefixes = VendorPrefix{ .none = true, .webkit = true }; + if (allowed_prefixes.contains(pre)) return .{ .@"mask-origin" = pre }; + }, + .@"mask-size" => { + const allowed_prefixes = VendorPrefix{ .none = true, .webkit = true }; + if (allowed_prefixes.contains(pre)) return .{ .@"mask-size" = pre }; + }, + .@"mask-composite" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"mask-composite"; + }, + .@"mask-type" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"mask-type"; + }, + .mask => { + const allowed_prefixes = VendorPrefix{ .none = true, .webkit = true }; + if (allowed_prefixes.contains(pre)) return .{ .mask = pre }; + }, + .@"mask-border-source" => { + const 
allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"mask-border-source"; + }, + .@"mask-border-mode" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"mask-border-mode"; + }, + .@"mask-border-slice" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"mask-border-slice"; + }, + .@"mask-border-width" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"mask-border-width"; + }, + .@"mask-border-outset" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"mask-border-outset"; + }, + .@"mask-border-repeat" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"mask-border-repeat"; + }, + .@"mask-border" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"mask-border"; + }, + .@"-webkit-mask-composite" => { + const allowed_prefixes = VendorPrefix{ .none = true }; + if (allowed_prefixes.contains(pre)) return .@"-webkit-mask-composite"; + }, + .@"mask-source-type" => { + const allowed_prefixes = VendorPrefix{ .none = true, .webkit = true }; + if (allowed_prefixes.contains(pre)) return .{ .@"mask-source-type" = pre }; + }, + .@"mask-box-image" => { + const allowed_prefixes = VendorPrefix{ .none = true, .webkit = true }; + if (allowed_prefixes.contains(pre)) return .{ .@"mask-box-image" = pre }; + }, + .@"mask-box-image-source" => { + const allowed_prefixes = VendorPrefix{ .none = true, .webkit = true }; + if (allowed_prefixes.contains(pre)) return .{ .@"mask-box-image-source" = pre }; + }, + .@"mask-box-image-slice" => { + const allowed_prefixes = VendorPrefix{ .none = true, .webkit = true }; + if (allowed_prefixes.contains(pre)) return .{ .@"mask-box-image-slice" = pre }; + }, + .@"mask-box-image-width" => { + const allowed_prefixes = VendorPrefix{ .none = true, .webkit = true }; + if (allowed_prefixes.contains(pre)) return .{ .@"mask-box-image-width" = pre }; + }, + .@"mask-box-image-outset" => { + const allowed_prefixes = VendorPrefix{ .none = true, .webkit = true }; + if (allowed_prefixes.contains(pre)) return .{ .@"mask-box-image-outset" = pre }; + }, + .@"mask-box-image-repeat" => { + const allowed_prefixes = VendorPrefix{ .none = true, .webkit = true }; + if (allowed_prefixes.contains(pre)) return .{ .@"mask-box-image-repeat" = pre }; + }, + } + } + + return null; + } + + pub fn withPrefix(this: *const PropertyId, pre: VendorPrefix) PropertyId { + return switch (this.*) { + .@"background-color" => .@"background-color", + .@"background-image" => .@"background-image", + .@"background-position-x" => .@"background-position-x", + .@"background-position-y" => .@"background-position-y", + .@"background-position" => .@"background-position", + .@"background-size" => .@"background-size", + .@"background-repeat" => .@"background-repeat", + .@"background-attachment" => .@"background-attachment", + .@"background-clip" => .{ .@"background-clip" = pre }, + .@"background-origin" => .@"background-origin", + .background => .background, + .@"box-shadow" => .{ .@"box-shadow" = pre }, + .opacity => .opacity, + .color => .color, + .display => .display, + .visibility => .visibility, + .width => .width, + .height => .height, + .@"min-width" => .@"min-width", + .@"min-height" => .@"min-height", + .@"max-width" => .@"max-width", + 
.@"max-height" => .@"max-height", + .@"block-size" => .@"block-size", + .@"inline-size" => .@"inline-size", + .@"min-block-size" => .@"min-block-size", + .@"min-inline-size" => .@"min-inline-size", + .@"max-block-size" => .@"max-block-size", + .@"max-inline-size" => .@"max-inline-size", + .@"box-sizing" => .{ .@"box-sizing" = pre }, + .@"aspect-ratio" => .@"aspect-ratio", + .overflow => .overflow, + .@"overflow-x" => .@"overflow-x", + .@"overflow-y" => .@"overflow-y", + .@"text-overflow" => .{ .@"text-overflow" = pre }, + .position => .position, + .top => .top, + .bottom => .bottom, + .left => .left, + .right => .right, + .@"inset-block-start" => .@"inset-block-start", + .@"inset-block-end" => .@"inset-block-end", + .@"inset-inline-start" => .@"inset-inline-start", + .@"inset-inline-end" => .@"inset-inline-end", + .@"inset-block" => .@"inset-block", + .@"inset-inline" => .@"inset-inline", + .inset => .inset, + .@"border-spacing" => .@"border-spacing", + .@"border-top-color" => .@"border-top-color", + .@"border-bottom-color" => .@"border-bottom-color", + .@"border-left-color" => .@"border-left-color", + .@"border-right-color" => .@"border-right-color", + .@"border-block-start-color" => .@"border-block-start-color", + .@"border-block-end-color" => .@"border-block-end-color", + .@"border-inline-start-color" => .@"border-inline-start-color", + .@"border-inline-end-color" => .@"border-inline-end-color", + .@"border-top-style" => .@"border-top-style", + .@"border-bottom-style" => .@"border-bottom-style", + .@"border-left-style" => .@"border-left-style", + .@"border-right-style" => .@"border-right-style", + .@"border-block-start-style" => .@"border-block-start-style", + .@"border-block-end-style" => .@"border-block-end-style", + .@"border-inline-start-style" => .@"border-inline-start-style", + .@"border-inline-end-style" => .@"border-inline-end-style", + .@"border-top-width" => .@"border-top-width", + .@"border-bottom-width" => .@"border-bottom-width", + .@"border-left-width" => .@"border-left-width", + .@"border-right-width" => .@"border-right-width", + .@"border-block-start-width" => .@"border-block-start-width", + .@"border-block-end-width" => .@"border-block-end-width", + .@"border-inline-start-width" => .@"border-inline-start-width", + .@"border-inline-end-width" => .@"border-inline-end-width", + .@"border-top-left-radius" => .{ .@"border-top-left-radius" = pre }, + .@"border-top-right-radius" => .{ .@"border-top-right-radius" = pre }, + .@"border-bottom-left-radius" => .{ .@"border-bottom-left-radius" = pre }, + .@"border-bottom-right-radius" => .{ .@"border-bottom-right-radius" = pre }, + .@"border-start-start-radius" => .@"border-start-start-radius", + .@"border-start-end-radius" => .@"border-start-end-radius", + .@"border-end-start-radius" => .@"border-end-start-radius", + .@"border-end-end-radius" => .@"border-end-end-radius", + .@"border-radius" => .{ .@"border-radius" = pre }, + .@"border-image-source" => .@"border-image-source", + .@"border-image-outset" => .@"border-image-outset", + .@"border-image-repeat" => .@"border-image-repeat", + .@"border-image-width" => .@"border-image-width", + .@"border-image-slice" => .@"border-image-slice", + .@"border-image" => .{ .@"border-image" = pre }, + .@"border-color" => .@"border-color", + .@"border-style" => .@"border-style", + .@"border-width" => .@"border-width", + .@"border-block-color" => .@"border-block-color", + .@"border-block-style" => .@"border-block-style", + .@"border-block-width" => .@"border-block-width", + .@"border-inline-color" 
=> .@"border-inline-color", + .@"border-inline-style" => .@"border-inline-style", + .@"border-inline-width" => .@"border-inline-width", + .border => .border, + .@"border-top" => .@"border-top", + .@"border-bottom" => .@"border-bottom", + .@"border-left" => .@"border-left", + .@"border-right" => .@"border-right", + .@"border-block" => .@"border-block", + .@"border-block-start" => .@"border-block-start", + .@"border-block-end" => .@"border-block-end", + .@"border-inline" => .@"border-inline", + .@"border-inline-start" => .@"border-inline-start", + .@"border-inline-end" => .@"border-inline-end", + .outline => .outline, + .@"outline-color" => .@"outline-color", + .@"outline-style" => .@"outline-style", + .@"outline-width" => .@"outline-width", + .@"flex-direction" => .{ .@"flex-direction" = pre }, + .@"flex-wrap" => .{ .@"flex-wrap" = pre }, + .@"flex-flow" => .{ .@"flex-flow" = pre }, + .@"flex-grow" => .{ .@"flex-grow" = pre }, + .@"flex-shrink" => .{ .@"flex-shrink" = pre }, + .@"flex-basis" => .{ .@"flex-basis" = pre }, + .flex => .{ .flex = pre }, + .order => .{ .order = pre }, + .@"align-content" => .{ .@"align-content" = pre }, + .@"justify-content" => .{ .@"justify-content" = pre }, + .@"place-content" => .@"place-content", + .@"align-self" => .{ .@"align-self" = pre }, + .@"justify-self" => .@"justify-self", + .@"place-self" => .@"place-self", + .@"align-items" => .{ .@"align-items" = pre }, + .@"justify-items" => .@"justify-items", + .@"place-items" => .@"place-items", + .@"row-gap" => .@"row-gap", + .@"column-gap" => .@"column-gap", + .gap => .gap, + .@"box-orient" => .{ .@"box-orient" = pre }, + .@"box-direction" => .{ .@"box-direction" = pre }, + .@"box-ordinal-group" => .{ .@"box-ordinal-group" = pre }, + .@"box-align" => .{ .@"box-align" = pre }, + .@"box-flex" => .{ .@"box-flex" = pre }, + .@"box-flex-group" => .{ .@"box-flex-group" = pre }, + .@"box-pack" => .{ .@"box-pack" = pre }, + .@"box-lines" => .{ .@"box-lines" = pre }, + .@"flex-pack" => .{ .@"flex-pack" = pre }, + .@"flex-order" => .{ .@"flex-order" = pre }, + .@"flex-align" => .{ .@"flex-align" = pre }, + .@"flex-item-align" => .{ .@"flex-item-align" = pre }, + .@"flex-line-pack" => .{ .@"flex-line-pack" = pre }, + .@"flex-positive" => .{ .@"flex-positive" = pre }, + .@"flex-negative" => .{ .@"flex-negative" = pre }, + .@"flex-preferred-size" => .{ .@"flex-preferred-size" = pre }, + .@"margin-top" => .@"margin-top", + .@"margin-bottom" => .@"margin-bottom", + .@"margin-left" => .@"margin-left", + .@"margin-right" => .@"margin-right", + .@"margin-block-start" => .@"margin-block-start", + .@"margin-block-end" => .@"margin-block-end", + .@"margin-inline-start" => .@"margin-inline-start", + .@"margin-inline-end" => .@"margin-inline-end", + .@"margin-block" => .@"margin-block", + .@"margin-inline" => .@"margin-inline", + .margin => .margin, + .@"padding-top" => .@"padding-top", + .@"padding-bottom" => .@"padding-bottom", + .@"padding-left" => .@"padding-left", + .@"padding-right" => .@"padding-right", + .@"padding-block-start" => .@"padding-block-start", + .@"padding-block-end" => .@"padding-block-end", + .@"padding-inline-start" => .@"padding-inline-start", + .@"padding-inline-end" => .@"padding-inline-end", + .@"padding-block" => .@"padding-block", + .@"padding-inline" => .@"padding-inline", + .padding => .padding, + .@"scroll-margin-top" => .@"scroll-margin-top", + .@"scroll-margin-bottom" => .@"scroll-margin-bottom", + .@"scroll-margin-left" => .@"scroll-margin-left", + .@"scroll-margin-right" => 
.@"scroll-margin-right", + .@"scroll-margin-block-start" => .@"scroll-margin-block-start", + .@"scroll-margin-block-end" => .@"scroll-margin-block-end", + .@"scroll-margin-inline-start" => .@"scroll-margin-inline-start", + .@"scroll-margin-inline-end" => .@"scroll-margin-inline-end", + .@"scroll-margin-block" => .@"scroll-margin-block", + .@"scroll-margin-inline" => .@"scroll-margin-inline", + .@"scroll-margin" => .@"scroll-margin", + .@"scroll-padding-top" => .@"scroll-padding-top", + .@"scroll-padding-bottom" => .@"scroll-padding-bottom", + .@"scroll-padding-left" => .@"scroll-padding-left", + .@"scroll-padding-right" => .@"scroll-padding-right", + .@"scroll-padding-block-start" => .@"scroll-padding-block-start", + .@"scroll-padding-block-end" => .@"scroll-padding-block-end", + .@"scroll-padding-inline-start" => .@"scroll-padding-inline-start", + .@"scroll-padding-inline-end" => .@"scroll-padding-inline-end", + .@"scroll-padding-block" => .@"scroll-padding-block", + .@"scroll-padding-inline" => .@"scroll-padding-inline", + .@"scroll-padding" => .@"scroll-padding", + .@"font-weight" => .@"font-weight", + .@"font-size" => .@"font-size", + .@"font-stretch" => .@"font-stretch", + .@"font-family" => .@"font-family", + .@"font-style" => .@"font-style", + .@"font-variant-caps" => .@"font-variant-caps", + .@"line-height" => .@"line-height", + .font => .font, + .@"text-decoration-color" => .{ .@"text-decoration-color" = pre }, + .@"text-emphasis-color" => .{ .@"text-emphasis-color" = pre }, + .@"text-shadow" => .@"text-shadow", + .direction => .direction, + .composes => .composes, + .@"mask-image" => .{ .@"mask-image" = pre }, + .@"mask-mode" => .@"mask-mode", + .@"mask-repeat" => .{ .@"mask-repeat" = pre }, + .@"mask-position-x" => .@"mask-position-x", + .@"mask-position-y" => .@"mask-position-y", + .@"mask-position" => .{ .@"mask-position" = pre }, + .@"mask-clip" => .{ .@"mask-clip" = pre }, + .@"mask-origin" => .{ .@"mask-origin" = pre }, + .@"mask-size" => .{ .@"mask-size" = pre }, + .@"mask-composite" => .@"mask-composite", + .@"mask-type" => .@"mask-type", + .mask => .{ .mask = pre }, + .@"mask-border-source" => .@"mask-border-source", + .@"mask-border-mode" => .@"mask-border-mode", + .@"mask-border-slice" => .@"mask-border-slice", + .@"mask-border-width" => .@"mask-border-width", + .@"mask-border-outset" => .@"mask-border-outset", + .@"mask-border-repeat" => .@"mask-border-repeat", + .@"mask-border" => .@"mask-border", + .@"-webkit-mask-composite" => .@"-webkit-mask-composite", + .@"mask-source-type" => .{ .@"mask-source-type" = pre }, + .@"mask-box-image" => .{ .@"mask-box-image" = pre }, + .@"mask-box-image-source" => .{ .@"mask-box-image-source" = pre }, + .@"mask-box-image-slice" => .{ .@"mask-box-image-slice" = pre }, + .@"mask-box-image-width" => .{ .@"mask-box-image-width" = pre }, + .@"mask-box-image-outset" => .{ .@"mask-box-image-outset" = pre }, + .@"mask-box-image-repeat" => .{ .@"mask-box-image-repeat" = pre }, + else => this.*, + }; + } + + pub fn addPrefix(this: *PropertyId, pre: VendorPrefix) void { + return switch (this.*) { + .@"background-color" => {}, + .@"background-image" => {}, + .@"background-position-x" => {}, + .@"background-position-y" => {}, + .@"background-position" => {}, + .@"background-size" => {}, + .@"background-repeat" => {}, + .@"background-attachment" => {}, + .@"background-clip" => |*p| { + p.insert(pre); + }, + .@"background-origin" => {}, + .background => {}, + .@"box-shadow" => |*p| { + p.insert(pre); + }, + .opacity => {}, + .color => {}, + 
.display => {}, + .visibility => {}, + .width => {}, + .height => {}, + .@"min-width" => {}, + .@"min-height" => {}, + .@"max-width" => {}, + .@"max-height" => {}, + .@"block-size" => {}, + .@"inline-size" => {}, + .@"min-block-size" => {}, + .@"min-inline-size" => {}, + .@"max-block-size" => {}, + .@"max-inline-size" => {}, + .@"box-sizing" => |*p| { + p.insert(pre); + }, + .@"aspect-ratio" => {}, + .overflow => {}, + .@"overflow-x" => {}, + .@"overflow-y" => {}, + .@"text-overflow" => |*p| { + p.insert(pre); + }, + .position => {}, + .top => {}, + .bottom => {}, + .left => {}, + .right => {}, + .@"inset-block-start" => {}, + .@"inset-block-end" => {}, + .@"inset-inline-start" => {}, + .@"inset-inline-end" => {}, + .@"inset-block" => {}, + .@"inset-inline" => {}, + .inset => {}, + .@"border-spacing" => {}, + .@"border-top-color" => {}, + .@"border-bottom-color" => {}, + .@"border-left-color" => {}, + .@"border-right-color" => {}, + .@"border-block-start-color" => {}, + .@"border-block-end-color" => {}, + .@"border-inline-start-color" => {}, + .@"border-inline-end-color" => {}, + .@"border-top-style" => {}, + .@"border-bottom-style" => {}, + .@"border-left-style" => {}, + .@"border-right-style" => {}, + .@"border-block-start-style" => {}, + .@"border-block-end-style" => {}, + .@"border-inline-start-style" => {}, + .@"border-inline-end-style" => {}, + .@"border-top-width" => {}, + .@"border-bottom-width" => {}, + .@"border-left-width" => {}, + .@"border-right-width" => {}, + .@"border-block-start-width" => {}, + .@"border-block-end-width" => {}, + .@"border-inline-start-width" => {}, + .@"border-inline-end-width" => {}, + .@"border-top-left-radius" => |*p| { + p.insert(pre); + }, + .@"border-top-right-radius" => |*p| { + p.insert(pre); + }, + .@"border-bottom-left-radius" => |*p| { + p.insert(pre); + }, + .@"border-bottom-right-radius" => |*p| { + p.insert(pre); + }, + .@"border-start-start-radius" => {}, + .@"border-start-end-radius" => {}, + .@"border-end-start-radius" => {}, + .@"border-end-end-radius" => {}, + .@"border-radius" => |*p| { + p.insert(pre); + }, + .@"border-image-source" => {}, + .@"border-image-outset" => {}, + .@"border-image-repeat" => {}, + .@"border-image-width" => {}, + .@"border-image-slice" => {}, + .@"border-image" => |*p| { + p.insert(pre); + }, + .@"border-color" => {}, + .@"border-style" => {}, + .@"border-width" => {}, + .@"border-block-color" => {}, + .@"border-block-style" => {}, + .@"border-block-width" => {}, + .@"border-inline-color" => {}, + .@"border-inline-style" => {}, + .@"border-inline-width" => {}, + .border => {}, + .@"border-top" => {}, + .@"border-bottom" => {}, + .@"border-left" => {}, + .@"border-right" => {}, + .@"border-block" => {}, + .@"border-block-start" => {}, + .@"border-block-end" => {}, + .@"border-inline" => {}, + .@"border-inline-start" => {}, + .@"border-inline-end" => {}, + .outline => {}, + .@"outline-color" => {}, + .@"outline-style" => {}, + .@"outline-width" => {}, + .@"flex-direction" => |*p| { + p.insert(pre); + }, + .@"flex-wrap" => |*p| { + p.insert(pre); + }, + .@"flex-flow" => |*p| { + p.insert(pre); + }, + .@"flex-grow" => |*p| { + p.insert(pre); + }, + .@"flex-shrink" => |*p| { + p.insert(pre); + }, + .@"flex-basis" => |*p| { + p.insert(pre); + }, + .flex => |*p| { + p.insert(pre); + }, + .order => |*p| { + p.insert(pre); + }, + .@"align-content" => |*p| { + p.insert(pre); + }, + .@"justify-content" => |*p| { + p.insert(pre); + }, + .@"place-content" => {}, + .@"align-self" => |*p| { + p.insert(pre); + }, + 
.@"justify-self" => {}, + .@"place-self" => {}, + .@"align-items" => |*p| { + p.insert(pre); + }, + .@"justify-items" => {}, + .@"place-items" => {}, + .@"row-gap" => {}, + .@"column-gap" => {}, + .gap => {}, + .@"box-orient" => |*p| { + p.insert(pre); + }, + .@"box-direction" => |*p| { + p.insert(pre); + }, + .@"box-ordinal-group" => |*p| { + p.insert(pre); + }, + .@"box-align" => |*p| { + p.insert(pre); + }, + .@"box-flex" => |*p| { + p.insert(pre); + }, + .@"box-flex-group" => |*p| { + p.insert(pre); + }, + .@"box-pack" => |*p| { + p.insert(pre); + }, + .@"box-lines" => |*p| { + p.insert(pre); + }, + .@"flex-pack" => |*p| { + p.insert(pre); + }, + .@"flex-order" => |*p| { + p.insert(pre); + }, + .@"flex-align" => |*p| { + p.insert(pre); + }, + .@"flex-item-align" => |*p| { + p.insert(pre); + }, + .@"flex-line-pack" => |*p| { + p.insert(pre); + }, + .@"flex-positive" => |*p| { + p.insert(pre); + }, + .@"flex-negative" => |*p| { + p.insert(pre); + }, + .@"flex-preferred-size" => |*p| { + p.insert(pre); + }, + .@"margin-top" => {}, + .@"margin-bottom" => {}, + .@"margin-left" => {}, + .@"margin-right" => {}, + .@"margin-block-start" => {}, + .@"margin-block-end" => {}, + .@"margin-inline-start" => {}, + .@"margin-inline-end" => {}, + .@"margin-block" => {}, + .@"margin-inline" => {}, + .margin => {}, + .@"padding-top" => {}, + .@"padding-bottom" => {}, + .@"padding-left" => {}, + .@"padding-right" => {}, + .@"padding-block-start" => {}, + .@"padding-block-end" => {}, + .@"padding-inline-start" => {}, + .@"padding-inline-end" => {}, + .@"padding-block" => {}, + .@"padding-inline" => {}, + .padding => {}, + .@"scroll-margin-top" => {}, + .@"scroll-margin-bottom" => {}, + .@"scroll-margin-left" => {}, + .@"scroll-margin-right" => {}, + .@"scroll-margin-block-start" => {}, + .@"scroll-margin-block-end" => {}, + .@"scroll-margin-inline-start" => {}, + .@"scroll-margin-inline-end" => {}, + .@"scroll-margin-block" => {}, + .@"scroll-margin-inline" => {}, + .@"scroll-margin" => {}, + .@"scroll-padding-top" => {}, + .@"scroll-padding-bottom" => {}, + .@"scroll-padding-left" => {}, + .@"scroll-padding-right" => {}, + .@"scroll-padding-block-start" => {}, + .@"scroll-padding-block-end" => {}, + .@"scroll-padding-inline-start" => {}, + .@"scroll-padding-inline-end" => {}, + .@"scroll-padding-block" => {}, + .@"scroll-padding-inline" => {}, + .@"scroll-padding" => {}, + .@"font-weight" => {}, + .@"font-size" => {}, + .@"font-stretch" => {}, + .@"font-family" => {}, + .@"font-style" => {}, + .@"font-variant-caps" => {}, + .@"line-height" => {}, + .font => {}, + .@"text-decoration-color" => |*p| { + p.insert(pre); + }, + .@"text-emphasis-color" => |*p| { + p.insert(pre); + }, + .@"text-shadow" => {}, + .direction => {}, + .composes => {}, + .@"mask-image" => |*p| { + p.insert(pre); + }, + .@"mask-mode" => {}, + .@"mask-repeat" => |*p| { + p.insert(pre); + }, + .@"mask-position-x" => {}, + .@"mask-position-y" => {}, + .@"mask-position" => |*p| { + p.insert(pre); + }, + .@"mask-clip" => |*p| { + p.insert(pre); + }, + .@"mask-origin" => |*p| { + p.insert(pre); + }, + .@"mask-size" => |*p| { + p.insert(pre); + }, + .@"mask-composite" => {}, + .@"mask-type" => {}, + .mask => |*p| { + p.insert(pre); + }, + .@"mask-border-source" => {}, + .@"mask-border-mode" => {}, + .@"mask-border-slice" => {}, + .@"mask-border-width" => {}, + .@"mask-border-outset" => {}, + .@"mask-border-repeat" => {}, + .@"mask-border" => {}, + .@"-webkit-mask-composite" => {}, + .@"mask-source-type" => |*p| { + p.insert(pre); + }, + 
.@"mask-box-image" => |*p| { + p.insert(pre); + }, + .@"mask-box-image-source" => |*p| { + p.insert(pre); + }, + .@"mask-box-image-slice" => |*p| { + p.insert(pre); + }, + .@"mask-box-image-width" => |*p| { + p.insert(pre); + }, + .@"mask-box-image-outset" => |*p| { + p.insert(pre); + }, + .@"mask-box-image-repeat" => |*p| { + p.insert(pre); + }, + else => {}, + }; + } + + pub inline fn deepClone(this: *const PropertyId, _: std.mem.Allocator) PropertyId { + return this.*; + } + + pub fn eql(lhs: *const PropertyId, rhs: *const PropertyId) bool { + if (@intFromEnum(lhs.*) != @intFromEnum(rhs.*)) return false; + inline for (bun.meta.EnumFields(PropertyId), std.meta.fields(PropertyId)) |enum_field, union_field| { + if (enum_field.value == @intFromEnum(lhs.*)) { + if (comptime union_field.type == css.VendorPrefix) { + return @field(lhs, union_field.name).eql(@field(rhs, union_field.name)); + } else { + return true; + } + } + } + unreachable; + } + + pub fn hash(this: *const PropertyId, hasher: *std.hash.Wyhash) void { + const tag = @intFromEnum(this.*); + hasher.update(std.mem.asBytes(&tag)); + } +}; +pub const PropertyIdTag = enum(u16) { + @"background-color", + @"background-image", + @"background-position-x", + @"background-position-y", + @"background-position", + @"background-size", + @"background-repeat", + @"background-attachment", + @"background-clip", + @"background-origin", + background, + @"box-shadow", + opacity, + color, + display, + visibility, + width, + height, + @"min-width", + @"min-height", + @"max-width", + @"max-height", + @"block-size", + @"inline-size", + @"min-block-size", + @"min-inline-size", + @"max-block-size", + @"max-inline-size", + @"box-sizing", + @"aspect-ratio", + overflow, + @"overflow-x", + @"overflow-y", + @"text-overflow", + position, + top, + bottom, + left, + right, + @"inset-block-start", + @"inset-block-end", + @"inset-inline-start", + @"inset-inline-end", + @"inset-block", + @"inset-inline", + inset, + @"border-spacing", + @"border-top-color", + @"border-bottom-color", + @"border-left-color", + @"border-right-color", + @"border-block-start-color", + @"border-block-end-color", + @"border-inline-start-color", + @"border-inline-end-color", + @"border-top-style", + @"border-bottom-style", + @"border-left-style", + @"border-right-style", + @"border-block-start-style", + @"border-block-end-style", + @"border-inline-start-style", + @"border-inline-end-style", + @"border-top-width", + @"border-bottom-width", + @"border-left-width", + @"border-right-width", + @"border-block-start-width", + @"border-block-end-width", + @"border-inline-start-width", + @"border-inline-end-width", + @"border-top-left-radius", + @"border-top-right-radius", + @"border-bottom-left-radius", + @"border-bottom-right-radius", + @"border-start-start-radius", + @"border-start-end-radius", + @"border-end-start-radius", + @"border-end-end-radius", + @"border-radius", + @"border-image-source", + @"border-image-outset", + @"border-image-repeat", + @"border-image-width", + @"border-image-slice", + @"border-image", + @"border-color", + @"border-style", + @"border-width", + @"border-block-color", + @"border-block-style", + @"border-block-width", + @"border-inline-color", + @"border-inline-style", + @"border-inline-width", + border, + @"border-top", + @"border-bottom", + @"border-left", + @"border-right", + @"border-block", + @"border-block-start", + @"border-block-end", + @"border-inline", + @"border-inline-start", + @"border-inline-end", + outline, + @"outline-color", + @"outline-style", + 
@"outline-width", + @"flex-direction", + @"flex-wrap", + @"flex-flow", + @"flex-grow", + @"flex-shrink", + @"flex-basis", + flex, + order, + @"align-content", + @"justify-content", + @"place-content", + @"align-self", + @"justify-self", + @"place-self", + @"align-items", + @"justify-items", + @"place-items", + @"row-gap", + @"column-gap", + gap, + @"box-orient", + @"box-direction", + @"box-ordinal-group", + @"box-align", + @"box-flex", + @"box-flex-group", + @"box-pack", + @"box-lines", + @"flex-pack", + @"flex-order", + @"flex-align", + @"flex-item-align", + @"flex-line-pack", + @"flex-positive", + @"flex-negative", + @"flex-preferred-size", + @"margin-top", + @"margin-bottom", + @"margin-left", + @"margin-right", + @"margin-block-start", + @"margin-block-end", + @"margin-inline-start", + @"margin-inline-end", + @"margin-block", + @"margin-inline", + margin, + @"padding-top", + @"padding-bottom", + @"padding-left", + @"padding-right", + @"padding-block-start", + @"padding-block-end", + @"padding-inline-start", + @"padding-inline-end", + @"padding-block", + @"padding-inline", + padding, + @"scroll-margin-top", + @"scroll-margin-bottom", + @"scroll-margin-left", + @"scroll-margin-right", + @"scroll-margin-block-start", + @"scroll-margin-block-end", + @"scroll-margin-inline-start", + @"scroll-margin-inline-end", + @"scroll-margin-block", + @"scroll-margin-inline", + @"scroll-margin", + @"scroll-padding-top", + @"scroll-padding-bottom", + @"scroll-padding-left", + @"scroll-padding-right", + @"scroll-padding-block-start", + @"scroll-padding-block-end", + @"scroll-padding-inline-start", + @"scroll-padding-inline-end", + @"scroll-padding-block", + @"scroll-padding-inline", + @"scroll-padding", + @"font-weight", + @"font-size", + @"font-stretch", + @"font-family", + @"font-style", + @"font-variant-caps", + @"line-height", + font, + @"text-decoration-color", + @"text-emphasis-color", + @"text-shadow", + direction, + composes, + @"mask-image", + @"mask-mode", + @"mask-repeat", + @"mask-position-x", + @"mask-position-y", + @"mask-position", + @"mask-clip", + @"mask-origin", + @"mask-size", + @"mask-composite", + @"mask-type", + mask, + @"mask-border-source", + @"mask-border-mode", + @"mask-border-slice", + @"mask-border-width", + @"mask-border-outset", + @"mask-border-repeat", + @"mask-border", + @"-webkit-mask-composite", + @"mask-source-type", + @"mask-box-image", + @"mask-box-image-source", + @"mask-box-image-slice", + @"mask-box-image-width", + @"mask-box-image-outset", + @"mask-box-image-repeat", + all, + unparsed, + custom, + + /// Helper function used in comptime code to know whether to access the underlying value + /// with tuple indexing syntax because it may have a VendorPrefix associated with it. 
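+ /// + /// For example (hypothetical usage sketch), comptime serialization code can branch on this flag: + /// + ///     if (comptime tag.hasVendorPrefix()) { + ///         // payload holds the value together with a VendorPrefix; access it by tuple index + ///     } else { + ///         // payload holds the value directly + ///     }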
+ pub fn hasVendorPrefix(this: PropertyIdTag) bool { + return switch (this) { + .@"background-color" => false, + .@"background-image" => false, + .@"background-position-x" => false, + .@"background-position-y" => false, + .@"background-position" => false, + .@"background-size" => false, + .@"background-repeat" => false, + .@"background-attachment" => false, + .@"background-clip" => true, + .@"background-origin" => false, + .background => false, + .@"box-shadow" => true, + .opacity => false, + .color => false, + .display => false, + .visibility => false, + .width => false, + .height => false, + .@"min-width" => false, + .@"min-height" => false, + .@"max-width" => false, + .@"max-height" => false, + .@"block-size" => false, + .@"inline-size" => false, + .@"min-block-size" => false, + .@"min-inline-size" => false, + .@"max-block-size" => false, + .@"max-inline-size" => false, + .@"box-sizing" => true, + .@"aspect-ratio" => false, + .overflow => false, + .@"overflow-x" => false, + .@"overflow-y" => false, + .@"text-overflow" => true, + .position => false, + .top => false, + .bottom => false, + .left => false, + .right => false, + .@"inset-block-start" => false, + .@"inset-block-end" => false, + .@"inset-inline-start" => false, + .@"inset-inline-end" => false, + .@"inset-block" => false, + .@"inset-inline" => false, + .inset => false, + .@"border-spacing" => false, + .@"border-top-color" => false, + .@"border-bottom-color" => false, + .@"border-left-color" => false, + .@"border-right-color" => false, + .@"border-block-start-color" => false, + .@"border-block-end-color" => false, + .@"border-inline-start-color" => false, + .@"border-inline-end-color" => false, + .@"border-top-style" => false, + .@"border-bottom-style" => false, + .@"border-left-style" => false, + .@"border-right-style" => false, + .@"border-block-start-style" => false, + .@"border-block-end-style" => false, + .@"border-inline-start-style" => false, + .@"border-inline-end-style" => false, + .@"border-top-width" => false, + .@"border-bottom-width" => false, + .@"border-left-width" => false, + .@"border-right-width" => false, + .@"border-block-start-width" => false, + .@"border-block-end-width" => false, + .@"border-inline-start-width" => false, + .@"border-inline-end-width" => false, + .@"border-top-left-radius" => true, + .@"border-top-right-radius" => true, + .@"border-bottom-left-radius" => true, + .@"border-bottom-right-radius" => true, + .@"border-start-start-radius" => false, + .@"border-start-end-radius" => false, + .@"border-end-start-radius" => false, + .@"border-end-end-radius" => false, + .@"border-radius" => true, + .@"border-image-source" => false, + .@"border-image-outset" => false, + .@"border-image-repeat" => false, + .@"border-image-width" => false, + .@"border-image-slice" => false, + .@"border-image" => true, + .@"border-color" => false, + .@"border-style" => false, + .@"border-width" => false, + .@"border-block-color" => false, + .@"border-block-style" => false, + .@"border-block-width" => false, + .@"border-inline-color" => false, + .@"border-inline-style" => false, + .@"border-inline-width" => false, + .border => false, + .@"border-top" => false, + .@"border-bottom" => false, + .@"border-left" => false, + .@"border-right" => false, + .@"border-block" => false, + .@"border-block-start" => false, + .@"border-block-end" => false, + .@"border-inline" => false, + .@"border-inline-start" => false, + .@"border-inline-end" => false, + .outline => false, + .@"outline-color" => false, + .@"outline-style" => false, + 
.@"outline-width" => false, + .@"flex-direction" => true, + .@"flex-wrap" => true, + .@"flex-flow" => true, + .@"flex-grow" => true, + .@"flex-shrink" => true, + .@"flex-basis" => true, + .flex => true, + .order => true, + .@"align-content" => true, + .@"justify-content" => true, + .@"place-content" => false, + .@"align-self" => true, + .@"justify-self" => false, + .@"place-self" => false, + .@"align-items" => true, + .@"justify-items" => false, + .@"place-items" => false, + .@"row-gap" => false, + .@"column-gap" => false, + .gap => false, + .@"box-orient" => true, + .@"box-direction" => true, + .@"box-ordinal-group" => true, + .@"box-align" => true, + .@"box-flex" => true, + .@"box-flex-group" => true, + .@"box-pack" => true, + .@"box-lines" => true, + .@"flex-pack" => true, + .@"flex-order" => true, + .@"flex-align" => true, + .@"flex-item-align" => true, + .@"flex-line-pack" => true, + .@"flex-positive" => true, + .@"flex-negative" => true, + .@"flex-preferred-size" => true, + .@"margin-top" => false, + .@"margin-bottom" => false, + .@"margin-left" => false, + .@"margin-right" => false, + .@"margin-block-start" => false, + .@"margin-block-end" => false, + .@"margin-inline-start" => false, + .@"margin-inline-end" => false, + .@"margin-block" => false, + .@"margin-inline" => false, + .margin => false, + .@"padding-top" => false, + .@"padding-bottom" => false, + .@"padding-left" => false, + .@"padding-right" => false, + .@"padding-block-start" => false, + .@"padding-block-end" => false, + .@"padding-inline-start" => false, + .@"padding-inline-end" => false, + .@"padding-block" => false, + .@"padding-inline" => false, + .padding => false, + .@"scroll-margin-top" => false, + .@"scroll-margin-bottom" => false, + .@"scroll-margin-left" => false, + .@"scroll-margin-right" => false, + .@"scroll-margin-block-start" => false, + .@"scroll-margin-block-end" => false, + .@"scroll-margin-inline-start" => false, + .@"scroll-margin-inline-end" => false, + .@"scroll-margin-block" => false, + .@"scroll-margin-inline" => false, + .@"scroll-margin" => false, + .@"scroll-padding-top" => false, + .@"scroll-padding-bottom" => false, + .@"scroll-padding-left" => false, + .@"scroll-padding-right" => false, + .@"scroll-padding-block-start" => false, + .@"scroll-padding-block-end" => false, + .@"scroll-padding-inline-start" => false, + .@"scroll-padding-inline-end" => false, + .@"scroll-padding-block" => false, + .@"scroll-padding-inline" => false, + .@"scroll-padding" => false, + .@"font-weight" => false, + .@"font-size" => false, + .@"font-stretch" => false, + .@"font-family" => false, + .@"font-style" => false, + .@"font-variant-caps" => false, + .@"line-height" => false, + .font => false, + .@"text-decoration-color" => true, + .@"text-emphasis-color" => true, + .@"text-shadow" => false, + .direction => false, + .composes => false, + .@"mask-image" => true, + .@"mask-mode" => false, + .@"mask-repeat" => true, + .@"mask-position-x" => false, + .@"mask-position-y" => false, + .@"mask-position" => true, + .@"mask-clip" => true, + .@"mask-origin" => true, + .@"mask-size" => true, + .@"mask-composite" => false, + .@"mask-type" => false, + .mask => true, + .@"mask-border-source" => false, + .@"mask-border-mode" => false, + .@"mask-border-slice" => false, + .@"mask-border-width" => false, + .@"mask-border-outset" => false, + .@"mask-border-repeat" => false, + .@"mask-border" => false, + .@"-webkit-mask-composite" => false, + .@"mask-source-type" => true, + .@"mask-box-image" => true, + .@"mask-box-image-source" => 
true, + .@"mask-box-image-slice" => true, + .@"mask-box-image-width" => true, + .@"mask-box-image-outset" => true, + .@"mask-box-image-repeat" => true, + .unparsed => false, + .custom => false, + .all => false, + }; + } + + /// Helper function used in comptime code to get the Zig type used to represent the value + /// of each property, e.g. `CssColor` for `background-color`. + pub fn valueType(this: PropertyIdTag) type { + return switch (this) { + .@"background-color" => CssColor, + .@"background-image" => SmallList(Image, 1), + .@"background-position-x" => SmallList(css_values.position.HorizontalPosition, 1), + .@"background-position-y" => SmallList(css_values.position.VerticalPosition, 1), + .@"background-position" => SmallList(background.BackgroundPosition, 1), + .@"background-size" => SmallList(background.BackgroundSize, 1), + .@"background-repeat" => SmallList(background.BackgroundRepeat, 1), + .@"background-attachment" => SmallList(background.BackgroundAttachment, 1), + .@"background-clip" => SmallList(background.BackgroundClip, 1), + .@"background-origin" => SmallList(background.BackgroundOrigin, 1), + .background => SmallList(background.Background, 1), + .@"box-shadow" => SmallList(box_shadow.BoxShadow, 1), + .opacity => css.css_values.alpha.AlphaValue, + .color => CssColor, + .display => display.Display, + .visibility => display.Visibility, + .width => size.Size, + .height => size.Size, + .@"min-width" => size.Size, + .@"min-height" => size.Size, + .@"max-width" => size.MaxSize, + .@"max-height" => size.MaxSize, + .@"block-size" => size.Size, + .@"inline-size" => size.Size, + .@"min-block-size" => size.Size, + .@"min-inline-size" => size.Size, + .@"max-block-size" => size.MaxSize, + .@"max-inline-size" => size.MaxSize, + .@"box-sizing" => size.BoxSizing, + .@"aspect-ratio" => size.AspectRatio, + .overflow => overflow.Overflow, + .@"overflow-x" => overflow.OverflowKeyword, + .@"overflow-y" => overflow.OverflowKeyword, + .@"text-overflow" => overflow.TextOverflow, + .position => position.Position, + .top => LengthPercentageOrAuto, + .bottom => LengthPercentageOrAuto, + .left => LengthPercentageOrAuto, + .right => LengthPercentageOrAuto, + .@"inset-block-start" => LengthPercentageOrAuto, + .@"inset-block-end" => LengthPercentageOrAuto, + .@"inset-inline-start" => LengthPercentageOrAuto, + .@"inset-inline-end" => LengthPercentageOrAuto, + .@"inset-block" => margin_padding.InsetBlock, + .@"inset-inline" => margin_padding.InsetInline, + .inset => margin_padding.Inset, + .@"border-spacing" => css.css_values.size.Size2D(Length), + .@"border-top-color" => CssColor, + .@"border-bottom-color" => CssColor, + .@"border-left-color" => CssColor, + .@"border-right-color" => CssColor, + .@"border-block-start-color" => CssColor, + .@"border-block-end-color" => CssColor, + .@"border-inline-start-color" => CssColor, + .@"border-inline-end-color" => CssColor, + .@"border-top-style" => border.LineStyle, + .@"border-bottom-style" => border.LineStyle, + .@"border-left-style" => border.LineStyle, + .@"border-right-style" => border.LineStyle, + .@"border-block-start-style" => border.LineStyle, + .@"border-block-end-style" => border.LineStyle, + .@"border-inline-start-style" => border.LineStyle, + .@"border-inline-end-style" => border.LineStyle, + .@"border-top-width" => BorderSideWidth, + .@"border-bottom-width" => BorderSideWidth, + .@"border-left-width" => BorderSideWidth, + .@"border-right-width" => BorderSideWidth, + .@"border-block-start-width" => BorderSideWidth, +
.@"border-block-end-width" => BorderSideWidth, + .@"border-inline-start-width" => BorderSideWidth, + .@"border-inline-end-width" => BorderSideWidth, + .@"border-top-left-radius" => Size2D(LengthPercentage), + .@"border-top-right-radius" => Size2D(LengthPercentage), + .@"border-bottom-left-radius" => Size2D(LengthPercentage), + .@"border-bottom-right-radius" => Size2D(LengthPercentage), + .@"border-start-start-radius" => Size2D(LengthPercentage), + .@"border-start-end-radius" => Size2D(LengthPercentage), + .@"border-end-start-radius" => Size2D(LengthPercentage), + .@"border-end-end-radius" => Size2D(LengthPercentage), + .@"border-radius" => BorderRadius, + .@"border-image-source" => Image, + .@"border-image-outset" => Rect(LengthOrNumber), + .@"border-image-repeat" => BorderImageRepeat, + .@"border-image-width" => Rect(BorderImageSideWidth), + .@"border-image-slice" => BorderImageSlice, + .@"border-image" => BorderImage, + .@"border-color" => BorderColor, + .@"border-style" => BorderStyle, + .@"border-width" => BorderWidth, + .@"border-block-color" => BorderBlockColor, + .@"border-block-style" => BorderBlockStyle, + .@"border-block-width" => BorderBlockWidth, + .@"border-inline-color" => BorderInlineColor, + .@"border-inline-style" => BorderInlineStyle, + .@"border-inline-width" => BorderInlineWidth, + .border => Border, + .@"border-top" => BorderTop, + .@"border-bottom" => BorderBottom, + .@"border-left" => BorderLeft, + .@"border-right" => BorderRight, + .@"border-block" => BorderBlock, + .@"border-block-start" => BorderBlockStart, + .@"border-block-end" => BorderBlockEnd, + .@"border-inline" => BorderInline, + .@"border-inline-start" => BorderInlineStart, + .@"border-inline-end" => BorderInlineEnd, + .outline => Outline, + .@"outline-color" => CssColor, + .@"outline-style" => OutlineStyle, + .@"outline-width" => BorderSideWidth, + .@"flex-direction" => FlexDirection, + .@"flex-wrap" => FlexWrap, + .@"flex-flow" => FlexFlow, + .@"flex-grow" => CSSNumber, + .@"flex-shrink" => CSSNumber, + .@"flex-basis" => LengthPercentageOrAuto, + .flex => Flex, + .order => CSSInteger, + .@"align-content" => AlignContent, + .@"justify-content" => JustifyContent, + .@"place-content" => PlaceContent, + .@"align-self" => AlignSelf, + .@"justify-self" => JustifySelf, + .@"place-self" => PlaceSelf, + .@"align-items" => AlignItems, + .@"justify-items" => JustifyItems, + .@"place-items" => PlaceItems, + .@"row-gap" => GapValue, + .@"column-gap" => GapValue, + .gap => Gap, + .@"box-orient" => BoxOrient, + .@"box-direction" => BoxDirection, + .@"box-ordinal-group" => CSSInteger, + .@"box-align" => BoxAlign, + .@"box-flex" => CSSNumber, + .@"box-flex-group" => CSSInteger, + .@"box-pack" => BoxPack, + .@"box-lines" => BoxLines, + .@"flex-pack" => FlexPack, + .@"flex-order" => CSSInteger, + .@"flex-align" => BoxAlign, + .@"flex-item-align" => FlexItemAlign, + .@"flex-line-pack" => FlexLinePack, + .@"flex-positive" => CSSNumber, + .@"flex-negative" => CSSNumber, + .@"flex-preferred-size" => LengthPercentageOrAuto, + .@"margin-top" => LengthPercentageOrAuto, + .@"margin-bottom" => LengthPercentageOrAuto, + .@"margin-left" => LengthPercentageOrAuto, + .@"margin-right" => LengthPercentageOrAuto, + .@"margin-block-start" => LengthPercentageOrAuto, + .@"margin-block-end" => LengthPercentageOrAuto, + .@"margin-inline-start" => LengthPercentageOrAuto, + .@"margin-inline-end" => LengthPercentageOrAuto, + .@"margin-block" => MarginBlock, + .@"margin-inline" => MarginInline, + .margin => Margin, + .@"padding-top" => 
LengthPercentageOrAuto, + .@"padding-bottom" => LengthPercentageOrAuto, + .@"padding-left" => LengthPercentageOrAuto, + .@"padding-right" => LengthPercentageOrAuto, + .@"padding-block-start" => LengthPercentageOrAuto, + .@"padding-block-end" => LengthPercentageOrAuto, + .@"padding-inline-start" => LengthPercentageOrAuto, + .@"padding-inline-end" => LengthPercentageOrAuto, + .@"padding-block" => PaddingBlock, + .@"padding-inline" => PaddingInline, + .padding => Padding, + .@"scroll-margin-top" => LengthPercentageOrAuto, + .@"scroll-margin-bottom" => LengthPercentageOrAuto, + .@"scroll-margin-left" => LengthPercentageOrAuto, + .@"scroll-margin-right" => LengthPercentageOrAuto, + .@"scroll-margin-block-start" => LengthPercentageOrAuto, + .@"scroll-margin-block-end" => LengthPercentageOrAuto, + .@"scroll-margin-inline-start" => LengthPercentageOrAuto, + .@"scroll-margin-inline-end" => LengthPercentageOrAuto, + .@"scroll-margin-block" => ScrollMarginBlock, + .@"scroll-margin-inline" => ScrollMarginInline, + .@"scroll-margin" => ScrollMargin, + .@"scroll-padding-top" => LengthPercentageOrAuto, + .@"scroll-padding-bottom" => LengthPercentageOrAuto, + .@"scroll-padding-left" => LengthPercentageOrAuto, + .@"scroll-padding-right" => LengthPercentageOrAuto, + .@"scroll-padding-block-start" => LengthPercentageOrAuto, + .@"scroll-padding-block-end" => LengthPercentageOrAuto, + .@"scroll-padding-inline-start" => LengthPercentageOrAuto, + .@"scroll-padding-inline-end" => LengthPercentageOrAuto, + .@"scroll-padding-block" => ScrollPaddingBlock, + .@"scroll-padding-inline" => ScrollPaddingInline, + .@"scroll-padding" => ScrollPadding, + .@"font-weight" => FontWeight, + .@"font-size" => FontSize, + .@"font-stretch" => FontStretch, + .@"font-family" => BabyList(FontFamily), + .@"font-style" => FontStyle, + .@"font-variant-caps" => FontVariantCaps, + .@"line-height" => LineHeight, + .font => Font, + .@"text-decoration-color" => CssColor, + .@"text-emphasis-color" => CssColor, + .@"text-shadow" => SmallList(TextShadow, 1), + .direction => Direction, + .composes => Composes, + .@"mask-image" => SmallList(Image, 1), + .@"mask-mode" => SmallList(MaskMode, 1), + .@"mask-repeat" => SmallList(BackgroundRepeat, 1), + .@"mask-position-x" => SmallList(HorizontalPosition, 1), + .@"mask-position-y" => SmallList(VerticalPosition, 1), + .@"mask-position" => SmallList(Position, 1), + .@"mask-clip" => SmallList(MaskClip, 1), + .@"mask-origin" => SmallList(GeometryBox, 1), + .@"mask-size" => SmallList(BackgroundSize, 1), + .@"mask-composite" => SmallList(MaskComposite, 1), + .@"mask-type" => MaskType, + .mask => SmallList(Mask, 1), + .@"mask-border-source" => Image, + .@"mask-border-mode" => MaskBorderMode, + .@"mask-border-slice" => BorderImageSlice, + .@"mask-border-width" => Rect(BorderImageSideWidth), + .@"mask-border-outset" => Rect(LengthOrNumber), + .@"mask-border-repeat" => BorderImageRepeat, + .@"mask-border" => MaskBorder, + .@"-webkit-mask-composite" => SmallList(WebKitMaskComposite, 1), + .@"mask-source-type" => SmallList(WebKitMaskSourceType, 1), + .@"mask-box-image" => BorderImage, + .@"mask-box-image-source" => Image, + .@"mask-box-image-slice" => BorderImageSlice, + .@"mask-box-image-width" => Rect(BorderImageSideWidth), + .@"mask-box-image-outset" => Rect(LengthOrNumber), + .@"mask-box-image-repeat" => BorderImageRepeat, + .all => CSSWideKeyword, + .unparsed => UnparsedProperty, + .custom => CustomProperty, + }; + } +}; diff --git a/src/css/properties/properties_impl.zig 
b/src/css/properties/properties_impl.zig new file mode 100644 index 0000000000..6a56e20d0e --- /dev/null +++ b/src/css/properties/properties_impl.zig @@ -0,0 +1,119 @@ +const std = @import("std"); +const bun = @import("root").bun; +const Allocator = std.mem.Allocator; + +pub const css = @import("../css_parser.zig"); + +const CustomPropertyName = css.css_properties.CustomPropertyName; + +const Printer = css.Printer; +const PrintErr = css.PrintErr; +const VendorPrefix = css.VendorPrefix; +const Error = css.Error; + +const PropertyId = css.PropertyId; +const Property = css.Property; + +pub fn PropertyIdImpl() type { + return struct { + pub fn toCss(this: *const PropertyId, comptime W: type, dest: *Printer(W)) PrintErr!void { + var first = true; + const name = this.name(this); + const prefix_value = this.prefix().orNone(); + inline for (VendorPrefix.FIELDS) |field| { + if (@field(prefix_value, field)) { + var prefix: VendorPrefix = .{}; + @field(prefix, field) = true; + + if (first) { + first = false; + } else { + try dest.delim(',', false); + } + try prefix.toCss(W, dest); + try dest.writeStr(name); + } + } + } + + pub fn parse(input: *css.Parser) css.Result(PropertyId) { + const name = switch (input.expectIdent()) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + return .{ .result = fromString(name) }; + } + + pub fn fromStr(name: []const u8) PropertyId { + return fromString(name); + } + + pub fn fromString(name_: []const u8) PropertyId { + const name_ref = name_; + var prefix: VendorPrefix = undefined; + var trimmed_name: []const u8 = undefined; + + // TODO: todo_stuff.match_ignore_ascii_case + if (bun.strings.startsWithCaseInsensitiveAscii(name_ref, "-webkit-")) { + prefix = VendorPrefix{ .webkit = true }; + trimmed_name = name_ref[8..]; + } else if (bun.strings.startsWithCaseInsensitiveAscii(name_ref, "-moz-")) { + prefix = VendorPrefix{ .moz = true }; + trimmed_name = name_ref[5..]; + } else if (bun.strings.startsWithCaseInsensitiveAscii(name_ref, "-o-")) { + prefix = VendorPrefix{ .o = true }; + trimmed_name = name_ref[3..]; + } else if (bun.strings.startsWithCaseInsensitiveAscii(name_ref, "-ms-")) { + prefix = VendorPrefix{ .ms = true }; + trimmed_name = name_ref[4..]; + } else { + prefix = VendorPrefix{ .none = true }; + trimmed_name = name_ref; + } + + return PropertyId.fromNameAndPrefix(trimmed_name, prefix) orelse .{ .custom = CustomPropertyName.fromStr(name_) }; + } + }; +} + +pub fn PropertyImpl() type { + return struct { + /// Serializes the CSS property, with an optional `!important` flag. 
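+ /// + /// When more than one vendor prefix flag is set on the property, one declaration is written per set flag (iterating `VendorPrefix.FIELDS`), separated by `;` and a newline, and the value is repeated for each, e.g. both `-webkit-box-shadow: 0 0 1px red` and `box-shadow: 0 0 1px red` (illustrative value).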
+ pub fn toCss(this: *const Property, comptime W: type, dest: *Printer(W), important: bool) PrintErr!void { + if (this.* == .custom) { + try this.custom.name.toCss(W, dest); + try dest.delim(':', false); + try this.valueToCss(W, dest); + if (important) { + try dest.whitespace(); + try dest.writeStr("!important"); + } + return; + } + const name, const prefix = this.__toCssHelper(); + var first = true; + + inline for (VendorPrefix.FIELDS) |field| { + if (@field(prefix, field)) { + var p: VendorPrefix = .{}; + @field(p, field) = true; + + if (first) { + first = false; + } else { + try dest.writeChar(';'); + try dest.newline(); + } + try p.toCss(W, dest); + try dest.writeStr(name); + try dest.delim(':', false); + try this.valueToCss(W, dest); + if (important) { + try dest.whitespace(); + try dest.writeStr("!important"); + } + } + } + } + }; +} diff --git a/src/css/properties/shape.zig b/src/css/properties/shape.zig new file mode 100644 index 0000000000..5d815259dc --- /dev/null +++ b/src/css/properties/shape.zig @@ -0,0 +1,47 @@ +const std = @import("std"); +const bun = @import("root").bun; +const Allocator = std.mem.Allocator; +const ArrayList = std.ArrayListUnmanaged; + +pub const css = @import("../css_parser.zig"); + +const SmallList = css.SmallList; +const Printer = css.Printer; +const PrintErr = css.PrintErr; +const Error = css.Error; + +const ContainerName = css.css_rules.container.ContainerName; + +const LengthPercentage = css.css_values.length.LengthPercentage; +const CustomIdent = css.css_values.ident.CustomIdent; +const CSSString = css.css_values.string.CSSString; +const CSSNumber = css.css_values.number.CSSNumber; +const LengthPercentageOrAuto = css.css_values.length.LengthPercentageOrAuto; +const Size2D = css.css_values.size.Size2D; +const DashedIdent = css.css_values.ident.DashedIdent; +const Image = css.css_values.image.Image; +const CssColor = css.css_values.color.CssColor; +const Ratio = css.css_values.ratio.Ratio; +const Length = css.css_values.length.LengthValue; +const Rect = css.css_values.rect.Rect; +const NumberOrPercentage = css.css_values.percentage.NumberOrPercentage; +const CustomIdentList = css.css_values.ident.CustomIdentList; +const Angle = css.css_values.angle.Angle; +const Url = css.css_values.url.Url; + +const GenericBorder = css.css_properties.border.GenericBorder; +const LineStyle = css.css_properties.border.LineStyle; + +/// A [`<fill-rule>`](https://www.w3.org/TR/css-shapes-1/#typedef-fill-rule) used to +/// determine the interior of a `polygon()` shape. +/// +/// See [Polygon](Polygon). +pub const FillRule = css.DefineEnumProperty(@compileError(css.todo_stuff.depth)); + +/// A CSS [`<alpha-value>`](https://www.w3.org/TR/css-color-4/#typedef-alpha-value), +/// used to represent opacity. +/// +/// Parses either a `<number>` or `<percentage>`, but is always stored and serialized as a number.
+pub const AlphaValue = struct { + v: f32, +}; diff --git a/src/css/properties/size.zig b/src/css/properties/size.zig new file mode 100644 index 0000000000..c0f48f137e --- /dev/null +++ b/src/css/properties/size.zig @@ -0,0 +1,626 @@ +const std = @import("std"); +const bun = @import("root").bun; +const Allocator = std.mem.Allocator; +const ArrayList = std.ArrayListUnmanaged; + +pub const css = @import("../css_parser.zig"); + +const SmallList = css.SmallList; +const Printer = css.Printer; +const PrintErr = css.PrintErr; +const Error = css.Error; + +const PropertyId = css.PropertyId; +const PropertyIdTag = css.PropertyIdTag; +const Property = css.Property; +const UnparsedProperty = css.css_properties.custom.UnparsedProperty; + +const PropertyCategory = css.logical.PropertyCategory; + +const ContainerName = css.css_rules.container.ContainerName; + +const LengthPercentage = css.css_values.length.LengthPercentage; +const CustomIdent = css.css_values.ident.CustomIdent; +const CSSString = css.css_values.string.CSSString; +const CSSNumber = css.css_values.number.CSSNumber; +const LengthPercentageOrAuto = css.css_values.length.LengthPercentageOrAuto; +const Size2D = css.css_values.size.Size2D; +const DashedIdent = css.css_values.ident.DashedIdent; +const Image = css.css_values.image.Image; +const CssColor = css.css_values.color.CssColor; +const Ratio = css.css_values.ratio.Ratio; +const Length = css.css_values.length.LengthValue; +const Rect = css.css_values.rect.Rect; +const NumberOrPercentage = css.css_values.percentage.NumberOrPercentage; +const CustomIdentList = css.css_values.ident.CustomIdentList; +const Angle = css.css_values.angle.Angle; +const Url = css.css_values.url.Url; + +const GenericBorder = css.css_properties.border.GenericBorder; +const LineStyle = css.css_properties.border.LineStyle; + +pub const BoxSizing = enum { + /// Exclude the margin/border/padding from the width and height. + @"content-box", + /// Include the padding and border (but not the margin) in the width and height. + @"border-box", + pub usingnamespace css.DefineEnumProperty(@This()); +}; + +pub const Size = union(enum) { + /// The `auto` keyword. + auto, + /// An explicit length or percentage. + length_percentage: LengthPercentage, + /// The `min-content` keyword. + min_content: css.VendorPrefix, + /// The `max-content` keyword. + max_content: css.VendorPrefix, + /// The `fit-content` keyword. + fit_content: css.VendorPrefix, + /// The `fit-content()` function. + fit_content_function: LengthPercentage, + /// The `stretch` keyword, or the `-webkit-fill-available` or `-moz-available` prefixed keywords. + stretch: css.VendorPrefix, + /// The `contain` keyword.
+ contain, + + pub fn parse(input: *css.Parser) css.Result(Size) { + const Enum = enum { + auto, + @"min-content", + @"-webkit-min-content", + @"-moz-min-content", + @"max-content", + @"-webkit-max-content", + @"-moz-max-content", + stretch, + @"-webkit-fill-available", + @"-moz-available", + @"fit-content", + @"-webkit-fit-content", + @"-moz-fit-content", + contain, + }; + const Map = comptime bun.ComptimeEnumMap(Enum); + const res = input.tryParse(struct { + pub fn parseFn(i: *css.Parser) css.Result(Size) { + const ident = switch (i.expectIdent()) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + + if (Map.getASCIIICaseInsensitive(ident)) |res| { + return .{ .result = switch (res) { + .auto => .auto, + .@"min-content" => .{ .min_content = css.VendorPrefix{ .none = true } }, + .@"-webkit-min-content" => .{ .min_content = css.VendorPrefix{ .webkit = true } }, + .@"-moz-min-content" => .{ .min_content = css.VendorPrefix{ .moz = true } }, + .@"max-content" => .{ .max_content = css.VendorPrefix{ .none = true } }, + .@"-webkit-max-content" => .{ .max_content = css.VendorPrefix{ .webkit = true } }, + .@"-moz-max-content" => .{ .max_content = css.VendorPrefix{ .moz = true } }, + .stretch => .{ .stretch = css.VendorPrefix{ .none = true } }, + .@"-webkit-fill-available" => .{ .stretch = css.VendorPrefix{ .webkit = true } }, + .@"-moz-available" => .{ .stretch = css.VendorPrefix{ .moz = true } }, + .@"fit-content" => .{ .fit_content = css.VendorPrefix{ .none = true } }, + .@"-webkit-fit-content" => .{ .fit_content = css.VendorPrefix{ .webkit = true } }, + .@"-moz-fit-content" => .{ .fit_content = css.VendorPrefix{ .moz = true } }, + .contain => .contain, + } }; + } else return .{ .err = i.newCustomError(css.ParserError.invalid_value) }; + } + }.parseFn, .{}); + + if (res == .result) return res; + + if (input.tryParse(parseFitContent, .{}).asValue()) |v| { + return .{ .result = Size{ .fit_content_function = v } }; + } + + const lp = switch (input.tryParse(LengthPercentage.parse, .{})) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + return .{ .result = Size{ .length_percentage = lp } }; + } + + pub fn toCss(this: *const Size, comptime W: type, dest: *css.Printer(W)) css.PrintErr!void { + return switch (this.*) { + .auto => dest.writeStr("auto"), + .contain => dest.writeStr("contain"), + .min_content => |vp| { + try vp.toCss(W, dest); + try dest.writeStr("min-content"); + }, + .max_content => |vp| { + try vp.toCss(W, dest); + try dest.writeStr("max-content"); + }, + .fit_content => |vp| { + try vp.toCss(W, dest); + try dest.writeStr("fit-content"); + }, + .stretch => |vp| { + if (vp.eql(css.VendorPrefix{ .none = true })) { + try dest.writeStr("stretch"); + } else if (vp.eql(css.VendorPrefix{ .webkit = true })) { + try dest.writeStr("-webkit-fill-available"); + } else if (vp.eql(css.VendorPrefix{ .moz = true })) { + try dest.writeStr("-moz-available"); + } else { + bun.unreachablePanic("Unexpected vendor prefixes", .{}); + } + }, + .fit_content_function => |l| { + try dest.writeStr("fit-content("); + try l.toCss(W, dest); + try dest.writeChar(')'); + }, + .length_percentage => |l| return l.toCss(W, dest), + }; + } + + pub fn isCompatible(this: *const @This(), browsers: css.targets.Browsers) bool { + const F = css.compat.Feature; + return switch (this.*) { + .length_percentage => |*l| l.isCompatible(browsers), + .min_content => F.isCompatible(.min_content_size, browsers), + .max_content => F.isCompatible(.max_content_size, browsers), + .fit_content => 
F.isCompatible(.fit_content_size, browsers), + .fit_content_function => |*l| F.isCompatible(.fit_content_function_size, browsers) and l.isCompatible(browsers), + .stretch => |*vp| F.isCompatible(switch (vp.asBits()) { + css.VendorPrefix.NONE.asBits() => F.stretch_size, + css.VendorPrefix.WEBKIT.asBits() => F.webkit_fill_available_size, + css.VendorPrefix.MOZ.asBits() => F.moz_available_size, + else => return false, + }, browsers), + .contain => false, // ??? no data in mdn + .auto => true, + }; + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } +}; + +/// A value for the [minimum](https://drafts.csswg.org/css-sizing-3/#min-size-properties) +/// and [maximum](https://drafts.csswg.org/css-sizing-3/#max-size-properties) size properties, +/// e.g. `min-width` and `max-height`. +pub const MaxSize = union(enum) { + /// The `none` keyword. + none, + /// An explicit length or percentage. + length_percentage: LengthPercentage, + /// The `min-content` keyword. + min_content: css.VendorPrefix, + /// The `max-content` keyword. + max_content: css.VendorPrefix, + /// The `fit-content` keyword. + fit_content: css.VendorPrefix, + /// The `fit-content()` function. + fit_content_function: LengthPercentage, + /// The `stretch` keyword, or the `-webkit-fill-available` or `-moz-available` prefixed keywords. + stretch: css.VendorPrefix, + /// The `contain` keyword. + contain, + + pub fn parse(input: *css.Parser) css.Result(MaxSize) { + const Ident = enum { + none, + min_content, + webkit_min_content, + moz_min_content, + max_content, + webkit_max_content, + moz_max_content, + stretch, + webkit_fill_available, + moz_available, + fit_content, + webkit_fit_content, + moz_fit_content, + contain, + }; + + const IdentMap = bun.ComptimeStringMap(Ident, .{ + .{ "none", .none }, + .{ "min-content", .min_content }, + .{ "-webkit-min-content", .webkit_min_content }, + .{ "-moz-min-content", .moz_min_content }, + .{ "max-content", .max_content }, + .{ "-webkit-max-content", .webkit_max_content }, + .{ "-moz-max-content", .moz_max_content }, + .{ "stretch", .stretch }, + .{ "-webkit-fill-available", .webkit_fill_available }, + .{ "-moz-available", .moz_available }, + .{ "fit-content", .fit_content }, + .{ "-webkit-fit-content", .webkit_fit_content }, + .{ "-moz-fit-content", .moz_fit_content }, + .{ "contain", .contain }, + }); + + const res = input.tryParse(struct { + fn parse(i: *css.Parser) css.Result(MaxSize) { + const ident = switch (i.expectIdent()) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + const mapped = IdentMap.getASCIIICaseInsensitive(ident) orelse return .{ .err = i.newCustomError(css.ParserError.invalid_value) }; + return .{ .result = switch (mapped) { + .none => .none, + .min_content => .{ .min_content = .{ .none = true } }, + .webkit_min_content => .{ .min_content = .{ .webkit = true } }, + .moz_min_content => .{ .min_content = .{ .moz = true } }, + .max_content => .{ .max_content = .{ .none = true } }, + .webkit_max_content => .{ .max_content = .{ .webkit = true } }, + .moz_max_content => .{ .max_content = .{ .moz = true } }, + .stretch => .{ .stretch = .{ .none = true } }, + .webkit_fill_available => .{ .stretch = .{ .webkit = true } }, + .moz_available => .{ .stretch = .{ .moz = true } }, + .fit_content => .{ .fit_content = .{ .none = true } }, + .webkit_fit_content => .{ 
.fit_content = .{ .webkit = true } }, + .moz_fit_content => .{ .fit_content = .{ .moz = true } }, + .contain => .contain, + } }; + } + }.parse, .{}); + + if (res.isOk()) { + return res; + } + + if (input.tryParse(parseFitContent, .{}).asValue()) |v| { + return .{ .result = .{ .fit_content_function = v } }; + } + + return switch (input.tryParse(LengthPercentage.parse, .{})) { + .result => |v| .{ .result = .{ .length_percentage = v } }, + .err => |e| .{ .err = e }, + }; + } + + pub fn toCss(this: *const MaxSize, comptime W: type, dest: *css.Printer(W)) css.PrintErr!void { + switch (this.*) { + .none => try dest.writeStr("none"), + .contain => try dest.writeStr("contain"), + .min_content => |vp| { + try vp.toCss(W, dest); + try dest.writeStr("min-content"); + }, + .max_content => |vp| { + try vp.toCss(W, dest); + try dest.writeStr("max-content"); + }, + .fit_content => |vp| { + try vp.toCss(W, dest); + try dest.writeStr("fit-content"); + }, + .stretch => |vp| { + if (css.VendorPrefix.eql(vp, css.VendorPrefix{ .none = true })) { + try dest.writeStr("stretch"); + } else if (css.VendorPrefix.eql(vp, css.VendorPrefix{ .webkit = true })) { + try dest.writeStr("-webkit-fill-available"); + } else if (css.VendorPrefix.eql(vp, css.VendorPrefix{ .moz = true })) { + try dest.writeStr("-moz-available"); + } else { + bun.unreachablePanic("Unexpected vendor prefixes", .{}); + } + }, + .fit_content_function => |l| { + try dest.writeStr("fit-content("); + try l.toCss(W, dest); + try dest.writeChar(')'); + }, + .length_percentage => |l| try l.toCss(W, dest), + } + } + + pub fn isCompatible(this: *const @This(), browsers: css.targets.Browsers) bool { + const F = css.compat.Feature; + return switch (this.*) { + .length_percentage => |*l| l.isCompatible(browsers), + .min_content => F.isCompatible(.min_content_size, browsers), + .max_content => F.isCompatible(.max_content_size, browsers), + .fit_content => F.isCompatible(.fit_content_size, browsers), + .fit_content_function => |*l| F.isCompatible(F.fit_content_function_size, browsers) and l.isCompatible(browsers), + .stretch => |*vp| F.isCompatible( + switch (vp.asBits()) { + css.VendorPrefix.NONE.asBits() => F.stretch_size, + css.VendorPrefix.WEBKIT.asBits() => F.webkit_fill_available_size, + css.VendorPrefix.MOZ.asBits() => F.moz_available_size, + else => return false, + }, + browsers, + ), + .contain => false, // ??? no data in mdn + .none => true, + }; + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } +}; + +/// A value for the [aspect-ratio](https://drafts.csswg.org/css-sizing-4/#aspect-ratio) property. +pub const AspectRatio = struct { + /// The `auto` keyword. + auto: bool, + /// A preferred aspect ratio for the box, specified as width / height. 
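+ /// For example, `aspect-ratio: auto 16 / 9` parses with `auto` set to true and `ratio` set to `16 / 9`.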
+ ratio: ?Ratio, + + pub fn parse(input: *css.Parser) css.Result(AspectRatio) { + const location = input.currentSourceLocation(); + var auto = input.tryParse(css.Parser.expectIdentMatching, .{"auto"}); + + const ratio = input.tryParse(Ratio.parse, .{}); + if (auto.isErr()) { + auto = input.tryParse(css.Parser.expectIdentMatching, .{"auto"}); + } + if (auto.isErr() and ratio.isErr()) { + return .{ .err = location.newCustomError(css.ParserError{ .invalid_value = {} }) }; + } + + return .{ + .result = AspectRatio{ + .auto = auto.isOk(), + .ratio = ratio.asValue(), + }, + }; + } + + pub fn toCss(this: *const AspectRatio, comptime W: type, dest: *css.Printer(W)) css.PrintErr!void { + if (this.auto) { + try dest.writeStr("auto"); + } + + if (this.ratio) |*ratio| { + if (this.auto) try dest.writeChar(' '); + try ratio.toCss(W, dest); + } + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } +}; + +fn parseFitContent(input: *css.Parser) css.Result(LengthPercentage) { + if (input.expectFunctionMatching("fit-content").asErr()) |e| return .{ .err = e }; + return input.parseNestedBlock(LengthPercentage, {}, css.voidWrap(LengthPercentage, LengthPercentage.parse)); +} + +pub const SizeProperty = packed struct(u16) { + width: bool = false, + height: bool = false, + @"min-width": bool = false, + @"min-height": bool = false, + @"max-width": bool = false, + @"max-height": bool = false, + @"block-size": bool = false, + @"inline-size": bool = false, + @"min-block-size": bool = false, + @"min-inline-size": bool = false, + @"max-block-size": bool = false, + @"max-inline-size": bool = false, + __unused: u4 = 0, + + pub usingnamespace css.Bitflags(@This()); + + pub fn tryFromPropertyIdTag(property_id: PropertyIdTag) ?SizeProperty { + inline for (std.meta.fields(@This())) |field| { + if (comptime std.mem.eql(u8, field.name, "__unused")) continue; + if (@intFromEnum(@field(PropertyIdTag, field.name)) == @intFromEnum(@as(PropertyIdTag, property_id))) { + var ret: SizeProperty = .{}; + @field(ret, field.name) = true; + return ret; + } + } + return null; + } +}; + +pub const SizeHandler = struct { + width: ?Size = null, + height: ?Size = null, + min_width: ?Size = null, + min_height: ?Size = null, + max_width: ?MaxSize = null, + max_height: ?MaxSize = null, + block_size: ?Size = null, + inline_size: ?Size = null, + min_block_size: ?Size = null, + min_inline_size: ?Size = null, + max_block_size: ?MaxSize = null, + max_inline_size: ?MaxSize = null, + has_any: bool = false, + flushed_properties: SizeProperty = .{}, + category: PropertyCategory = PropertyCategory.default(), + + const Feature = css.Feature; + + pub fn handleProperty(this: *@This(), property: *const Property, dest: *css.DeclarationList, context: *css.PropertyHandlerContext) bool { + const logical_supported = !context.shouldCompileLogical(Feature.logical_size); + + switch (property.*) { + .width => |*v| this.propertyHelper("width", Size, v, PropertyCategory.physical, dest, context), + .height => |*v| this.propertyHelper("height", Size, v, PropertyCategory.physical, dest, context), + .@"min-width" => |*v| this.propertyHelper("min_width", Size, v, PropertyCategory.physical, dest, context), + .@"min-height" => |*v| this.propertyHelper("min_height", Size, v, PropertyCategory.physical, dest, context), + .@"max-width" => |*v| this.propertyHelper("max_width", 
MaxSize, v, PropertyCategory.physical, dest, context), + .@"max-height" => |*v| this.propertyHelper("max_height", MaxSize, v, PropertyCategory.physical, dest, context), + .@"block-size" => |*v| this.propertyHelper("block_size", Size, v, PropertyCategory.logical, dest, context), + .@"min-block-size" => |*v| this.propertyHelper("min_block_size", Size, v, PropertyCategory.logical, dest, context), + .@"max-block-size" => |*v| this.propertyHelper("max_block_size", MaxSize, v, PropertyCategory.logical, dest, context), + .@"inline-size" => |*v| this.propertyHelper("inline_size", Size, v, PropertyCategory.logical, dest, context), + .@"min-inline-size" => |*v| this.propertyHelper("min_inline_size", Size, v, PropertyCategory.logical, dest, context), + .@"max-inline-size" => |*v| this.propertyHelper("max_inline_size", MaxSize, v, PropertyCategory.logical, dest, context), + .unparsed => |*unparsed| { + switch (unparsed.property_id) { + .width, .height, .@"min-width", .@"max-width", .@"min-height", .@"max-height" => { + this.flushed_properties.insert(SizeProperty.tryFromPropertyIdTag(@as(PropertyIdTag, unparsed.property_id)).?); + dest.append(context.allocator, property.deepClone(context.allocator)) catch unreachable; + }, + .@"block-size" => this.logicalUnparsedHelper(property, unparsed, .height, logical_supported, dest, context), + .@"min-block-size" => this.logicalUnparsedHelper(property, unparsed, .@"min-height", logical_supported, dest, context), + .@"max-block-size" => this.logicalUnparsedHelper(property, unparsed, .@"max-height", logical_supported, dest, context), + .@"inline-size" => this.logicalUnparsedHelper(property, unparsed, .width, logical_supported, dest, context), + .@"min-inline-size" => this.logicalUnparsedHelper(property, unparsed, .@"min-width", logical_supported, dest, context), + .@"max-inline-size" => this.logicalUnparsedHelper(property, unparsed, .@"max-width", logical_supported, dest, context), + else => return false, + } + }, + else => return false, + } + + return true; + } + + inline fn logicalUnparsedHelper(this: *@This(), property: *const Property, unparsed: *const UnparsedProperty, comptime physical: PropertyIdTag, logical_supported: bool, dest: *css.DeclarationList, context: *css.PropertyHandlerContext) void { + if (logical_supported) { + this.flushed_properties.insert(SizeProperty.tryFromPropertyIdTag(@as(PropertyIdTag, unparsed.property_id)).?); + dest.append(context.allocator, property.deepClone(context.allocator)) catch bun.outOfMemory(); + } else { + dest.append(context.allocator, Property{ + .unparsed = unparsed.withPropertyId( + context.allocator, + @unionInit(PropertyId, @tagName(physical), {}), + ), + }) catch bun.outOfMemory(); + this.flushed_properties.insert(SizeProperty.fromName(@tagName(physical))); + } + } + + inline fn propertyHelper( + this: *@This(), + comptime property: []const u8, + comptime T: type, + value: *const T, + comptime category: PropertyCategory, + dest: *css.DeclarationList, + context: *css.PropertyHandlerContext, + ) void { + // If the category changes between logical and physical, + // or if the value contains syntax that isn't supported across all targets, + // preserve the previous value as a fallback.
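+ // For example, given `height: 100px` followed by `block-size: 50%`, the physical `height` is flushed before the logical `block-size` is recorded, so the earlier declaration is emitted as a fallback instead of being dropped.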
+ + if (@field(PropertyCategory, @tagName(category)) != this.category or (@field(this, property) != null and context.targets.browsers != null and !value.isCompatible(context.targets.browsers.?))) { + this.flush(dest, context); + } + + @field(this, property) = value.deepClone(context.allocator); + this.category = category; + this.has_any = true; + } + + pub fn flush(this: *@This(), dest: *css.DeclarationList, context: *css.PropertyHandlerContext) void { + if (!this.has_any) return; + + this.has_any = false; + const logical_supported = !context.shouldCompileLogical(Feature.logical_size); + + this.flushPropertyHelper(PropertyIdTag.width, "width", Size, dest, context); + this.flushPropertyHelper(PropertyIdTag.@"min-width", "min_width", Size, dest, context); + this.flushPropertyHelper(PropertyIdTag.@"max-width", "max_width", MaxSize, dest, context); + this.flushPropertyHelper(PropertyIdTag.height, "height", Size, dest, context); + this.flushPropertyHelper(PropertyIdTag.@"min-height", "min_height", Size, dest, context); + this.flushPropertyHelper(PropertyIdTag.@"max-height", "max_height", MaxSize, dest, context); + this.flushLogicalHelper(PropertyIdTag.@"block-size", "block_size", PropertyIdTag.height, Size, logical_supported, dest, context); + this.flushLogicalHelper(PropertyIdTag.@"min-block-size", "min_block_size", PropertyIdTag.@"min-height", Size, logical_supported, dest, context); + this.flushLogicalHelper(PropertyIdTag.@"max-block-size", "max_block_size", PropertyIdTag.@"max-height", MaxSize, logical_supported, dest, context); + this.flushLogicalHelper(PropertyIdTag.@"inline-size", "inline_size", PropertyIdTag.width, Size, logical_supported, dest, context); + this.flushLogicalHelper(PropertyIdTag.@"min-inline-size", "min_inline_size", PropertyIdTag.@"min-width", Size, logical_supported, dest, context); + this.flushLogicalHelper(PropertyIdTag.@"max-inline-size", "max_inline_size", PropertyIdTag.@"max-width", MaxSize, logical_supported, dest, context); + } + + pub fn finalize(this: *@This(), dest: *css.DeclarationList, context: *css.PropertyHandlerContext) void { + this.flush(dest, context); + this.flushed_properties = SizeProperty.empty(); + } + + inline fn flushPrefixHelper( + this: *@This(), + comptime property: PropertyIdTag, + comptime SizeType: type, + comptime feature: css.prefixes.Feature, + dest: *css.DeclarationList, + context: *css.PropertyHandlerContext, + ) void { + if (!this.flushed_properties.contains(comptime SizeProperty.fromName(@tagName(property)))) { + const prefixes = context.targets.prefixes(css.VendorPrefix{ .none = true }, feature).difference(css.VendorPrefix{ .none = true }); + inline for (css.VendorPrefix.FIELDS) |field| { + if (@field(prefixes, field)) { + var prefix: css.VendorPrefix = .{}; + @field(prefix, field) = true; + dest.append( + context.allocator, + @unionInit( + Property, + @tagName(property), + @unionInit(SizeType, @tagName(feature), prefix), + ), + ) catch bun.outOfMemory(); + } + } + } + } + + inline fn flushPropertyHelper( + this: *@This(), + comptime property: PropertyIdTag, + comptime field: []const u8, + comptime SizeType: type, + dest: *css.DeclarationList, + context: *css.PropertyHandlerContext, + ) void { + if (bun.take(&@field(this, field))) |val| { + switch (val) { + .stretch => |vp| if (vp.eql(css.VendorPrefix{ .none = true })) { + this.flushPrefixHelper(property, SizeType, .stretch, dest, context); + }, + .min_content => |vp| if (vp.eql(css.VendorPrefix{ .none = true })) { + this.flushPrefixHelper(property, SizeType, .min_content, dest, 
context); + }, + .max_content => |vp| if (vp.eql(css.VendorPrefix{ .none = true })) { + this.flushPrefixHelper(property, SizeType, .max_content, dest, context); + }, + .fit_content => |vp| if (vp.eql(css.VendorPrefix{ .none = true })) { + this.flushPrefixHelper(property, SizeType, .fit_content, dest, context); + }, + else => {}, + } + dest.append(context.allocator, @unionInit(Property, @tagName(property), val.deepClone(context.allocator))) catch bun.outOfMemory(); + this.flushed_properties.insert(comptime SizeProperty.fromName(@tagName(property))); + } + } + + inline fn flushLogicalHelper( + this: *@This(), + comptime property: PropertyIdTag, + comptime field: []const u8, + comptime physical: PropertyIdTag, + comptime SizeType: type, + logical_supported: bool, + dest: *css.DeclarationList, + context: *css.PropertyHandlerContext, + ) void { + if (logical_supported) { + this.flushPropertyHelper(property, field, SizeType, dest, context); + } else { + this.flushPropertyHelper(physical, field, SizeType, dest, context); + } + } +}; diff --git a/src/css/properties/svg.zig b/src/css/properties/svg.zig new file mode 100644 index 0000000000..b46734d7cb --- /dev/null +++ b/src/css/properties/svg.zig @@ -0,0 +1,100 @@ +const std = @import("std"); +const bun = @import("root").bun; +const Allocator = std.mem.Allocator; +const ArrayList = std.ArrayListUnmanaged; + +pub const css = @import("../css_parser.zig"); + +const SmallList = css.SmallList; +const Printer = css.Printer; +const PrintErr = css.PrintErr; +const Error = css.Error; + +const ContainerName = css.css_rules.container.ContainerName; + +const LengthPercentage = css.css_values.length.LengthPercentage; +const CustomIdent = css.css_values.ident.CustomIdent; +const CSSString = css.css_values.string.CSSString; +const CSSNumber = css.css_values.number.CSSNumber; +const LengthPercentageOrAuto = css.css_values.length.LengthPercentageOrAuto; +const Size2D = css.css_values.size.Size2D; +const DashedIdent = css.css_values.ident.DashedIdent; +const Image = css.css_values.image.Image; +const CssColor = css.css_values.color.CssColor; +const Ratio = css.css_values.ratio.Ratio; +const Length = css.css_values.length.LengthValue; +const Rect = css.css_values.rect.Rect; +const NumberOrPercentage = css.css_values.percentage.NumberOrPercentage; +const CustomIdentList = css.css_values.ident.CustomIdentList; +const Angle = css.css_values.angle.Angle; +const Url = css.css_values.url.Url; + +const GenericBorder = css.css_properties.border.GenericBorder; +const LineStyle = css.css_properties.border.LineStyle; + +/// An SVG [`<paint>`](https://www.w3.org/TR/SVG2/painting.html#SpecifyingPaint) value +/// used in the `fill` and `stroke` properties. +const SVGPaint = union(enum) { + /// A URL reference to a paint server element, e.g. `linearGradient`, `radialGradient`, and `pattern`. + Url: struct { + /// The url of the paint server. + url: Url, + /// A fallback to be used in case the paint server cannot be resolved. + fallback: ?SVGPaintFallback, + }, + /// A solid color paint. + Color: CssColor, + /// Use the paint value of fill from a context element. + ContextFill, + /// Use the paint value of stroke from a context element. + ContextStroke, + /// No paint. + None, +}; + +/// A fallback for an SVG paint in case a paint server `url()` cannot be resolved. +/// +/// See [SVGPaint](SVGPaint). +const SVGPaintFallback = union(enum) { + /// No fallback. + None, + /// A solid color.
+ Color: CssColor, +}; + +/// A value for the [stroke-linecap](https://www.w3.org/TR/SVG2/painting.html#LineCaps) property. +pub const StrokeLinecap = css.DefineEnumProperty(@compileError(css.todo_stuff.depth)); + +/// A value for the [stroke-linejoin](https://www.w3.org/TR/SVG2/painting.html#LineJoin) property. +pub const StrokeLinejoin = css.DefineEnumProperty(@compileError(css.todo_stuff.depth)); + +/// A value for the [stroke-dasharray](https://www.w3.org/TR/SVG2/painting.html#StrokeDashing) property. +const StrokeDasharray = union(enum) { + /// No dashing is used. + None, + /// Specifies a dashing pattern to use. + Values: ArrayList(LengthPercentage), +}; + +/// A value for the [marker](https://www.w3.org/TR/SVG2/painting.html#VertexMarkerProperties) properties. +const Marker = union(enum) { + /// No marker. + None, + /// A url reference to a `<marker>` element. + Url: Url, +}; + +/// A value for the [color-interpolation](https://www.w3.org/TR/SVG2/painting.html#ColorInterpolation) property. +pub const ColorInterpolation = css.DefineEnumProperty(@compileError(css.todo_stuff.depth)); + +/// A value for the [color-rendering](https://www.w3.org/TR/SVG2/painting.html#ColorRendering) property. +pub const ColorRendering = css.DefineEnumProperty(@compileError(css.todo_stuff.depth)); + +/// A value for the [shape-rendering](https://www.w3.org/TR/SVG2/painting.html#ShapeRendering) property. +pub const ShapeRendering = css.DefineEnumProperty(@compileError(css.todo_stuff.depth)); + +/// A value for the [text-rendering](https://www.w3.org/TR/SVG2/painting.html#TextRendering) property. +pub const TextRendering = css.DefineEnumProperty(@compileError(css.todo_stuff.depth)); + +/// A value for the [image-rendering](https://www.w3.org/TR/SVG2/painting.html#ImageRendering) property.
+pub const ImageRendering = css.DefineEnumProperty(@compileError(css.todo_stuff.depth)); diff --git a/src/css/properties/text.zig b/src/css/properties/text.zig new file mode 100644 index 0000000000..7ad11f9a05 --- /dev/null +++ b/src/css/properties/text.zig @@ -0,0 +1,289 @@ +const std = @import("std"); +const bun = @import("root").bun; +const Allocator = std.mem.Allocator; +const ArrayList = std.ArrayListUnmanaged; + +pub const css = @import("../css_parser.zig"); + +const SmallList = css.SmallList; +const Printer = css.Printer; +const PrintErr = css.PrintErr; +const Error = css.Error; + +const ContainerName = css.css_rules.container.ContainerName; + +const LengthPercentage = css.css_values.length.LengthPercentage; +const CustomIdent = css.css_values.ident.CustomIdent; +const CSSString = css.css_values.string.CSSString; +const CSSNumber = css.css_values.number.CSSNumber; +const LengthPercentageOrAuto = css.css_values.length.LengthPercentageOrAuto; +const Size2D = css.css_values.size.Size2D; +const DashedIdent = css.css_values.ident.DashedIdent; +const Image = css.css_values.image.Image; +const CssColor = css.css_values.color.CssColor; +const Ratio = css.css_values.ratio.Ratio; +const Length = css.css_values.length.LengthValue; +const Rect = css.css_values.rect.Rect; +const NumberOrPercentage = css.css_values.percentage.NumberOrPercentage; +const CustomIdentList = css.css_values.ident.CustomIdentList; +const Angle = css.css_values.angle.Angle; +const Url = css.css_values.url.Url; +const Percentage = css.css_values.percentage.Percentage; + +const GenericBorder = css.css_properties.border.GenericBorder; +const LineStyle = css.css_properties.border.LineStyle; + +/// A value for the [text-transform](https://www.w3.org/TR/2021/CRD-css-text-3-20210422/#text-transform-property) property. +pub const TextTransform = struct { + /// How case should be transformed. + case: TextTransformCase, + /// How ideographic characters should be transformed. + other: TextTransformOther, +}; + +pub const TextTransformOther = packed struct(u8) { + /// Puts all typographic character units in full-width form. + full_width: bool = false, + /// Converts all small Kana characters to the equivalent full-size Kana. + full_size_kana: bool = false, +}; + +/// Defines how text case should be transformed in the +/// [text-transform](https://www.w3.org/TR/2021/CRD-css-text-3-20210422/#text-transform-property) property. +const TextTransformCase = css.DefineEnumProperty(@compileError(css.todo_stuff.depth)); + +/// A value for the [white-space](https://www.w3.org/TR/2021/CRD-css-text-3-20210422/#white-space-property) property. +pub const WhiteSpace = css.DefineEnumProperty(@compileError(css.todo_stuff.depth)); + +/// A value for the [word-break](https://www.w3.org/TR/2021/CRD-css-text-3-20210422/#word-break-property) property. +pub const WordBreak = css.DefineEnumProperty(@compileError(css.todo_stuff.depth)); + +/// A value for the [line-break](https://www.w3.org/TR/2021/CRD-css-text-3-20210422/#line-break-property) property. +pub const LineBreak = css.DefineEnumProperty(@compileError(css.todo_stuff.depth)); + +/// A value for the [hyphens](https://www.w3.org/TR/2021/CRD-css-text-3-20210422/#hyphenation) property. +pub const Hyphens = css.DefineEnumProperty(@compileError(css.todo_stuff.depth)); + +/// A value for the [overflow-wrap](https://www.w3.org/TR/2021/CRD-css-text-3-20210422/#overflow-wrap-property) property. 
+pub const OverflowWrap = css.DefineEnumProperty(@compileError(css.todo_stuff.depth)); + +/// A value for the [text-align](https://www.w3.org/TR/2021/CRD-css-text-3-20210422/#text-align-property) property. +pub const TextAlign = css.DefineEnumProperty(@compileError(css.todo_stuff.depth)); + +/// A value for the [text-align-last](https://www.w3.org/TR/2021/CRD-css-text-3-20210422/#text-align-last-property) property. +pub const TextAlignLast = css.DefineEnumProperty(@compileError(css.todo_stuff.depth)); + +/// A value for the [text-justify](https://www.w3.org/TR/2021/CRD-css-text-3-20210422/#text-justify-property) property. +pub const TextJustify = css.DefineEnumProperty(@compileError(css.todo_stuff.depth)); + +/// A value for the [word-spacing](https://www.w3.org/TR/2021/CRD-css-text-3-20210422/#word-spacing-property) +/// and [letter-spacing](https://www.w3.org/TR/2021/CRD-css-text-3-20210422/#letter-spacing-property) properties. +pub const Spacing = union(enum) { + /// No additional spacing is applied. + normal, + /// Additional spacing between each word or letter. + length: Length, +}; + +/// A value for the [text-indent](https://www.w3.org/TR/2021/CRD-css-text-3-20210422/#text-indent-property) property. +pub const TextIndent = struct { + /// The amount to indent. + value: LengthPercentage, + /// Inverts which lines are affected. + hanging: bool, + /// Affects the first line after each hard break. + each_line: bool, +}; + +/// A value for the [text-decoration-line](https://www.w3.org/TR/2020/WD-css-text-decor-4-20200506/#text-decoration-line-property) property. +/// +/// Multiple lines may be specified by combining the flags. +pub const TextDecorationLine = packed struct(u8) { + /// Each line of text is underlined. + underline: bool = false, + /// Each line of text has a line over it. + overline: bool = false, + /// Each line of text has a line through the middle. + line_through: bool = false, + /// The text blinks. + blink: bool = false, + /// The text is decorated as a spelling error. + spelling_error: bool = false, + /// The text is decorated as a grammar error. + grammar_error: bool = false, +}; + +/// A value for the [text-decoration-style](https://www.w3.org/TR/2020/WD-css-text-decor-4-20200506/#text-decoration-style-property) property. +pub const TextDecorationStyle = css.DefineEnumProperty(@compileError(css.todo_stuff.depth)); + +/// A value for the [text-decoration-thickness](https://www.w3.org/TR/2020/WD-css-text-decor-4-20200506/#text-decoration-width-property) property. +pub const TextDecorationThickness = union(enum) { + /// The UA chooses an appropriate thickness for text decoration lines. + auto, + /// Use the thickness defined in the current font. + from_font, + /// An explicit length. + length_percentage: LengthPercentage, +}; + +/// A value for the [text-decoration](https://www.w3.org/TR/2020/WD-css-text-decor-4-20200506/#text-decoration-property) shorthand property. +pub const TextDecoration = css.DefineEnumProperty(@compileError(css.todo_stuff.depth)); + +/// A value for the [text-decoration-skip-ink](https://www.w3.org/TR/2020/WD-css-text-decor-4-20200506/#text-decoration-skip-ink-property) property. +pub const TextDecorationSkipInk = css.DefineEnumProperty(@compileError(css.todo_stuff.depth)); + +/// A text emphasis shape for the [text-emphasis-style](https://www.w3.org/TR/2020/WD-css-text-decor-4-20200506/#text-emphasis-style-property) property. +/// +/// See [TextEmphasisStyle](TextEmphasisStyle). 
+pub const TextEmphasisStyle = css.DefineEnumProperty(@compileError(css.todo_stuff.depth)); + +/// A value for the [text-emphasis](https://www.w3.org/TR/2020/WD-css-text-decor-4-20200506/#text-emphasis-property) shorthand property. +pub const TextEmphasis = css.DefineEnumProperty(@compileError(css.todo_stuff.depth)); + +/// A value for the [text-emphasis-position](https://www.w3.org/TR/2020/WD-css-text-decor-4-20200506/#text-emphasis-position-property) property. +pub const TextEmphasisPosition = struct { + /// The vertical position. + vertical: TextEmphasisPositionVertical, + /// The horizontal position. + horizontal: TextEmphasisPositionHorizontal, +}; + +/// A vertical position keyword for the [text-emphasis-position](https://www.w3.org/TR/2020/WD-css-text-decor-4-20200506/#text-emphasis-position-property) property. +/// +/// See [TextEmphasisPosition](TextEmphasisPosition). +pub const TextEmphasisPositionVertical = css.DefineEnumProperty(@compileError(css.todo_stuff.depth)); + +/// A horizontal position keyword for the [text-emphasis-position](https://www.w3.org/TR/2020/WD-css-text-decor-4-20200506/#text-emphasis-position-property) property. +/// +/// See [TextEmphasisPosition](TextEmphasisPosition). +pub const TextEmphasisPositionHorizontal = css.DefineEnumProperty(@compileError(css.todo_stuff.depth)); + +/// A value for the [text-shadow](https://www.w3.org/TR/2020/WD-css-text-decor-4-20200506/#text-shadow-property) property. +pub const TextShadow = struct { + /// The color of the text shadow. + color: CssColor, + /// The x offset of the text shadow. + x_offset: Length, + /// The y offset of the text shadow. + y_offset: Length, + /// The blur radius of the text shadow. + blur: Length, + /// The spread distance of the text shadow. + spread: Length, // added in Level 4 spec + + pub fn parse(input: *css.Parser) css.Result(@This()) { + var color: ?CssColor = null; + const Lengths = struct { Length, Length, Length, Length }; + var lengths: ?Lengths = null; + + while (true) { + if (lengths == null) { + const value = input.tryParse(struct { + pub fn parseFn(i: *css.Parser) css.Result(Lengths) { + const horizontal = switch (Length.parse(i)) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + const vertical = switch (Length.parse(i)) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + const blur = i.tryParse(Length.parse, .{}).asValue() orelse Length.zero(); + const spread = i.tryParse(Length.parse, .{}).asValue() orelse Length.zero(); + return .{ .result = .{ horizontal, vertical, blur, spread } }; + } + }.parseFn, .{}); + + if (value.asValue()) |v| { + lengths = v; + continue; + } + } + + if (color == null) { + if (input.tryParse(CssColor.parse, .{}).asValue()) |value| { + color = value; + continue; + } + } + + break; + } + + const l = lengths orelse return .{ .err = input.newError(.qualified_rule_invalid) }; + return .{ + .result = .{ + .color = color orelse CssColor.current_color, + .x_offset = l[0], + .y_offset = l[1], + .blur = l[2], + .spread = l[3], + }, + }; + } + + pub fn toCss(this: *const @This(), comptime W: type, dest: *css.Printer(W)) css.PrintErr!void { + try this.x_offset.toCss(W, dest); + try dest.writeChar(' '); + try this.y_offset.toCss(W, dest); + + if (!this.blur.eql(&Length.zero()) or !this.spread.eql(&Length.zero())) { + try dest.writeChar(' '); + try this.blur.toCss(W, dest); + + if (!this.spread.eql(&Length.zero())) { + try dest.writeChar(' '); + try this.spread.toCss(W, dest); + } + } + + if (!this.color.eql(&CssColor{ .current_color = {} })) 
{ + try dest.writeChar(' '); + try this.color.toCss(W, dest); + } + + return; + } + + pub fn isCompatible(this: *const @This(), browsers: css.targets.Browsers) bool { + return this.color.isCompatible(browsers) and + this.x_offset.isCompatible(browsers) and + this.y_offset.isCompatible(browsers) and + this.blur.isCompatible(browsers) and + this.spread.isCompatible(browsers); + } + + pub fn eql(this: *const @This(), other: *const @This()) bool { + return css.implementEql(@This(), this, other); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } +}; + +/// A value for the [text-size-adjust](https://w3c.github.io/csswg-drafts/css-size-adjust/#adjustment-control) property. +pub const TextSizeAdjust = union(enum) { + /// Use the default size adjustment when displaying on a small device. + auto, + /// No size adjustment when displaying on a small device. + none, + /// When displaying on a small device, the font size is multiplied by this percentage. + percentage: Percentage, +}; + +/// A value for the [direction](https://drafts.csswg.org/css-writing-modes-3/#direction) property. +pub const Direction = enum { + /// This value sets inline base direction (bidi directionality) to line-left-to-line-right. + ltr, + /// This value sets inline base direction (bidi directionality) to line-right-to-line-left. + rtl, + + pub usingnamespace css.DefineEnumProperty(@This()); +}; + +/// A value for the [unicode-bidi](https://drafts.csswg.org/css-writing-modes-3/#unicode-bidi) property. +pub const UnicodeBidi = css.DefineEnumProperty(@compileError(css.todo_stuff.depth)); + +/// A value for the [box-decoration-break](https://www.w3.org/TR/css-break-3/#break-decoration) property. 
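+/// Accepts the keywords `slice` (the initial value) and `clone`, controlling whether borders, padding, and backgrounds are repeated on each box fragment.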
+pub const BoxDecorationBreak = css.DefineEnumProperty(@compileError(css.todo_stuff.depth)); diff --git a/src/css/properties/transform.zig b/src/css/properties/transform.zig new file mode 100644 index 0000000000..576779ad30 --- /dev/null +++ b/src/css/properties/transform.zig @@ -0,0 +1,264 @@ +const std = @import("std"); +const bun = @import("root").bun; +const Allocator = std.mem.Allocator; +const ArrayList = std.ArrayListUnmanaged; + +pub const css = @import("../css_parser.zig"); + +const SmallList = css.SmallList; +const Printer = css.Printer; +const PrintErr = css.PrintErr; +const Result = css.Result; + +const ContainerName = css.css_rules.container.ContainerName; + +const LengthPercentage = css.css_values.length.LengthPercentage; +const CustomIdent = css.css_values.ident.CustomIdent; +const CSSString = css.css_values.string.CSSString; +const CSSNumber = css.css_values.number.CSSNumber; +const LengthPercentageOrAuto = css.css_values.length.LengthPercentageOrAuto; +const Size2D = css.css_values.size.Size2D; +const DashedIdent = css.css_values.ident.DashedIdent; +const Image = css.css_values.image.Image; +const CssColor = css.css_values.color.CssColor; +const Ratio = css.css_values.ratio.Ratio; +const Length = css.css_values.length.LengthValue; +const Rect = css.css_values.rect.Rect; +const NumberOrPercentage = css.css_values.percentage.NumberOrPercentage; +const CustomIdentList = css.css_values.ident.CustomIdentList; +const Angle = css.css_values.angle.Angle; +const Url = css.css_values.url.Url; +const Percentage = css.css_values.percentage.Percentage; + +const GenericBorder = css.css_properties.border.GenericBorder; +const LineStyle = css.css_properties.border.LineStyle; + +/// A value for the [transform](https://www.w3.org/TR/2019/CR-css-transforms-1-20190214/#propdef-transform) property. +pub const TransformList = struct { + v: ArrayList(Transform), + + pub fn parse(input: *css.Parser) Result(@This()) { + _ = input; // autofix + @panic(css.todo_stuff.depth); + } + + pub fn toCss(this: *const @This(), comptime W: type, dest: *Printer(W)) PrintErr!void { + _ = this; // autofix + _ = dest; // autofix + @panic(css.todo_stuff.depth); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } +}; + +/// An individual transform function (https://www.w3.org/TR/2019/CR-css-transforms-1-20190214/#two-d-transform-functions). +pub const Transform = union(enum) { + /// A 2D translation. + translate: struct { + x: LengthPercentage, + y: LengthPercentage, + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + }, + /// A translation in the X direction. + translate_x: LengthPercentage, + /// A translation in the Y direction. + translate_y: LengthPercentage, + /// A translation in the Z direction. + translate_z: Length, + /// A 3D translation. + translate_3d: struct { + x: LengthPercentage, + y: LengthPercentage, + z: Length, + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + }, + /// A 2D scale. + scale: struct { + x: NumberOrPercentage, + y: NumberOrPercentage, + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + }, + /// A scale in the X direction. + scale_x: NumberOrPercentage, + /// A scale in the Y direction. 
+ scale_y: NumberOrPercentage, + /// A scale in the Z direction. + scale_z: NumberOrPercentage, + /// A 3D scale. + scale_3d: struct { + x: NumberOrPercentage, + y: NumberOrPercentage, + z: NumberOrPercentage, + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + }, + /// A 2D rotation. + rotate: Angle, + /// A rotation around the X axis. + rotate_x: Angle, + /// A rotation around the Y axis. + rotate_y: Angle, + /// A rotation around the Z axis. + rotate_z: Angle, + /// A 3D rotation. + rotate_3d: struct { + x: f32, + y: f32, + z: f32, + angle: Angle, + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + }, + /// A 2D skew. + skew: struct { + x: Angle, + y: Angle, + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + }, + /// A skew along the X axis. + skew_x: Angle, + /// A skew along the Y axis. + skew_y: Angle, + /// A perspective transform. + perspective: Length, + /// A 2D matrix transform. + matrix: Matrix(f32), + /// A 3D matrix transform. + matrix_3d: Matrix3d(f32), + + pub fn parse(input: *css.Parser) Result(Transform) { + _ = input; // autofix + @panic(css.todo_stuff.depth); + } + + pub fn toCss(this: *const @This(), comptime W: type, dest: *Printer(W)) PrintErr!void { + _ = this; // autofix + _ = dest; // autofix + @panic(css.todo_stuff.depth); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } +}; + +/// A 2D matrix. +pub fn Matrix(comptime T: type) type { + return struct { + a: T, + b: T, + c: T, + d: T, + e: T, + f: T, + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + }; +} + +/// A 3D matrix. +pub fn Matrix3d(comptime T: type) type { + return struct { + m11: T, + m12: T, + m13: T, + m14: T, + m21: T, + m22: T, + m23: T, + m24: T, + m31: T, + m32: T, + m33: T, + m34: T, + m41: T, + m42: T, + m43: T, + m44: T, + }; +} + +/// A value for the [transform-style](https://drafts.csswg.org/css-transforms-2/#transform-style-property) property. +pub const TransformStyle = css.DefineEnumProperty(@compileError(css.todo_stuff.depth)); + +/// A value for the [transform-box](https://drafts.csswg.org/css-transforms-1/#transform-box) property. +pub const TransformBox = css.DefineEnumProperty(@compileError(css.todo_stuff.depth)); + +/// A value for the [backface-visibility](https://drafts.csswg.org/css-transforms-2/#backface-visibility-property) property. +pub const BackfaceVisibility = css.DefineEnumProperty(@compileError(css.todo_stuff.depth)); + +/// A value for the perspective property. +pub const Perspective = union(enum) { + /// No perspective transform is applied. + none, + /// Distance to the center of projection. + length: Length, +}; + +/// A value for the [translate](https://drafts.csswg.org/css-transforms-2/#propdef-translate) property. +pub const Translate = union(enum) { + /// The "none" keyword. + none, + + /// The x, y, and z translations. + xyz: struct { + /// The x translation. + x: LengthPercentage, + /// The y translation. + y: LengthPercentage, + /// The z translation. 
+ z: Length, + }, +}; + +/// A value for the [rotate](https://drafts.csswg.org/css-transforms-2/#propdef-rotate) property. +pub const Rotate = struct { + /// Rotation around the x axis. + x: f32, + /// Rotation around the y axis. + y: f32, + /// Rotation around the z axis. + z: f32, + /// The angle of rotation. + angle: Angle, +}; + +/// A value for the [scale](https://drafts.csswg.org/css-transforms-2/#propdef-scale) property. +pub const Scale = union(enum) { + /// The "none" keyword. + none, + + /// Scale on the x, y, and z axis. + xyz: struct { + /// Scale on the x axis. + x: NumberOrPercentage, + /// Scale on the y axis. + y: NumberOrPercentage, + /// Scale on the z axis. + z: NumberOrPercentage, + }, +}; diff --git a/src/css/properties/transition.zig b/src/css/properties/transition.zig new file mode 100644 index 0000000000..a44aa5cc11 --- /dev/null +++ b/src/css/properties/transition.zig @@ -0,0 +1,37 @@ +const std = @import("std"); +const bun = @import("root").bun; +const Allocator = std.mem.Allocator; +const ArrayList = std.ArrayListUnmanaged; + +pub const css = @import("../css_parser.zig"); + +const SmallList = css.SmallList; +const Printer = css.Printer; +const PrintErr = css.PrintErr; +const Error = css.Error; + +const ContainerName = css.css_rules.container.ContainerName; + +const LengthPercentage = css.css_values.length.LengthPercentage; +const CustomIdent = css.css_values.ident.CustomIdent; +const CSSString = css.css_values.string.CSSString; +const CSSNumber = css.css_values.number.CSSNumber; +const LengthPercentageOrAuto = css.css_values.length.LengthPercentageOrAuto; +const Size2D = css.css_values.size.Size2D; +const DashedIdent = css.css_values.ident.DashedIdent; +const Image = css.css_values.image.Image; +const CssColor = css.css_values.color.CssColor; +const Ratio = css.css_values.ratio.Ratio; +const Length = css.css_values.length.LengthValue; +const Rect = css.css_values.rect.Rect; +const NumberOrPercentage = css.css_values.percentage.NumberOrPercentage; +const CustomIdentList = css.css_values.ident.CustomIdentList; +const Angle = css.css_values.angle.Angle; +const Url = css.css_values.url.Url; +const Percentage = css.css_values.percentage.Percentage; + +const GenericBorder = css.css_properties.border.GenericBorder; +const LineStyle = css.css_properties.border.LineStyle; + +/// A value for the [transition](https://www.w3.org/TR/2018/WD-css-transitions-1-20181011/#transition-shorthand-property) property. 
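+/// e.g. `transition: opacity 0.3s ease-in-out 0.1s` combines transition-property, -duration, -timing-function, and -delay.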
+pub const Transition = @compileError(css.todo_stuff.depth); diff --git a/src/css/properties/ui.zig b/src/css/properties/ui.zig new file mode 100644 index 0000000000..58b7cec84d --- /dev/null +++ b/src/css/properties/ui.zig @@ -0,0 +1,109 @@ +const std = @import("std"); +const bun = @import("root").bun; +const Allocator = std.mem.Allocator; +const ArrayList = std.ArrayListUnmanaged; + +pub const css = @import("../css_parser.zig"); + +const SmallList = css.SmallList; +const Printer = css.Printer; +const PrintErr = css.PrintErr; +const Error = css.Error; + +const ContainerName = css.css_rules.container.ContainerName; + +const LengthPercentage = css.css_values.length.LengthPercentage; +const CustomIdent = css.css_values.ident.CustomIdent; +const CSSString = css.css_values.string.CSSString; +const CSSNumber = css.css_values.number.CSSNumber; +const LengthPercentageOrAuto = css.css_values.length.LengthPercentageOrAuto; +const Size2D = css.css_values.size.Size2D; +const DashedIdent = css.css_values.ident.DashedIdent; +const Image = css.css_values.image.Image; +const CssColor = css.css_values.color.CssColor; +const Ratio = css.css_values.ratio.Ratio; +const Length = css.css_values.length.LengthValue; +const Rect = css.css_values.rect.Rect; +const NumberOrPercentage = css.css_values.percentage.NumberOrPercentage; +const CustomIdentList = css.css_values.ident.CustomIdentList; +const Angle = css.css_values.angle.Angle; +const Url = css.css_values.url.Url; +const Percentage = css.css_values.percentage.Percentage; + +const GenericBorder = css.css_properties.border.GenericBorder; +const LineStyle = css.css_properties.border.LineStyle; + +/// A value for the [color-scheme](https://drafts.csswg.org/css-color-adjust/#color-scheme-prop) property. +pub const ColorScheme = packed struct(u8) { + /// Indicates that the element supports a light color scheme. + light: bool = false, + /// Indicates that the element supports a dark color scheme. + dark: bool = false, + /// Forbids the user agent from overriding the color scheme for the element. + only: bool = false, +}; + +/// A value for the [resize](https://www.w3.org/TR/2021/WD-css-ui-4-20210316/#resize) property. +pub const Resize = css.DefineEnumProperty(@compileError(css.todo_stuff.depth)); + +/// A value for the [cursor](https://www.w3.org/TR/2021/WD-css-ui-4-20210316/#cursor) property. +pub const Cursor = struct { + /// A list of cursor images. + images: SmallList(CursorImage), + /// A pre-defined cursor. + keyword: CursorKeyword, +}; + +/// A [cursor image](https://www.w3.org/TR/2021/WD-css-ui-4-20210316/#cursor) value, used in the `cursor` property. +/// +/// See [Cursor](Cursor). +pub const CursorImage = struct { + /// A url to the cursor image. + url: Url, + /// The location in the image where the mouse pointer appears. + hotspot: ?[2]CSSNumber, +}; + +/// A pre-defined [cursor](https://www.w3.org/TR/2021/WD-css-ui-4-20210316/#cursor) value, +/// used in the `cursor` property. +/// +/// See [Cursor](Cursor). +pub const CursorKeyword = css.DefineEnumProperty(@compileError(css.todo_stuff.depth)); + +/// A value for the [caret-color](https://www.w3.org/TR/2021/WD-css-ui-4-20210316/#caret-color) property. +pub const ColorOrAuto = union(enum) { + /// The `currentColor`, adjusted by the UA to ensure contrast against the background. + auto, + /// A color. + color: CssColor, +}; + +/// A value for the [caret-shape](https://www.w3.org/TR/2021/WD-css-ui-4-20210316/#caret-shape) property. 
+pub const CaretShape = css.DefineEnumProperty(@compileError(css.todo_stuff.depth)); + +/// A value for the [caret](https://www.w3.org/TR/2021/WD-css-ui-4-20210316/#caret) shorthand property. +pub const Caret = @compileError(css.todo_stuff.depth); + +/// A value for the [user-select](https://www.w3.org/TR/2021/WD-css-ui-4-20210316/#content-selection) property. +pub const UserSelect = css.DefineEnumProperty(@compileError(css.todo_stuff.depth)); + +/// A value for the [appearance](https://www.w3.org/TR/2021/WD-css-ui-4-20210316/#appearance-switching) property. +pub const Appearance = union(enum) { + none, + auto, + textfield, + menulist_button, + button, + checkbox, + listbox, + menulist, + meter, + progress_bar, + push_button, + radio, + searchfield, + slider_horizontal, + square_button, + textarea, + non_standard: []const u8, +}; diff --git a/src/css/rules/container.zig b/src/css/rules/container.zig new file mode 100644 index 0000000000..13a11ca966 --- /dev/null +++ b/src/css/rules/container.zig @@ -0,0 +1,355 @@ +const std = @import("std"); +pub const css = @import("../css_parser.zig"); +const bun = @import("root").bun; +const Result = css.Result; +const ArrayList = std.ArrayListUnmanaged; +const MediaList = css.MediaList; +const CustomMedia = css.CustomMedia; +const Printer = css.Printer; +const Maybe = css.Maybe; +const PrinterError = css.PrinterError; +const PrintErr = css.PrintErr; +const Location = css.css_rules.Location; +const CustomIdent = css.css_values.ident.CustomIdent; +const CustomIdentFns = css.css_values.ident.CustomIdentFns; +const QueryFeature = css.media_query.QueryFeature; +const QueryConditionFlags = css.media_query.QueryConditionFlags; +const Operator = css.media_query.Operator; + +pub const ContainerName = struct { + v: css.css_values.ident.CustomIdent, + pub fn parse(input: *css.Parser) Result(ContainerName) { + const ident = switch (CustomIdentFns.parse(input)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + + // todo_stuff.match_ignore_ascii_case; + if (bun.strings.eqlCaseInsensitiveASCIIICheckLength("none", ident.v) or + bun.strings.eqlCaseInsensitiveASCIIICheckLength("and", ident.v) or + bun.strings.eqlCaseInsensitiveASCIIICheckLength("not", ident.v) or + bun.strings.eqlCaseInsensitiveASCIIICheckLength("or", ident.v)) + return .{ .err = input.newUnexpectedTokenError(.{ .ident = ident.v }) }; + + return .{ .result = ContainerName{ .v = ident } }; + } + + const This = @This(); + + pub fn toCss(this: *const This, comptime W: type, dest: *Printer(W)) PrintErr!void { + return try CustomIdentFns.toCss(&this.v, W, dest); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } +}; + +pub const ContainerNameFns = ContainerName; +pub const ContainerSizeFeature = QueryFeature(ContainerSizeFeatureId); + +pub const ContainerSizeFeatureId = enum { + /// The [width](https://w3c.github.io/csswg-drafts/css-contain-3/#width) size container feature. + width, + /// The [height](https://w3c.github.io/csswg-drafts/css-contain-3/#height) size container feature. + height, + /// The [inline-size](https://w3c.github.io/csswg-drafts/css-contain-3/#inline-size) size container feature. + @"inline-size", + /// The [block-size](https://w3c.github.io/csswg-drafts/css-contain-3/#block-size) size container feature. + @"block-size", + /// The [aspect-ratio](https://w3c.github.io/csswg-drafts/css-contain-3/#aspect-ratio) size container feature. 
+ @"aspect-ratio", + /// The [orientation](https://w3c.github.io/csswg-drafts/css-contain-3/#orientation) size container feature. + orientation, + + pub usingnamespace css.DeriveValueType(@This()); + + pub const ValueTypeMap = .{ + .width = css.MediaFeatureType.length, + .height = css.MediaFeatureType.length, + .@"inline-size" = css.MediaFeatureType.length, + .@"block-size" = css.MediaFeatureType.length, + .@"aspect-ratio" = css.MediaFeatureType.ratio, + .orientation = css.MediaFeatureType.ident, + }; + + pub fn asStr(this: *const @This()) []const u8 { + return css.enum_property_util.asStr(@This(), this); + } + + pub fn parse(input: *css.Parser) Result(@This()) { + return css.enum_property_util.parse(@This(), input); + } + + pub fn toCss(this: *const @This(), comptime W: type, dest: *Printer(W)) PrintErr!void { + return css.enum_property_util.toCss(@This(), this, W, dest); + } + + pub fn toCssWithPrefix(this: *const @This(), prefix: []const u8, comptime W: type, dest: *Printer(W)) PrintErr!void { + try dest.writeStr(prefix); + try this.toCss(W, dest); + } +}; + +/// Represents a style query within a container condition. +pub const StyleQuery = union(enum) { + /// A style feature, implicitly parenthesized. + feature: css.Property, + + /// A negation of a condition. + not: *StyleQuery, + + /// A set of joint operations. + operation: struct { + /// The operator for the conditions. + operator: css.media_query.Operator, + /// The conditions for the operator. + conditions: ArrayList(StyleQuery), + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + }, + + pub fn toCss(this: *const StyleQuery, comptime W: type, dest: *Printer(W)) PrintErr!void { + switch (this.*) { + .feature => |f| try f.toCss(W, dest, false), + .not => |c| { + try dest.writeStr("not "); + return try css.media_query.toCssWithParensIfNeeded( + c, + W, + dest, + c.needsParens(null, &dest.targets), + ); + }, + .operation => |op| return css.media_query.operationToCss( + StyleQuery, + op.operator, + &op.conditions, + W, + dest, + ), + } + } + + pub fn parseFeature(input: *css.Parser) Result(StyleQuery) { + const property_id = switch (css.PropertyId.parse(input)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + if (input.expectColon().asErr()) |e| return .{ .err = e }; + input.skipWhitespace(); + const opts = css.ParserOptions.default(input.allocator(), null); + const feature = .{ + .feature = switch (css.Property.parse( + property_id, + input, + &opts, + )) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }, + }; + _ = input.tryParse(css.parseImportant, .{}); + return .{ .result = feature }; + } + + pub fn createNegation(condition: *StyleQuery) StyleQuery { + return .{ .not = condition }; + } + + pub fn createOperation(operator: Operator, conditions: ArrayList(StyleQuery)) StyleQuery { + return .{ + .operation = .{ + .operator = operator, + .conditions = conditions, + }, + }; + } + + pub fn needsParens( + this: *const StyleQuery, + parent_operator: ?Operator, + _: *const css.Targets, + ) bool { + return switch (this.*) { + .not => true, + .operation => |op| op.operator == parent_operator, + .feature => true, + }; + } + + pub fn parseStyleQuery(input: *css.Parser) Result(@This()) { + return .{ .err = input.newErrorForNextToken() }; + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } +}; + +pub const 
ContainerCondition = union(enum) { + /// A size container feature, implicitly parenthesized. + feature: ContainerSizeFeature, + /// A negation of a condition. + not: *ContainerCondition, + /// A set of joint operations. + operation: struct { + /// The operator for the conditions. + operator: css.media_query.Operator, + /// The conditions for the operator. + conditions: ArrayList(ContainerCondition), + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + }, + /// A style query. + style: StyleQuery, + + const This = @This(); + + pub fn parse(input: *css.Parser) Result(ContainerCondition) { + return css.media_query.parseQueryCondition( + ContainerCondition, + input, + QueryConditionFlags{ + .allow_or = true, + .allow_style = true, + }, + ); + } + + pub fn toCss(this: *const This, comptime W: type, dest: *Printer(W)) PrintErr!void { + switch (this.*) { + .feature => |f| try f.toCss(W, dest), + .not => |c| { + try dest.writeStr("not "); + return try css.media_query.toCssWithParensIfNeeded( + c, + W, + dest, + c.needsParens(null, &dest.targets), + ); + }, + .operation => |op| try css.media_query.operationToCss(ContainerCondition, op.operator, &op.conditions, W, dest), + .style => |query| { + try dest.writeStr("style("); + try query.toCss(W, dest); + try dest.writeChar(')'); + }, + } + } + + pub fn parseFeature(input: *css.Parser) Result(ContainerCondition) { + const feature = switch (QueryFeature(ContainerSizeFeatureId).parse(input)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + return .{ .result = .{ .feature = feature } }; + } + + pub fn createNegation(condition: *ContainerCondition) ContainerCondition { + return .{ .not = condition }; + } + + pub fn createOperation(operator: Operator, conditions: ArrayList(ContainerCondition)) ContainerCondition { + return .{ + .operation = .{ + .operator = operator, + .conditions = conditions, + }, + }; + } + + pub fn parseStyleQuery(input: *css.Parser) Result(ContainerCondition) { + const Fns = struct { + pub inline fn adaptedParseQueryCondition(i: *css.Parser, flags: QueryConditionFlags) Result(StyleQuery) { + return css.media_query.parseQueryCondition(StyleQuery, i, flags); + } + + pub fn parseNestedBlockFn(_: void, i: *css.Parser) Result(ContainerCondition) { + if (i.tryParse( + @This().adaptedParseQueryCondition, + .{ + QueryConditionFlags{ .allow_or = true }, + }, + ).asValue()) |res| { + return .{ .result = .{ .style = res } }; + } + + return .{ .result = .{ + .style = switch (StyleQuery.parseFeature(i)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }, + } }; + } + }; + return input.parseNestedBlock(ContainerCondition, {}, Fns.parseNestedBlockFn); + } + + pub fn needsParens( + this: *const ContainerCondition, + parent_operator: ?Operator, + targets: *const css.Targets, + ) bool { + return switch (this.*) { + .not => true, + .operation => |op| op.operator == parent_operator, + .feature => |f| f.needsParens(parent_operator, targets), + .style => false, + }; + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } +}; + +/// A [@container](https://drafts.csswg.org/css-contain-3/#container-rule) rule. +pub fn ContainerRule(comptime R: type) type { + return struct { + /// The name of the container. + name: ?ContainerName, + /// The container condition. + condition: ContainerCondition, + /// The rules within the `@container` rule. 
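+        /// For example, in `@container card (inline-size > 30em) { h2 { font-size: 1.5em; } }`,
+        /// the nested `h2` rule is stored in this list.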
+ rules: css.CssRuleList(R), + /// The location of the rule in the source file. + loc: Location, + + const This = @This(); + + pub fn toCss(this: *const This, comptime W: type, dest: *Printer(W)) PrintErr!void { + // #[cfg(feature = "sourcemap")] + // dest.add_mapping(self.loc); + + try dest.writeStr("@container "); + if (this.name) |*name| { + try name.toCss(W, dest); + try dest.writeChar(' '); + } + + // Don't downlevel range syntax in container queries. + const exclude = dest.targets.exclude; + dest.targets.exclude.insert(css.targets.Features.media_queries); + try this.condition.toCss(W, dest); + dest.targets.exclude = exclude; + + try dest.whitespace(); + try dest.writeChar('{'); + dest.indent(); + try dest.newline(); + try this.rules.toCss(W, dest); + dest.dedent(); + try dest.newline(); + try dest.writeChar('}'); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) This { + return css.implementDeepClone(@This(), this, allocator); + } + }; +} diff --git a/src/css/rules/counter_style.zig b/src/css/rules/counter_style.zig new file mode 100644 index 0000000000..568aae137e --- /dev/null +++ b/src/css/rules/counter_style.zig @@ -0,0 +1,51 @@ +const std = @import("std"); +pub const css = @import("../css_parser.zig"); +const bun = @import("root").bun; +const Error = css.Error; +const ArrayList = std.ArrayListUnmanaged; +const MediaList = css.MediaList; +const CustomMedia = css.CustomMedia; +const Printer = css.Printer; +const Maybe = css.Maybe; +const PrinterError = css.PrinterError; +const PrintErr = css.PrintErr; +const Dependency = css.Dependency; +const dependencies = css.dependencies; +const Url = css.css_values.url.Url; +const Size2D = css.css_values.size.Size2D; +const fontprops = css.css_properties.font; +const LayerName = css.css_rules.layer.LayerName; +const Location = css.css_rules.Location; +const Angle = css.css_values.angle.Angle; +const FontStyleProperty = css.css_properties.font.FontStyle; +const FontFamily = css.css_properties.font.FontFamily; +const FontWeight = css.css_properties.font.FontWeight; +const FontStretch = css.css_properties.font.FontStretch; +const CustomProperty = css.css_properties.custom.CustomProperty; +const CustomPropertyName = css.css_properties.custom.CustomPropertyName; +const DashedIdent = css.css_values.ident.DashedIdent; + +/// A [@counter-style](https://drafts.csswg.org/css-counter-styles/#the-counter-style-rule) rule. +pub const CounterStyleRule = struct { + /// The name of the counter style to declare. + name: css.css_values.ident.CustomIdent, + /// Declarations in the `@counter-style` rule. + declarations: css.DeclarationBlock, + /// The location of the rule in the source file. 
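+    // Example: `@counter-style thumbs { system: cyclic; symbols: "👍"; suffix: " "; }`
+    // parses into the name `thumbs` plus a declaration block.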
+ loc: Location, + + const This = @This(); + + pub fn toCss(this: *const This, comptime W: type, dest: *Printer(W)) PrintErr!void { + // #[cfg(feature = "sourcemap")] + // dest.add_mapping(self.loc); + + try dest.writeStr("@counter-style"); + try css.css_values.ident.CustomIdentFns.toCss(&this.name, W, dest); + try this.declarations.toCssBlock(W, dest); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) This { + return css.implementDeepClone(@This(), this, allocator); + } +}; diff --git a/src/css/rules/custom_media.zig b/src/css/rules/custom_media.zig new file mode 100644 index 0000000000..cc0d7d363e --- /dev/null +++ b/src/css/rules/custom_media.zig @@ -0,0 +1,41 @@ +const std = @import("std"); +const Allocator = std.mem.Allocator; +const bun = @import("root").bun; +const logger = bun.logger; +const Log = logger.Log; + +pub const css = @import("../css_parser.zig"); +pub const css_values = @import("../values/values.zig"); +pub const Error = css.Error; +const Printer = css.Printer; +const PrintErr = css.PrintErr; + +/// A [@custom-media](https://drafts.csswg.org/mediaqueries-5/#custom-mq) rule. +pub const CustomMediaRule = struct { + /// The name of the declared media query. + name: css_values.ident.DashedIdent, + /// The media query to declare. + query: css.MediaList, + /// The location of the rule in the source file. + loc: css.Location, + + const This = @This(); + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) This { + return This{ + .name = this.name, + .query = this.query.deepClone(allocator), + .loc = this.loc, + }; + } + + pub fn toCss(this: *const This, comptime W: type, dest: *Printer(W)) PrintErr!void { + // #[cfg(feature = "sourcemap")] + // dest.add_mapping(self.loc); + try dest.writeStr("@custom-media "); + try css_values.ident.DashedIdentFns.toCss(&this.name, W, dest); + try dest.writeChar(' '); + try this.query.toCss(W, dest); + try dest.writeChar(';'); + } +}; diff --git a/src/css/rules/document.zig b/src/css/rules/document.zig new file mode 100644 index 0000000000..485aef4464 --- /dev/null +++ b/src/css/rules/document.zig @@ -0,0 +1,59 @@ +const std = @import("std"); +pub const css = @import("../css_parser.zig"); +const bun = @import("root").bun; +const Error = css.Error; +const ArrayList = std.ArrayListUnmanaged; +const MediaList = css.MediaList; +const CustomMedia = css.CustomMedia; +const Printer = css.Printer; +const Maybe = css.Maybe; +const PrinterError = css.PrinterError; +const PrintErr = css.PrintErr; +const Dependency = css.Dependency; +const dependencies = css.dependencies; +const Url = css.css_values.url.Url; +const Size2D = css.css_values.size.Size2D; +const fontprops = css.css_properties.font; +const LayerName = css.css_rules.layer.LayerName; +const Location = css.css_rules.Location; +const Angle = css.css_values.angle.Angle; +const FontStyleProperty = css.css_properties.font.FontStyle; +const FontFamily = css.css_properties.font.FontFamily; +const FontWeight = css.css_properties.font.FontWeight; +const FontStretch = css.css_properties.font.FontStretch; +const CustomProperty = css.css_properties.custom.CustomProperty; +const CustomPropertyName = css.css_properties.custom.CustomPropertyName; +const DashedIdent = css.css_values.ident.DashedIdent; + +/// A [@-moz-document](https://www.w3.org/TR/2012/WD-css3-conditional-20120911/#at-document) rule. 
+/// +/// Note that only the `url-prefix()` function with no arguments is supported, and only the `-moz` prefix +/// is allowed since Firefox was the only browser that ever implemented this rule. +pub fn MozDocumentRule(comptime R: type) type { + return struct { + /// Nested rules within the `@-moz-document` rule. + rules: css.CssRuleList(R), + /// The location of the rule in the source file. + loc: Location, + + const This = @This(); + + pub fn toCss(this: *const This, comptime W: type, dest: *Printer(W)) PrintErr!void { + // #[cfg(feature = "sourcemap")] + // dest.add_mapping(self.loc); + try dest.writeStr("@-moz-document url-prefix()"); + try dest.whitespace(); + try dest.writeChar('{'); + dest.indent(); + try dest.newline(); + try this.rules.toCss(W, dest); + dest.dedent(); + try dest.newline(); + try dest.writeChar('}'); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) This { + return css.implementDeepClone(@This(), this, allocator); + } + }; +} diff --git a/src/css/rules/font_face.zig b/src/css/rules/font_face.zig new file mode 100644 index 0000000000..2867b2ca64 --- /dev/null +++ b/src/css/rules/font_face.zig @@ -0,0 +1,743 @@ +const std = @import("std"); +pub const css = @import("../css_parser.zig"); +const bun = @import("root").bun; +const ArrayList = std.ArrayListUnmanaged; +const MediaList = css.MediaList; +const CustomMedia = css.CustomMedia; +const Printer = css.Printer; +const Maybe = css.Maybe; +const PrinterError = css.PrinterError; +const PrintErr = css.PrintErr; +const Dependency = css.Dependency; +const dependencies = css.dependencies; +const Url = css.css_values.url.Url; +const Size2D = css.css_values.size.Size2D; +const fontprops = css.css_properties.font; +const LayerName = css.css_rules.layer.LayerName; +const SupportsCondition = css.css_rules.supports.SupportsCondition; +const Location = css.css_rules.Location; +const Angle = css.css_values.angle.Angle; +const FontStyleProperty = css.css_properties.font.FontStyle; +const FontFamily = css.css_properties.font.FontFamily; +const FontWeight = css.css_properties.font.FontWeight; +const FontStretch = css.css_properties.font.FontStretch; +const CustomProperty = css.css_properties.custom.CustomProperty; +const CustomPropertyName = css.css_properties.custom.CustomPropertyName; +const Result = css.Result; + +/// A property within an `@font-face` rule. +/// +/// See [FontFaceRule](FontFaceRule). +pub const FontFaceProperty = union(enum) { + /// The `src` property. + source: ArrayList(Source), + + /// The `font-family` property. + font_family: fontprops.FontFamily, + + /// The `font-style` property. + font_style: FontStyle, + + /// The `font-weight` property. + font_weight: Size2D(fontprops.FontWeight), + + /// The `font-stretch` property. + font_stretch: Size2D(fontprops.FontStretch), + + /// The `unicode-range` property. + unicode_range: ArrayList(UnicodeRange), + + /// An unknown or unsupported property. + custom: css.css_properties.custom.CustomProperty, + + const This = @This(); + + pub fn toCss(this: *const This, comptime W: type, dest: *Printer(W)) PrintErr!void { + const Helpers = struct { + pub fn writeProperty( + d: *Printer(W), + comptime prop: []const u8, + value: anytype, + comptime multi: bool, + ) PrintErr!void { + try d.writeStr(prop); + try d.delim(':', false); + if (comptime multi) { + const len = value.items.len; + for (value.items, 0..) 
|*val, idx| { + try val.toCss(W, d); + if (idx < len - 1) { + try d.delim(',', false); + } + } + } else { + try value.toCss(W, d); + } + } + }; + return switch (this.*) { + .source => |value| Helpers.writeProperty(dest, "src", value, true), + .font_family => |value| Helpers.writeProperty(dest, "font-family", value, false), + .font_style => |value| Helpers.writeProperty(dest, "font-style", value, false), + .font_weight => |value| Helpers.writeProperty(dest, "font-weight", value, false), + .font_stretch => |value| Helpers.writeProperty(dest, "font-stretch", value, false), + .unicode_range => |value| Helpers.writeProperty(dest, "unicode-range", value, true), + .custom => |custom| { + try dest.writeStr(this.custom.name.asStr()); + try dest.delim(':', false); + return custom.value.toCss(W, dest, true); + }, + }; + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) This { + return css.implementDeepClone(@This(), this, allocator); + } +}; + +/// A contiguous range of Unicode code points. +/// +/// Cannot be empty. Can represent a single code point when start == end. +pub const UnicodeRange = struct { + /// Inclusive start of the range. In [0, end]. + start: u32, + + /// Inclusive end of the range. In [0, 0x10FFFF]. + end: u32, + + pub fn toCss(this: *const UnicodeRange, comptime W: type, dest: *Printer(W)) PrintErr!void { + // Attempt to optimize the range to use question mark syntax. + if (this.start != this.end) { + // Find the first hex digit that differs between the start and end values. + var shift: u5 = 24; + var mask: u32 = @as(u32, 0xf) << shift; + while (shift > 0) { + const c1 = this.start & mask; + const c2 = this.end & mask; + if (c1 != c2) { + break; + } + + mask = mask >> 4; + shift -= 4; + } + + // Get the remainder of the value. This must be 0x0 to 0xf for the rest + // of the value to use the question mark syntax. 
+            shift += 4;
+            const remainder_mask: u32 = (@as(u32, 1) << shift) - @as(u32, 1);
+            const start_remainder = this.start & remainder_mask;
+            const end_remainder = this.end & remainder_mask;
+
+            if (start_remainder == 0 and end_remainder == remainder_mask) {
+                const start = (this.start & ~remainder_mask) >> shift;
+                if (start != 0) {
+                    try dest.writeFmt("U+{x}", .{start});
+                } else {
+                    try dest.writeStr("U+");
+                }
+
+                while (shift > 0) {
+                    try dest.writeChar('?');
+                    shift -= 4;
+                }
+
+                return;
+            }
+        }
+
+        try dest.writeFmt("U+{x}", .{this.start});
+        if (this.end != this.start) {
+            try dest.writeFmt("-{x}", .{this.end});
+        }
+    }
+
+    /// https://drafts.csswg.org/css-syntax/#urange-syntax
+    pub fn parse(input: *css.Parser) Result(UnicodeRange) {
+        // <urange> =
+        //   u '+' <ident-token> '?'* |
+        //   u <dimension-token> '?'* |
+        //   u <number-token> '?'* |
+        //   u <number-token> <dimension-token> |
+        //   u <number-token> <number-token> |
+        //   u '+' '?'+
+        if (input.expectIdentMatching("u").asErr()) |e| return .{ .err = e };
+        const after_u = input.position();
+        if (parseTokens(input).asErr()) |e| return .{ .err = e };
+
+        // This deviates from the spec in case there are CSS comments
+        // between tokens in the middle of one <unicode-range>,
+        // but oh well…
+        const concatenated_tokens = input.sliceFrom(after_u);
+
+        const range = if (parseConcatenated(concatenated_tokens).asValue()) |range|
+            range
+        else
+            return .{ .err = input.newBasicUnexpectedTokenError(.{ .ident = concatenated_tokens }) };
+
+        if (range.end > 0x10FFFF or range.start > range.end) {
+            return .{ .err = input.newBasicUnexpectedTokenError(.{ .ident = concatenated_tokens }) };
+        }
+
+        return .{ .result = range };
+    }
+
+    fn parseTokens(input: *css.Parser) Result(void) {
+        const tok = switch (input.nextIncludingWhitespace()) {
+            .result => |vv| vv,
+            .err => |e| return .{ .err = e },
+        };
+        switch (tok.*) {
+            .dimension => return parseQuestionMarks(input),
+            .number => {
+                const after_number = input.state();
+                const token = switch (input.nextIncludingWhitespace()) {
+                    .result => |vv| vv,
+                    .err => {
+                        input.reset(&after_number);
+                        return .{ .result = {} };
+                    },
+                };
+
+                if (token.* == .delim and token.delim == '?') return parseQuestionMarks(input);
+                if (token.* == .delim or token.* == .number) return .{ .result = {} };
+                return .{ .result = {} };
+            },
+            .delim => |c| {
+                if (c == '+') {
+                    const next = switch (input.nextIncludingWhitespace()) {
+                        .result => |vv| vv,
+                        .err => |e| return .{ .err = e },
+                    };
+                    if (!(next.* == .ident or (next.* == .delim and next.delim == '?'))) {
+                        return .{ .err = input.newBasicUnexpectedTokenError(next.*) };
+                    }
+                    return parseQuestionMarks(input);
+                }
+            },
+            else => {},
+        }
+        return .{ .err = input.newBasicUnexpectedTokenError(tok.*) };
+    }
+
+    /// Consume as many '?' as possible
+    fn parseQuestionMarks(input: *css.Parser) Result(void) {
+        while (true) {
+            const start = input.state();
+            if (input.nextIncludingWhitespace().asValue()) |tok| if (tok.* == .delim and tok.delim == '?') continue;
+            input.reset(&start);
+            return .{ .result = {} };
+        }
+    }
+
+    fn parseConcatenated(_text: []const u8) css.Maybe(UnicodeRange, void) {
+        var text = if (_text.len > 0 and _text[0] == '+') _text[1..]
else { + return .{ .err = {} }; + }; + const first_hex_value, const hex_digit_count = consumeHex(&text); + const question_marks = consumeQuestionMarks(&text); + const consumed = hex_digit_count + question_marks; + + if (consumed == 0 or consumed > 6) { + return .{ .err = {} }; + } + + if (question_marks > 0) { + if (text.len == 0) return .{ .result = UnicodeRange{ + .start = first_hex_value << @intCast(question_marks * 4), + .end = ((first_hex_value + 1) << @intCast(question_marks * 4)) - 1, + } }; + } else if (text.len == 0) { + return .{ .result = UnicodeRange{ + .start = first_hex_value, + .end = first_hex_value, + } }; + } else { + if (text.len > 0 and text[0] == '-') { + text = text[1..]; + const second_hex_value, const hex_digit_count2 = consumeHex(&text); + if (hex_digit_count2 > 0 and hex_digit_count2 <= 6 and text.len == 0) { + return .{ .result = UnicodeRange{ + .start = first_hex_value, + .end = second_hex_value, + } }; + } + } + } + return .{ .err = {} }; + } + + fn consumeQuestionMarks(text: *[]const u8) usize { + var question_marks: usize = 0; + while (bun.strings.splitFirstWithExpected(text.*, '?')) |rest| { + question_marks += 1; + text.* = rest; + } + return question_marks; + } + + fn consumeHex(text: *[]const u8) struct { u32, usize } { + var value: u32 = 0; + var digits: usize = 0; + while (bun.strings.splitFirst(text.*)) |result| { + if (toHexDigit(result.first)) |digit_value| { + value = value * 0x10 + digit_value; + digits += 1; + text.* = result.rest; + } else { + break; + } + } + return .{ value, digits }; + } + + fn toHexDigit(b: u8) ?u32 { + var digit = @as(u32, b) -% @as(u32, '0'); + if (digit < 10) return digit; + // Force the 6th bit to be set to ensure ascii is lower case. + // digit = (@as(u32, b) | 0b10_0000).wrapping_sub('a' as u32).saturating_add(10); + digit = (@as(u32, b) | 0b10_0000) -% (@as(u32, 'a') +% 10); + return if (digit < 16) digit else null; + } +}; + +pub const FontStyle = union(enum) { + /// Normal font style. + normal, + + /// Italic font style. + italic, + + /// Oblique font style, with a custom angle. + oblique: Size2D(css.css_values.angle.Angle), + + pub fn parse(input: *css.Parser) Result(FontStyle) { + const property = switch (FontStyleProperty.parse(input)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + return .{ + .result = switch (property) { + .normal => .normal, + .italic => .italic, + .oblique => |angle| { + const second_angle = if (input.tryParse(css.css_values.angle.Angle.parse, .{}).asValue()) |a| a else angle; + return .{ .result = .{ + .oblique = .{ .a = angle, .b = second_angle }, + } }; + }, + }, + }; + } + + pub fn toCss(this: *const FontStyle, comptime W: type, dest: *Printer(W)) PrintErr!void { + switch (this.*) { + .normal => try dest.writeStr("normal"), + .italic => try dest.writeStr("italic"), + .oblique => |angle| { + try dest.writeStr("oblique"); + if (!angle.eql(&FontStyle.defaultObliqueAngle())) { + try dest.writeChar(' '); + try angle.toCss(W, dest); + } + }, + } + } + + fn defaultObliqueAngle() Size2D(Angle) { + return Size2D(Angle){ + .a = FontStyleProperty.defaultObliqueAngle(), + .b = FontStyleProperty.defaultObliqueAngle(), + }; + } +}; + +/// A font format keyword in the `format()` function of the +/// [src](https://drafts.csswg.org/css-fonts/#src-desc) +/// property of an `@font-face` rule. +pub const FontFormat = union(enum) { + /// A WOFF 1.0 font. + woff, + + /// A WOFF 2.0 font. + woff2, + + /// A TrueType font. + truetype, + + /// An OpenType font. 
+ opentype, + + /// An Embedded OpenType (.eot) font. + embedded_opentype, + + /// OpenType Collection. + collection, + + /// An SVG font. + svg, + + /// An unknown format. + string: []const u8, + + pub fn parse(input: *css.Parser) Result(FontFormat) { + const s = switch (input.expectIdentOrString()) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + + if (bun.strings.eqlCaseInsensitiveASCIIICheckLength("woff", s)) { + return .{ .result = .woff }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength("woff2", s)) { + return .{ .result = .woff2 }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength("truetype", s)) { + return .{ .result = .truetype }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength("opentype", s)) { + return .{ .result = .opentype }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength("embedded-opentype", s)) { + return .{ .result = .embedded_opentype }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength("collection", s)) { + return .{ .result = .collection }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength("svg", s)) { + return .{ .result = .svg }; + } else { + return .{ .result = .{ .string = s } }; + } + } + + pub fn toCss(this: *const FontFormat, comptime W: type, dest: *Printer(W)) PrintErr!void { + // Browser support for keywords rather than strings is very limited. + // https://developer.mozilla.org/en-US/docs/Web/CSS/@font-face/src + switch (this.*) { + .woff => try dest.writeStr("woff"), + .woff2 => try dest.writeStr("woff2"), + .truetype => try dest.writeStr("truetype"), + .opentype => try dest.writeStr("opentype"), + .embedded_opentype => try dest.writeStr("embedded-opentype"), + .collection => try dest.writeStr("collection"), + .svg => try dest.writeStr("svg"), + .string => try dest.writeStr(this.string), + } + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } +}; + +/// A value for the [src](https://drafts.csswg.org/css-fonts/#src-desc) +/// property in an `@font-face` rule. +pub const Source = union(enum) { + /// A `url()` with optional format metadata. + url: UrlSource, + + /// The `local()` function. 
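+    /// For example, in `src: local("Helvetica Neue"), url("font.woff2") format("woff2");`
+    /// the first source is a `local()` value and the second is a `url()` value.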
+ local: fontprops.FontFamily, + + pub fn parse(input: *css.Parser) Result(Source) { + switch (input.tryParse(UrlSource.parse, .{})) { + .result => |url| .{ .result = return .{ .result = .{ .url = url } } }, + .err => |e| { + if (e.kind == .basic and e.kind.basic == .at_rule_body_invalid) { + return .{ .err = e }; + } + }, + } + + if (input.expectFunctionMatching("local").asErr()) |e| return .{ .err = e }; + + const Fn = struct { + pub fn parseNestedBlock(_: void, i: *css.Parser) Result(fontprops.FontFamily) { + return fontprops.FontFamily.parse(i); + } + }; + const local = switch (input.parseNestedBlock(fontprops.FontFamily, {}, Fn.parseNestedBlock)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + return .{ .result = .{ .local = local } }; + } + + pub fn toCss(this: *const Source, comptime W: type, dest: *Printer(W)) PrintErr!void { + switch (this.*) { + .url => try this.url.toCss(W, dest), + .local => { + try dest.writeStr("local("); + try this.local.toCss(W, dest); + try dest.writeChar(')'); + }, + } + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } +}; + +pub const FontTechnology = enum { + /// A font format keyword in the `format()` function of the + /// [src](https://drafts.csswg.org/css-fonts/#src-desc) + /// property of an `@font-face` rule. + /// A font features tech descriptor in the `tech()`function of the + /// [src](https://drafts.csswg.org/css-fonts/#font-features-tech-values) + /// property of an `@font-face` rule. + /// Supports OpenType Features. + /// https://docs.microsoft.com/en-us/typography/opentype/spec/featurelist + @"features-opentype", + + /// Supports Apple Advanced Typography Font Features. + /// https://developer.apple.com/fonts/TrueType-Reference-Manual/RM09/AppendixF.html + @"features-aat", + + /// Supports Graphite Table Format. + /// https://scripts.sil.org/cms/scripts/render_download.php?site_id=nrsi&format=file&media_id=GraphiteBinaryFormat_3_0&filename=GraphiteBinaryFormat_3_0.pdf + @"features-graphite", + + /// A color font tech descriptor in the `tech()`function of the + /// [src](https://drafts.csswg.org/css-fonts/#src-desc) + /// property of an `@font-face` rule. + /// Supports the `COLR` v0 table. + @"color-colrv0", + + /// Supports the `COLR` v1 table. + @"color-colrv1", + + /// Supports the `SVG` table. + @"color-svg", + + /// Supports the `sbix` table. + @"color-sbix", + + /// Supports the `CBDT` table. + @"color-cbdt", + + /// Supports Variations + /// The variations tech refers to the support of font variations + variations, + + /// Supports Palettes + /// The palettes tech refers to support for font palettes + palettes, + + /// Supports Incremental + /// The incremental tech refers to client support for incremental font loading, using either the range-request or the patch-subset method + incremental, + + pub fn asStr(this: *const @This()) []const u8 { + return css.enum_property_util.asStr(@This(), this); + } + + pub fn parse(input: *css.Parser) Result(@This()) { + return css.enum_property_util.parse(@This(), input); + } + + pub fn toCss(this: *const @This(), comptime W: type, dest: *Printer(W)) PrintErr!void { + return css.enum_property_util.toCss(@This(), this, W, dest); + } +}; + +/// A `url()` value for the [src](https://drafts.csswg.org/css-fonts/#src-desc) +/// property in an `@font-face` rule. +pub const UrlSource = struct { + /// The URL. + url: Url, + + /// Optional `format()` function. 
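+    /// For example, the `format("woff2")` in `url("font.woff2") format("woff2") tech(variations)`.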
+ format: ?FontFormat, + + /// Optional `tech()` function. + tech: ArrayList(FontTechnology), + + pub fn parse(input: *css.Parser) Result(UrlSource) { + const url = switch (Url.parse(input)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + + const format = if (input.tryParse(css.Parser.expectFunctionMatching, .{"format"}).isOk()) format: { + switch (input.parseNestedBlock(FontFormat, {}, css.voidWrap(FontFormat, FontFormat.parse))) { + .result => |vv| break :format vv, + .err => |e| return .{ .err = e }, + } + } else null; + + const tech = if (input.tryParse(css.Parser.expectFunctionMatching, .{"tech"}).isOk()) tech: { + const Fn = struct { + pub fn parseNestedBlockFn(_: void, i: *css.Parser) Result(ArrayList(FontTechnology)) { + return i.parseList(FontTechnology, FontTechnology.parse); + } + }; + break :tech switch (input.parseNestedBlock(ArrayList(FontTechnology), {}, Fn.parseNestedBlockFn)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + } else ArrayList(FontTechnology){}; + + return .{ + .result = UrlSource{ .url = url, .format = format, .tech = tech }, + }; + } + + pub fn toCss(this: *const UrlSource, comptime W: type, dest: *Printer(W)) PrintErr!void { + try this.url.toCss(W, dest); + if (this.format) |*format| { + try dest.whitespace(); + try dest.writeStr("format("); + try format.toCss(W, dest); + try dest.writeChar(')'); + } + + if (this.tech.items.len != 0) { + try dest.whitespace(); + try dest.writeStr("tech("); + try css.to_css.fromList(FontTechnology, &this.tech, W, dest); + try dest.writeChar(')'); + } + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } +}; + +/// A [@font-face](https://drafts.csswg.org/css-fonts/#font-face-rule) rule. +pub const FontFaceRule = struct { + /// Declarations in the `@font-face` rule. + properties: ArrayList(FontFaceProperty), + /// The location of the rule in the source file. + loc: Location, + + const This = @This(); + + pub fn toCss(this: *const This, comptime W: type, dest: *Printer(W)) PrintErr!void { + // #[cfg(feature = "sourcemap")] + // dest.add_mapping(self.loc); + + try dest.writeStr("@font-face"); + try dest.whitespace(); + try dest.writeChar('{'); + dest.indent(); + const len = this.properties.items.len; + for (this.properties.items, 0..) 
|*prop, i| {
+            try dest.newline();
+            try prop.toCss(W, dest);
+            if (i != len - 1 or !dest.minify) {
+                try dest.writeChar(';');
+            }
+        }
+        dest.dedent();
+        try dest.newline();
+        try dest.writeChar('}');
+    }
+
+    pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) This {
+        return css.implementDeepClone(@This(), this, allocator);
+    }
+};
+
+pub const FontFaceDeclarationParser = struct {
+    const This = @This();
+
+    pub const AtRuleParser = struct {
+        pub const Prelude = void;
+        pub const AtRule = FontFaceProperty;
+
+        pub fn parsePrelude(_: *This, name: []const u8, input: *css.Parser) Result(Prelude) {
+            return .{
+                .err = input.newError(css.BasicParseErrorKind{ .at_rule_invalid = name }),
+            };
+        }
+
+        pub fn parseBlock(_: *This, _: Prelude, _: *const css.ParserState, input: *css.Parser) Result(AtRule) {
+            return .{ .err = input.newError(css.BasicParseErrorKind{ .at_rule_body_invalid = {} }) };
+        }
+
+        pub fn ruleWithoutBlock(_: *This, _: Prelude, _: *const css.ParserState) css.Maybe(AtRule, void) {
+            return .{ .err = {} };
+        }
+    };
+
+    pub const QualifiedRuleParser = struct {
+        pub const Prelude = void;
+        pub const QualifiedRule = FontFaceProperty;
+
+        pub fn parsePrelude(_: *This, input: *css.Parser) Result(Prelude) {
+            return .{ .err = input.newError(css.BasicParseErrorKind{ .qualified_rule_invalid = {} }) };
+        }
+
+        pub fn parseBlock(_: *This, _: Prelude, _: *const css.ParserState, input: *css.Parser) Result(QualifiedRule) {
+            return .{ .err = input.newError(css.BasicParseErrorKind.qualified_rule_invalid) };
+        }
+    };
+
+    pub const DeclarationParser = struct {
+        pub const Declaration = FontFaceProperty;
+
+        pub fn parseValue(this: *This, name: []const u8, input: *css.Parser) Result(Declaration) {
+            _ = this; // autofix
+            const state = input.state();
+            // todo_stuff.match_ignore_ascii_case
+            if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "src")) {
+                if (input.parseCommaSeparated(Source, Source.parse).asValue()) |sources| {
+                    return .{ .result = .{ .source = sources } };
+                }
+            } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "font-family")) {
+                if (FontFamily.parse(input).asValue()) |c| {
+                    if (input.expectExhausted().isOk()) {
+                        return .{ .result = .{ .font_family = c } };
+                    }
+                }
+            } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "font-weight")) {
+                if (Size2D(FontWeight).parse(input).asValue()) |c| {
+                    if (input.expectExhausted().isOk()) {
+                        return .{ .result = .{ .font_weight = c } };
+                    }
+                }
+            } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "font-style")) {
+                if (FontStyle.parse(input).asValue()) |c| {
+                    if (input.expectExhausted().isOk()) {
+                        return .{ .result = .{ .font_style = c } };
+                    }
+                }
+            } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "font-stretch")) {
+                if (Size2D(FontStretch).parse(input).asValue()) |c| {
+                    if (input.expectExhausted().isOk()) {
+                        return .{ .result = .{ .font_stretch = c } };
+                    }
+                }
+            } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "unicode-range")) {
+                if (input.parseList(UnicodeRange, UnicodeRange.parse).asValue()) |c| {
+                    if (input.expectExhausted().isOk()) {
+                        return .{ .result = .{ .unicode_range = c } };
+                    }
+                }
+            } else {
+                //
+            }
+
+            input.reset(&state);
+            const opts = css.ParserOptions.default(input.allocator(), null);
+            return .{
+                .result = .{
+                    .custom = switch (CustomProperty.parse(CustomPropertyName.fromStr(name), input, &opts)) {
+                        .result => |v| v,
+                        .err => |e| return .{ .err = e },
+                    },
+                },
+            };
+        }
+    };
+
+    pub const
RuleBodyItemParser = struct { + pub fn parseQualified(this: *This) bool { + _ = this; // autofix + return false; + } + + pub fn parseDeclarations(this: *This) bool { + _ = this; // autofix + return true; + } + }; +}; diff --git a/src/css/rules/font_palette_values.zig b/src/css/rules/font_palette_values.zig new file mode 100644 index 0000000000..d5f1eb0c1b --- /dev/null +++ b/src/css/rules/font_palette_values.zig @@ -0,0 +1,302 @@ +const std = @import("std"); +pub const css = @import("../css_parser.zig"); +const bun = @import("root").bun; +const Result = css.Result; +const ArrayList = std.ArrayListUnmanaged; +const MediaList = css.MediaList; +const CustomMedia = css.CustomMedia; +const Printer = css.Printer; +const Maybe = css.Maybe; +const PrinterError = css.PrinterError; +const PrintErr = css.PrintErr; +const Dependency = css.Dependency; +const dependencies = css.dependencies; +const Url = css.css_values.url.Url; +const Size2D = css.css_values.size.Size2D; +const fontprops = css.css_properties.font; +const LayerName = css.css_rules.layer.LayerName; +const SupportsCondition = css.css_rules.supports.SupportsCondition; +const Location = css.css_rules.Location; +const Angle = css.css_values.angle.Angle; +const CustomProperty = css.css_properties.custom.CustomProperty; +const CustomPropertyName = css.css_properties.custom.CustomPropertyName; +const DashedIdent = css.css_values.ident.DashedIdent; +const FontFamily = css.css_properties.font.FontFamily; + +/// A [@font-palette-values](https://drafts.csswg.org/css-fonts-4/#font-palette-values) rule. +pub const FontPaletteValuesRule = struct { + /// The name of the font palette. + name: css.css_values.ident.DashedIdent, + /// Declarations in the `@font-palette-values` rule. + properties: ArrayList(FontPaletteValuesProperty), + /// The location of the rule in the source file. + loc: Location, + + const This = @This(); + + pub fn parse(name: DashedIdent, input: *css.Parser, loc: Location) Result(FontPaletteValuesRule) { + var decl_parser = FontPaletteValuesDeclarationParser{}; + var parser = css.RuleBodyParser(FontPaletteValuesDeclarationParser).new(input, &decl_parser); + var properties = ArrayList(FontPaletteValuesProperty){}; + while (parser.next()) |result| { + if (result.asValue()) |decl| { + properties.append( + input.allocator(), + decl, + ) catch unreachable; + } + } + + return .{ .result = FontPaletteValuesRule{ + .name = name, + .properties = properties, + .loc = loc, + } }; + } + + pub fn toCss(this: *const This, comptime W: type, dest: *Printer(W)) PrintErr!void { + // #[cfg(feature = "sourcemap")] + // dest.add_mapping(self.loc); + + try dest.writeStr("@font-palette-values "); + try css.css_values.ident.DashedIdentFns.toCss(&this.name, W, dest); + try dest.whitespace(); + try dest.writeChar('{'); + dest.indent(); + const len = this.properties.items.len; + for (this.properties.items, 0..) |*prop, i| { + try dest.newline(); + try prop.toCss(W, dest); + if (i != len - 1 or !dest.minify) { + try dest.writeChar(';'); + } + } + dest.dedent(); + try dest.newline(); + try dest.writeChar('}'); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) This { + return css.implementDeepClone(@This(), this, allocator); + } +}; + +pub const FontPaletteValuesProperty = union(enum) { + /// The `font-family` property. + font_family: fontprops.FontFamily, + + /// The `base-palette` property. + base_palette: BasePalette, + + /// The `override-colors` property. 
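+    /// For example, `override-colors: 0 #c1cbed, 1 #ff3a92;` replaces the first two palette entries.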
+ override_colors: ArrayList(OverrideColors), + + /// An unknown or unsupported property. + custom: css.css_properties.custom.CustomProperty, + + /// A property within an `@font-palette-values` rule. + /// + /// See [FontPaletteValuesRule](FontPaletteValuesRule). + const This = @This(); + + pub fn toCss(this: *const This, comptime W: type, dest: *Printer(W)) PrintErr!void { + switch (this.*) { + .font_family => |*f| { + try dest.writeStr("font-family"); + try dest.delim(':', false); + try f.toCss(W, dest); + }, + .base_palette => |*b| { + try dest.writeStr("base-palette"); + try dest.delim(':', false); + try b.toCss(W, dest); + }, + .override_colors => |*o| { + try dest.writeStr("override-colors"); + try dest.delim(':', false); + try css.to_css.fromList(OverrideColors, o, W, dest); + }, + .custom => |*custom| { + try dest.writeStr(custom.name.asStr()); + try dest.delim(':', false); + try custom.value.toCss(W, dest, true); + }, + } + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } +}; + +/// A value for the [override-colors](https://drafts.csswg.org/css-fonts-4/#override-color) +/// property in an `@font-palette-values` rule. +pub const OverrideColors = struct { + /// The index of the color within the palette to override. + index: u16, + + /// The replacement color. + color: css.css_values.color.CssColor, + + pub fn parse(input: *css.Parser) Result(OverrideColors) { + const index = switch (css.CSSIntegerFns.parse(input)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + if (index < 0) return .{ .err = input.newCustomError(css.ParserError.invalid_value) }; + + const color = switch (css.CssColor.parse(input)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + if (color == .current_color) return .{ .err = input.newCustomError(css.ParserError.invalid_value) }; + + return .{ + .result = OverrideColors{ + .index = @intCast(index), + .color = color, + }, + }; + } + + pub fn toCss(this: *const OverrideColors, comptime W: type, dest: *Printer(W)) PrintErr!void { + try css.CSSIntegerFns.toCss(&@as(i32, @intCast(this.index)), W, dest); + try dest.writeChar(' '); + try this.color.toCss(W, dest); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } +}; + +/// A value for the [base-palette](https://drafts.csswg.org/css-fonts-4/#base-palette-desc) +/// property in an `@font-palette-values` rule. +pub const BasePalette = union(enum) { + /// A light color palette as defined within the font. + light, + + /// A dark color palette as defined within the font. + dark, + + /// A palette index within the font. 
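+    /// For example, `base-palette: 1;` selects the second palette defined in the font (indices are 0-based).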
+ integer: u16, + + pub fn parse(input: *css.Parser) Result(BasePalette) { + if (input.tryParse(css.CSSIntegerFns.parse, .{}).asValue()) |i| { + if (i < 0) return .{ .err = input.newCustomError(css.ParserError.invalid_value) }; + return .{ .result = .{ .integer = @intCast(i) } }; + } + + const location = input.currentSourceLocation(); + const ident = switch (input.expectIdent()) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + if (bun.strings.eqlCaseInsensitiveASCIIICheckLength("light", ident)) { + return .{ .result = .light }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength("dark", ident)) { + return .{ .result = .dark }; + } else return .{ .err = location.newUnexpectedTokenError(.{ .ident = ident }) }; + } + + pub fn toCss(this: *const BasePalette, comptime W: type, dest: *Printer(W)) PrintErr!void { + switch (this.*) { + .light => try dest.writeStr("light"), + .dark => try dest.writeStr("dark"), + .integer => try css.CSSIntegerFns.toCss(&@as(i32, @intCast(this.integer)), W, dest), + } + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } +}; + +pub const FontPaletteValuesDeclarationParser = struct { + const This = @This(); + + pub const DeclarationParser = struct { + pub const Declaration = FontPaletteValuesProperty; + + pub fn parseValue(this: *This, name: []const u8, input: *css.Parser) Result(Declaration) { + _ = this; // autofix + const state = input.state(); + // todo_stuff.match_ignore_ascii_case + if (bun.strings.eqlCaseInsensitiveASCIIICheckLength("font-family", name)) { + // https://drafts.csswg.org/css-fonts-4/#font-family-2-desc + if (FontFamily.parse(input).asValue()) |font_family| { + if (font_family == .generic) { + return .{ .err = input.newCustomError(css.ParserError.invalid_declaration) }; + } + return .{ .result = .{ .font_family = font_family } }; + } + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength("base-palette", name)) { + // https://drafts.csswg.org/css-fonts-4/#base-palette-desc + if (BasePalette.parse(input).asValue()) |base_palette| { + return .{ .result = .{ .base_palette = base_palette } }; + } + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength("override-colors", name)) { + // https://drafts.csswg.org/css-fonts-4/#override-color + if (input.parseCommaSeparated(OverrideColors, OverrideColors.parse).asValue()) |override_colors| { + return .{ .result = .{ .override_colors = override_colors } }; + } + } else { + return .{ .err = input.newCustomError(css.ParserError.invalid_declaration) }; + } + + input.reset(&state); + const opts = css.ParserOptions.default(input.allocator(), null); + return .{ .result = .{ + .custom = switch (CustomProperty.parse( + CustomPropertyName.fromStr(name), + input, + &opts, + )) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }, + } }; + } + }; + + pub const RuleBodyItemParser = struct { + pub fn parseQualified(_: *This) bool { + return false; + } + + pub fn parseDeclarations(_: *This) bool { + return true; + } + }; + + pub const AtRuleParser = struct { + pub const Prelude = void; + pub const AtRule = FontPaletteValuesProperty; + + pub fn parsePrelude(_: *This, name: []const u8, input: *css.Parser) Result(Prelude) { + return .{ .err = input.newError(css.BasicParseErrorKind{ .at_rule_invalid = name }) }; + } + + pub fn parseBlock(_: *This, _: AtRuleParser.Prelude, _: *const css.ParserState, input: *css.Parser) Result(AtRuleParser.AtRule) { + return .{ .err = 
input.newError(css.BasicParseErrorKind.at_rule_body_invalid) }; + } + + pub fn ruleWithoutBlock(_: *This, _: AtRuleParser.Prelude, _: *const css.ParserState) css.Maybe(AtRuleParser.AtRule, void) { + return .{ .err = {} }; + } + }; + + pub const QualifiedRuleParser = struct { + pub const Prelude = void; + pub const QualifiedRule = FontPaletteValuesProperty; + + pub fn parsePrelude(_: *This, input: *css.Parser) Result(Prelude) { + return .{ .err = input.newError(css.BasicParseErrorKind.qualified_rule_invalid) }; + } + + pub fn parseBlock(_: *This, _: Prelude, _: *const css.ParserState, input: *css.Parser) Result(QualifiedRule) { + return .{ .err = input.newError(css.BasicParseErrorKind.qualified_rule_invalid) }; + } + }; +}; diff --git a/src/css/rules/import.zig b/src/css/rules/import.zig new file mode 100644 index 0000000000..7c42e67834 --- /dev/null +++ b/src/css/rules/import.zig @@ -0,0 +1,178 @@ +const std = @import("std"); +const bun = @import("root").bun; +pub const css = @import("../css_parser.zig"); +const Error = css.Error; +const ArrayList = std.ArrayListUnmanaged; +const MediaList = css.MediaList; +const CustomMedia = css.CustomMedia; +const Printer = css.Printer; +const Maybe = css.Maybe; +const PrinterError = css.PrinterError; +const PrintErr = css.PrintErr; +const Dependency = css.Dependency; +const dependencies = css.dependencies; +const Url = css.css_values.url.Url; +const Size2D = css.css_values.size.Size2D; +const fontprops = css.css_properties.font; +const LayerName = css.css_rules.layer.LayerName; +const SupportsCondition = css.css_rules.supports.SupportsCondition; +const Location = css.css_rules.Location; + +pub const ImportConditions = struct { + /// An optional cascade layer name, or `None` for an anonymous layer. + layer: ?struct { + /// PERF: null pointer optimizaiton, nullable + v: ?LayerName, + }, + + /// An optional `supports()` condition. + supports: ?SupportsCondition, + + /// A media query. + media: css.MediaList, + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) ImportConditions { + return ImportConditions{ + .layer = if (this.layer) |*l| if (l.v) |layer| .{ .v = layer.deepClone(allocator) } else .{ .v = null } else null, + .supports = if (this.supports) |*s| s.deepClone(allocator) else null, + .media = this.media.deepClone(allocator), + }; + } + + pub fn layersEql(lhs: *const @This(), rhs: *const @This()) bool { + if (lhs.layer) |ll| { + if (rhs.layer) |rl| { + if (ll.v) |lv| { + if (rl.v) |rv| { + return lv.eql(&rv); + } + return false; + } + return true; + } + return false; + } + return false; + } +}; + +/// A [@import](https://drafts.csswg.org/css-cascade/#at-import) rule. +pub const ImportRule = struct { + /// The url to import. + url: []const u8, + + /// An optional cascade layer name, or `None` for an anonymous layer. + layer: ?struct { + /// PERF: null pointer optimizaiton, nullable + v: ?LayerName, + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + }, + + /// An optional `supports()` condition. + supports: ?SupportsCondition, + + /// A media query. + media: css.MediaList, + + /// This is default initialized to 2^32 - 1 when parsing. + /// If we are bundling, this will be set to the index of the corresponding ImportRecord + /// created for this import rule. + import_record_idx: u32 = std.math.maxInt(u32), + + /// The location of the rule in the source file. 
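+    // Example: `@import url("theme.css") layer(base) supports(display: grid) screen and (min-width: 600px);`
+    // exercises the `layer`, `supports`, and `media` fields above.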
+ loc: Location, + + const This = @This(); + + pub fn fromUrl(url: []const u8) This { + return .{ + .url = url, + .layer = null, + .supports = null, + .media = MediaList{ .media_queries = .{} }, + .import_record_idx = std.math.maxInt(u32), + .loc = Location.dummy(), + }; + } + + pub fn fromConditionsAndUrl(url: []const u8, conditions: ImportConditions) This { + return ImportRule{ + .url = url, + .layer = if (conditions.layer) |layer| if (layer.v) |ly| .{ .v = ly } else .{ .v = null } else null, + .supports = conditions.supports, + .media = conditions.media, + .import_record_idx = std.math.maxInt(u32), + .loc = Location.dummy(), + }; + } + + pub fn conditionsOwned(this: *const This, allocator: std.mem.Allocator) ImportConditions { + return ImportConditions{ + .layer = if (this.layer) |*l| if (l.v) |layer| .{ .v = layer.deepClone(allocator) } else .{ .v = null } else null, + .supports = if (this.supports) |*s| s.deepClone(allocator) else null, + .media = this.media.deepClone(allocator), + }; + } + + pub fn hasConditions(this: *const This) bool { + return this.layer != null or this.supports != null or this.media.media_queries.items.len > 0; + } + + pub fn toCss(this: *const This, comptime W: type, dest: *Printer(W)) PrintErr!void { + const dep = if (dest.dependencies != null) dependencies.ImportDependency.new( + dest.allocator, + this, + dest.filename(), + ) else null; + + // #[cfg(feature = "sourcemap")] + // dest.add_mapping(self.loc); + + try dest.writeStr("@import "); + if (dep) |d| { + css.serializer.serializeString(d.placeholder, dest) catch return dest.addFmtError(); + + if (dest.dependencies) |*deps| { + deps.append( + dest.allocator, + Dependency{ .import = d }, + ) catch unreachable; + } + } else { + css.serializer.serializeString(this.url, dest) catch return dest.addFmtError(); + } + + if (this.layer) |*lyr| { + try dest.writeStr(" layer"); + if (lyr.v) |l| { + try dest.writeChar('('); + try l.toCss(W, dest); + try dest.writeChar(')'); + } + } + + if (this.supports) |*sup| { + try dest.writeStr(" supports"); + if (sup.* == .declaration) { + try sup.toCss(W, dest); + } else { + try dest.writeChar('('); + try sup.toCss(W, dest); + try dest.writeChar(')'); + } + } + + if (this.media.media_queries.items.len > 0) { + try dest.writeChar(' '); + try this.media.toCss(W, dest); + } + try dest.writeStr(";"); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) This { + return css.implementDeepClone(@This(), this, allocator); + } +}; diff --git a/src/css/rules/keyframes.zig b/src/css/rules/keyframes.zig new file mode 100644 index 0000000000..640683d41a --- /dev/null +++ b/src/css/rules/keyframes.zig @@ -0,0 +1,315 @@ +const std = @import("std"); +pub const css = @import("../css_parser.zig"); +const bun = @import("root").bun; +const ArrayList = std.ArrayListUnmanaged; +const MediaList = css.MediaList; +const CustomMedia = css.CustomMedia; +const Printer = css.Printer; +const Maybe = css.Maybe; +const PrinterError = css.PrinterError; +const PrintErr = css.PrintErr; +const Dependency = css.Dependency; +const dependencies = css.dependencies; +const Url = css.css_values.url.Url; +const Size2D = css.css_values.size.Size2D; +const fontprops = css.css_properties.font; +const LayerName = css.css_rules.layer.LayerName; +const SupportsCondition = css.css_rules.supports.SupportsCondition; +const Location = css.css_rules.Location; +const Result = css.Result; + +pub const KeyframesListParser = struct { + const This = @This(); + + pub const DeclarationParser = struct { + pub const 
Declaration = Keyframe; + + pub fn parseValue(_: *This, name: []const u8, input: *css.Parser) Result(Declaration) { + return .{ .err = input.newError(css.BasicParseErrorKind{ .unexpected_token = .{ .ident = name } }) }; + } + }; + + pub const RuleBodyItemParser = struct { + pub fn parseQualified(_: *This) bool { + return true; + } + + pub fn parseDeclarations(_: *This) bool { + return false; + } + }; + + pub const AtRuleParser = struct { + pub const Prelude = void; + pub const AtRule = Keyframe; + + pub fn parsePrelude(_: *This, name: []const u8, input: *css.Parser) Result(Prelude) { + return .{ .err = input.newError(css.BasicParseErrorKind{ .at_rule_invalid = name }) }; + } + + pub fn parseBlock(_: *This, _: AtRuleParser.Prelude, _: *const css.ParserState, input: *css.Parser) Result(AtRuleParser.AtRule) { + return .{ .err = input.newError(css.BasicParseErrorKind.at_rule_body_invalid) }; + } + + pub fn ruleWithoutBlock(_: *This, _: AtRuleParser.Prelude, _: *const css.ParserState) css.Maybe(AtRuleParser.AtRule, void) { + return .{ .err = {} }; + } + }; + + pub const QualifiedRuleParser = struct { + pub const Prelude = ArrayList(KeyframeSelector); + pub const QualifiedRule = Keyframe; + + pub fn parsePrelude(_: *This, input: *css.Parser) Result(Prelude) { + return input.parseCommaSeparated(KeyframeSelector, KeyframeSelector.parse); + } + + pub fn parseBlock(_: *This, prelude: Prelude, _: *const css.ParserState, input: *css.Parser) Result(QualifiedRule) { + // For now there are no options that apply within @keyframes + const options = css.ParserOptions.default(input.allocator(), null); + return .{ + .result = Keyframe{ + .selectors = prelude, + .declarations = switch (css.DeclarationBlock.parse(input, &options)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }, + }, + }; + } + }; +}; + +/// KeyframesName +pub const KeyframesName = union(enum) { + /// `` of a `@keyframes` name. + ident: css.css_values.ident.CustomIdent, + /// `` of a `@keyframes` name. + custom: []const u8, + + const This = @This(); + + pub fn HashMap(comptime V: type) type { + return std.ArrayHashMapUnmanaged(KeyframesName, V, struct { + pub fn hash(_: @This(), key: KeyframesName) u32 { + return switch (key) { + .ident => std.array_hash_map.hashString(key.ident.v), + .custom => std.array_hash_map.hashString(key.custom), + }; + } + + pub fn eql(_: @This(), a: KeyframesName, b: KeyframesName, _: usize) bool { + return switch (a) { + .ident => switch (b) { + .ident => bun.strings.eql(a.ident.v, b.ident.v), + .custom => false, + }, + .custom => switch (b) { + .ident => false, + .custom => bun.strings.eql(a.custom, b.custom), + }, + }; + } + }, false); + } + + pub fn parse(input: *css.Parser) Result(KeyframesName) { + switch (switch (input.next()) { + .result => |v| v.*, + .err => |e| return .{ .err = e }, + }) { + .ident => |s| { + // todo_stuff.match_ignore_ascii_case + // CSS-wide keywords without quotes throws an error. 
+ if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(s, "none") or + bun.strings.eqlCaseInsensitiveASCIIICheckLength(s, "initial") or + bun.strings.eqlCaseInsensitiveASCIIICheckLength(s, "inherit") or + bun.strings.eqlCaseInsensitiveASCIIICheckLength(s, "unset") or + bun.strings.eqlCaseInsensitiveASCIIICheckLength(s, "default") or + bun.strings.eqlCaseInsensitiveASCIIICheckLength(s, "revert") or + bun.strings.eqlCaseInsensitiveASCIIICheckLength(s, "revert-layer")) + { + return .{ .err = input.newUnexpectedTokenError(.{ .ident = s }) }; + } else { + return .{ .result = .{ .ident = .{ .v = s } } }; + } + }, + .quoted_string => |s| return .{ .result = .{ .custom = s } }, + else => |t| { + return .{ .err = input.newUnexpectedTokenError(t) }; + }, + } + } + + pub fn toCss(this: *const This, comptime W: type, dest: *Printer(W)) PrintErr!void { + const css_module_aimation_enabled = if (dest.css_module) |css_module| css_module.config.animation else false; + + switch (this.*) { + .ident => |ident| { + try dest.writeIdent(ident.v, css_module_aimation_enabled); + }, + .custom => |s| { + // todo_stuff.match_ignore_ascii_case + // CSS-wide keywords and `none` cannot remove quotes. + if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(s, "none") or + bun.strings.eqlCaseInsensitiveASCIIICheckLength(s, "initial") or + bun.strings.eqlCaseInsensitiveASCIIICheckLength(s, "inherit") or + bun.strings.eqlCaseInsensitiveASCIIICheckLength(s, "unset") or + bun.strings.eqlCaseInsensitiveASCIIICheckLength(s, "default") or + bun.strings.eqlCaseInsensitiveASCIIICheckLength(s, "revert") or + bun.strings.eqlCaseInsensitiveASCIIICheckLength(s, "revert-layer")) + { + css.serializer.serializeString(s, dest) catch return dest.addFmtError(); + } else { + try dest.writeIdent(s, css_module_aimation_enabled); + } + }, + } + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } +}; + +pub const KeyframeSelector = union(enum) { + /// An explicit percentage. + percentage: css.css_values.percentage.Percentage, + /// The `from` keyword. Equivalent to 0%. + from, + /// The `to` keyword. Equivalent to 100%. + to, + + // TODO: implement this + pub usingnamespace css.DeriveParse(@This()); + + // pub fn parse(input: *css.Parser) Result(KeyframeSelector) { + // _ = input; // autofix + // @panic(css.todo_stuff.depth); + // } + + pub fn toCss(this: *const @This(), comptime W: type, dest: *Printer(W)) PrintErr!void { + switch (this.*) { + .percentage => |p| { + if (dest.minify and p.v == 1.0) { + try dest.writeStr("to"); + } else { + try p.toCss(W, dest); + } + }, + .from => { + if (dest.minify) { + try dest.writeStr("0%"); + } else { + try dest.writeStr("from"); + } + }, + .to => { + try dest.writeStr("to"); + }, + } + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } +}; + +/// An individual keyframe within an `@keyframes` rule. +/// +/// See [KeyframesRule](KeyframesRule). +pub const Keyframe = struct { + /// A list of keyframe selectors to associate with the declarations in this keyframe. + selectors: ArrayList(KeyframeSelector), + /// The declarations for this keyframe. 
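+    /// For example, the keyframe `50%, to { opacity: 1 }` contributes a single `opacity`
+    /// declaration here, with the selectors `50%` and `to`.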
+ declarations: css.DeclarationBlock, + + const This = @This(); + + pub fn toCss(this: *const This, comptime W: type, dest: *Printer(W)) PrintErr!void { + var first = true; + for (this.selectors.items) |sel| { + if (!first) { + try dest.delim(',', false); + } + first = false; + try sel.toCss(W, dest); + } + + try this.declarations.toCssBlock(W, dest); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } +}; + +pub const KeyframesRule = struct { + /// The animation name. + /// = | + name: KeyframesName, + /// A list of keyframes in the animation. + keyframes: ArrayList(Keyframe), + /// A vendor prefix for the rule, e.g. `@-webkit-keyframes`. + vendor_prefix: css.VendorPrefix, + /// The location of the rule in the source file. + loc: Location, + + const This = @This(); + + pub fn toCss(this: *const This, comptime W: type, dest: *Printer(W)) PrintErr!void { + // #[cfg(feature = "sourcemap")] + // dest.add_mapping(self.loc); + + var first_rule = true; + + const PREFIXES = .{ "webkit", "moz", "ms", "o", "none" }; + + inline for (PREFIXES) |prefix_name| { + const prefix = css.VendorPrefix.fromName(prefix_name); + + if (this.vendor_prefix.contains(prefix)) { + if (first_rule) { + first_rule = false; + } else { + if (!dest.minify) { + try dest.writeChar('\n'); // no indent + } + try dest.newline(); + } + + try dest.writeChar('@'); + try prefix.toCss(W, dest); + try dest.writeStr("keyframes "); + try this.name.toCss(W, dest); + try dest.whitespace(); + try dest.writeChar('{'); + dest.indent(); + + var first = true; + for (this.keyframes.items) |*keyframe| { + if (first) { + first = false; + } else if (!dest.minify) { + try dest.writeChar('\n'); // no indent + } + try dest.newline(); + try keyframe.toCss(W, dest); + } + dest.dedent(); + try dest.newline(); + try dest.writeChar('}'); + } + } + } + + pub fn getFallbacks(this: *This, comptime T: type, targets: *const css.targets.Targets) []css.CssRule(T) { + _ = this; // autofix + _ = targets; // autofix + @panic(css.todo_stuff.depth); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) This { + return css.implementDeepClone(@This(), this, allocator); + } +}; diff --git a/src/css/rules/layer.zig b/src/css/rules/layer.zig new file mode 100644 index 0000000000..05c2d692a8 --- /dev/null +++ b/src/css/rules/layer.zig @@ -0,0 +1,186 @@ +const std = @import("std"); +const bun = @import("root").bun; +pub const css = @import("../css_parser.zig"); +const ArrayList = std.ArrayListUnmanaged; +const MediaList = css.MediaList; +const CustomMedia = css.CustomMedia; +const Printer = css.Printer; +const Maybe = css.Maybe; +const PrinterError = css.PrinterError; +const PrintErr = css.PrintErr; +const SupportsCondition = css.css_rules.supports.SupportsCondition; +const Location = css.css_rules.Location; +const Result = css.Result; + +// TODO: make this equivalent of SmallVec<[CowArcStr<'i>; 1] +pub const LayerName = struct { + v: css.SmallList([]const u8, 1) = .{}, + + pub fn HashMap(comptime V: type) type { + return std.ArrayHashMapUnmanaged(LayerName, V, struct { + pub fn hash(_: @This(), key: LayerName) u32 { + var hasher = std.hash.Wyhash.init(0); + for (key.v.items) |part| { + hasher.update(part); + } + return hasher.final(); + } + + pub fn eql(_: @This(), a: LayerName, b: LayerName, _: usize) bool { + if (a.v.len != b.v.len) return false; + for (a.v.items, 0..) 
|part, i| { + if (!bun.strings.eql(part, b.v.items[i])) return false; + } + return true; + } + }, false); + } + + pub fn deepClone(this: *const LayerName, allocator: std.mem.Allocator) LayerName { + return LayerName{ + .v = this.v.clone(allocator), + }; + } + + pub fn eql(lhs: *const LayerName, rhs: *const LayerName) bool { + if (lhs.v.len() != rhs.v.len()) return false; + for (lhs.v.slice(), 0..) |part, i| { + if (!bun.strings.eql(part, rhs.v.at(@intCast(i)).*)) return false; + } + return true; + } + + pub fn parse(input: *css.Parser) Result(LayerName) { + var parts: css.SmallList([]const u8, 1) = .{}; + const ident = switch (input.expectIdent()) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + parts.append( + input.allocator(), + ident, + ); + + while (true) { + const Fn = struct { + pub fn tryParseFn( + i: *css.Parser, + ) Result([]const u8) { + const name = name: { + out: { + const start_location = i.currentSourceLocation(); + const tok = switch (i.nextIncludingWhitespace()) { + .err => |e| return .{ .err = e }, + .result => |vvv| vvv, + }; + if (tok.* == .delim and tok.delim == '.') { + break :out; + } + return .{ .err = start_location.newBasicUnexpectedTokenError(tok.*) }; + } + + const start_location = i.currentSourceLocation(); + const tok = switch (i.nextIncludingWhitespace()) { + .err => |e| return .{ .err = e }, + .result => |vvv| vvv, + }; + if (tok.* == .ident) { + break :name tok.ident; + } + return .{ .err = start_location.newBasicUnexpectedTokenError(tok.*) }; + }; + return .{ .result = name }; + } + }; + + while (true) { + const name = switch (input.tryParse(Fn.tryParseFn, .{})) { + .err => break, + .result => |vvv| vvv, + }; + parts.append( + input.allocator(), + name, + ); + } + + return .{ .result = LayerName{ .v = parts } }; + } + } + + pub fn toCss(this: *const LayerName, comptime W: type, dest: *css.Printer(W)) css.PrintErr!void { + var first = true; + for (this.v.slice()) |name| { + if (first) { + first = false; + } else { + try dest.writeChar('.'); + } + + css.serializer.serializeIdentifier(name, dest) catch return dest.addFmtError(); + } + } +}; + +/// A [@layer block](https://drafts.csswg.org/css-cascade-5/#layer-block) rule. +pub fn LayerBlockRule(comptime R: type) type { + return struct { + /// PERF: null pointer optimizaiton, nullable + /// The name of the layer to declare, or `None` to declare an anonymous layer. + name: ?LayerName, + /// The rules within the `@layer` rule. + rules: css.CssRuleList(R), + /// The location of the rule in the source file. + loc: Location, + + const This = @This(); + + pub fn toCss(this: *const This, comptime W: type, dest: *Printer(W)) PrintErr!void { + // #[cfg(feature = "sourcemap")] + // dest.add_mapping(self.loc); + + try dest.writeStr("@layer"); + if (this.name) |*name| { + try dest.writeChar(' '); + try name.toCss(W, dest); + } + + try dest.whitespace(); + try dest.writeChar('{'); + dest.indent(); + try dest.newline(); + try this.rules.toCss(W, dest); + dest.dedent(); + try dest.newline(); + try dest.writeChar('}'); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) This { + return css.implementDeepClone(@This(), this, allocator); + } + }; +} + +/// A [@layer statement](https://drafts.csswg.org/css-cascade-5/#layer-empty) rule. +/// +/// See also [LayerBlockRule](LayerBlockRule). +pub const LayerStatementRule = struct { + /// The layer names to declare. + names: ArrayList(LayerName), + /// The location of the rule in the source file. 
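+    // For example, `@layer theme.dark;` yields a LayerName with the parts { "theme", "dark" },
+    // `@layer reset, base, utilities;` is a statement rule declaring several layers at once, and
+    // `@layer utilities { .mt-1 { margin-top: 4px } }` is a block rule (LayerBlockRule above).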
+ loc: Location, + + const This = @This(); + + pub fn toCss(this: *const This, comptime W: type, dest: *Printer(W)) PrintErr!void { + // #[cfg(feature = "sourcemap")] + // dest.add_mapping(self.loc); + try dest.writeStr("@layer "); + try css.to_css.fromList(LayerName, &this.names, W, dest); + try dest.writeChar(';'); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) This { + return css.implementDeepClone(@This(), this, allocator); + } +}; diff --git a/src/css/rules/media.zig b/src/css/rules/media.zig new file mode 100644 index 0000000000..7cbb5214ac --- /dev/null +++ b/src/css/rules/media.zig @@ -0,0 +1,58 @@ +const std = @import("std"); +pub const css = @import("../css_parser.zig"); +const bun = @import("root").bun; +const Error = css.Error; +const ArrayList = std.ArrayListUnmanaged; +const MediaList = css.MediaList; +const CustomMedia = css.CustomMedia; +const Printer = css.Printer; +const Maybe = css.Maybe; +const PrinterError = css.PrinterError; +const PrintErr = css.PrintErr; +const Location = css.css_rules.Location; +const style = css.css_rules.style; +const CssRuleList = css.CssRuleList; + +pub fn MediaRule(comptime R: type) type { + return struct { + /// The media query list. + query: css.MediaList, + /// The rules within the `@media` rule. + rules: css.CssRuleList(R), + /// The location of the rule in the source file. + loc: Location, + + const This = @This(); + + pub fn minify(this: *This, context: *css.MinifyContext, parent_is_unused: bool) css.MinifyErr!bool { + _ = this; // autofix + _ = context; // autofix + _ = parent_is_unused; // autofix + return false; + } + + pub fn toCss(this: *const This, comptime W: type, dest: *Printer(W)) PrintErr!void { + if (dest.minify and this.query.alwaysMatches()) { + try this.rules.toCss(W, dest); + return; + } + // #[cfg(feature = "sourcemap")] + // dest.addMapping(this.loc); + + try dest.writeStr("@media "); + try this.query.toCss(W, dest); + try dest.whitespace(); + try dest.writeChar('{'); + dest.indent(); + try dest.newline(); + try this.rules.toCss(W, dest); + dest.dedent(); + try dest.newline(); + return dest.writeChar('}'); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) This { + return css.implementDeepClone(@This(), this, allocator); + } + }; +} diff --git a/src/css/rules/namespace.zig b/src/css/rules/namespace.zig new file mode 100644 index 0000000000..30bdade77f --- /dev/null +++ b/src/css/rules/namespace.zig @@ -0,0 +1,41 @@ +const std = @import("std"); +const Allocator = std.mem.Allocator; +const bun = @import("root").bun; +const logger = bun.logger; +const Log = logger.Log; + +pub const css = @import("../css_parser.zig"); +pub const css_values = @import("../values/values.zig"); +pub const Error = css.Error; +const Printer = css.Printer; +const PrintErr = css.PrintErr; + +/// A [@namespace](https://drafts.csswg.org/css-namespaces/#declaration) rule. +pub const NamespaceRule = struct { + /// An optional namespace prefix to declare, or `None` to declare the default namespace. + prefix: ?css.Ident, + /// The url of the namespace. + url: css.CSSString, + /// The location of the rule in the source file. 
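+    // For example, `@namespace svg url(http://www.w3.org/2000/svg);` declares the prefix `svg`,
+    // while `@namespace url(http://www.w3.org/1999/xhtml);` (no prefix) declares the default namespace.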
+ loc: css.Location, + + const This = @This(); + + pub fn toCss(this: *const This, comptime W: type, dest: *Printer(W)) PrintErr!void { + // #[cfg(feature = "sourcemap")] + // dest.add_mapping(self.loc); + + try dest.writeStr("@namespace "); + if (this.prefix) |*prefix| { + try css.css_values.ident.IdentFns.toCss(prefix, W, dest); + try dest.writeChar(' '); + } + + try css.css_values.string.CSSStringFns.toCss(&this.url, W, dest); + try dest.writeChar(':'); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) This { + return css.implementDeepClone(@This(), this, allocator); + } +}; diff --git a/src/css/rules/nesting.zig b/src/css/rules/nesting.zig new file mode 100644 index 0000000000..9aceb97b51 --- /dev/null +++ b/src/css/rules/nesting.zig @@ -0,0 +1,38 @@ +const std = @import("std"); +pub const css = @import("../css_parser.zig"); +const bun = @import("root").bun; +const Error = css.Error; +const ArrayList = std.ArrayListUnmanaged; +const MediaList = css.MediaList; +const CustomMedia = css.CustomMedia; +const Printer = css.Printer; +const Maybe = css.Maybe; +const PrinterError = css.PrinterError; +const PrintErr = css.PrintErr; +const Location = css.css_rules.Location; +const style = css.css_rules.style; + +/// A [@nest](https://www.w3.org/TR/css-nesting-1/#at-nest) rule. +pub fn NestingRule(comptime R: type) type { + return struct { + /// The style rule that defines the selector and declarations for the `@nest` rule. + style: style.StyleRule(R), + /// The location of the rule in the source file. + loc: Location, + + const This = @This(); + + pub fn toCss(this: *const This, comptime W: type, dest: *Printer(W)) PrintErr!void { + // #[cfg(feature = "sourcemap")] + // dest.add_mapping(self.loc); + if (dest.context() == null) { + try dest.writeStr("@nest "); + } + return try this.style.toCss(W, dest); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) This { + return css.implementDeepClone(@This(), this, allocator); + } + }; +} diff --git a/src/css/rules/page.zig b/src/css/rules/page.zig new file mode 100644 index 0000000000..267c49f8c5 --- /dev/null +++ b/src/css/rules/page.zig @@ -0,0 +1,400 @@ +const std = @import("std"); +pub const css = @import("../css_parser.zig"); +const bun = @import("root").bun; +const Result = css.Result; +const ArrayList = std.ArrayListUnmanaged; +const MediaList = css.MediaList; +const CustomMedia = css.CustomMedia; +const Printer = css.Printer; +const Maybe = css.Maybe; +const PrinterError = css.PrinterError; +const PrintErr = css.PrintErr; +const Dependency = css.Dependency; +const dependencies = css.dependencies; +const Url = css.css_values.url.Url; +const Size2D = css.css_values.size.Size2D; +const fontprops = css.css_properties.font; +const LayerName = css.css_rules.layer.LayerName; +const SupportsCondition = css.css_rules.supports.SupportsCondition; +const Location = css.css_rules.Location; +const Angle = css.css_values.angle.Angle; +const FontStyleProperty = css.css_properties.font.FontStyle; +const FontFamily = css.css_properties.font.FontFamily; +const FontWeight = css.css_properties.font.FontWeight; +const FontStretch = css.css_properties.font.FontStretch; +const CustomProperty = css.css_properties.custom.CustomProperty; +const CustomPropertyName = css.css_properties.custom.CustomPropertyName; +const DashedIdent = css.css_values.ident.DashedIdent; + +/// A [page selector](https://www.w3.org/TR/css-page-3/#typedef-page-selector) +/// within a `@page` rule. 
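+/// For example, `@page toc:first` has the name `toc` and the pseudo class `:first`,
+/// while `@page :blank:left` has no name and two pseudo classes.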
+/// +/// Either a name or at least one pseudo class is required. +pub const PageSelector = struct { + /// An optional named page type. + name: ?[]const u8, + /// A list of page pseudo classes. + pseudo_classes: ArrayList(PagePseudoClass), + + pub fn parse(input: *css.Parser) Result(PageSelector) { + const name = if (input.tryParse(css.Parser.expectIdent, .{}).asValue()) |name| name else null; + var pseudo_classes = ArrayList(PagePseudoClass){}; + + while (true) { + // Whitespace is not allowed between pseudo classes + const state = input.state(); + if (switch (input.nextIncludingWhitespace()) { + .result => |tok| tok.* == .colon, + .err => |e| return .{ .err = e }, + }) { + pseudo_classes.append( + input.allocator(), + switch (PagePseudoClass.parse(input)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }, + ) catch bun.outOfMemory(); + } else { + input.reset(&state); + break; + } + } + + if (name == null and pseudo_classes.items.len == 0) { + return .{ .err = input.newCustomError(css.ParserError.invalid_page_selector) }; + } + + return .{ + .result = PageSelector{ + .name = name, + .pseudo_classes = pseudo_classes, + }, + }; + } + + const This = @This(); + + pub fn toCss(this: *const This, comptime W: type, dest: *Printer(W)) PrintErr!void { + if (this.name) |name| { + try dest.writeStr(name); + } + + for (this.pseudo_classes.items) |*pseudo| { + try dest.writeChar(':'); + try pseudo.toCss(W, dest); + } + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } +}; + +pub const PageMarginRule = struct { + /// The margin box identifier for this rule. + margin_box: PageMarginBox, + /// The declarations within the rule. + declarations: css.DeclarationBlock, + /// The location of the rule in the source file. + loc: Location, + + const This = @This(); + + pub fn toCss(this: *const This, comptime W: type, dest: *Printer(W)) PrintErr!void { + // #[cfg(feature = "sourcemap")] + // dest.add_mapping(self.loc); + + try dest.writeChar('@'); + try this.margin_box.toCss(W, dest); + try this.declarations.toCssBlock(W, dest); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } +}; + +/// A [@page](https://www.w3.org/TR/css-page-3/#at-page-rule) rule. +pub const PageRule = struct { + /// A list of page selectors. + selectors: ArrayList(PageSelector), + /// The declarations within the `@page` rule. + declarations: css.DeclarationBlock, + /// The nested margin rules. + rules: ArrayList(PageMarginRule), + /// The location of the rule in the source file. 
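+    // For example:
+    //   @page :first {
+    //     margin: 1in;
+    //     @top-center { content: "Preface" }
+    //   }
+    // has one selector (`:first`), one declaration (`margin`), and one nested margin rule.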
+ loc: Location, + + pub fn parse(selectors: ArrayList(PageSelector), input: *css.Parser, loc: Location, options: *const css.ParserOptions) Result(PageRule) { + var declarations = css.DeclarationBlock{}; + var rules = ArrayList(PageMarginRule){}; + var rule_parser = PageRuleParser{ + .declarations = &declarations, + .rules = &rules, + .options = options, + }; + var parser = css.RuleBodyParser(PageRuleParser).new(input, &rule_parser); + + while (parser.next()) |decl| { + if (decl.asErr()) |e| { + if (parser.parser.options.error_recovery) { + parser.parser.options.warn(e); + continue; + } + + return .{ .err = e }; + } + } + + return .{ .result = PageRule{ + .selectors = selectors, + .declarations = declarations, + .rules = rules, + .loc = loc, + } }; + } + + const This = @This(); + + pub fn toCss(this: *const This, comptime W: type, dest: *Printer(W)) PrintErr!void { + // #[cfg(feature = "sourcemap")] + // dest.add_mapping(self.loc); + try dest.writeStr("@page"); + if (this.selectors.items.len >= 1) { + const firstsel = &this.selectors.items[0]; + // Space is only required if the first selector has a name. + if (!dest.minify and firstsel.name != null) { + try dest.writeChar(' '); + } + var first = true; + for (this.selectors.items) |selector| { + if (first) { + first = false; + } else { + try dest.delim(',', false); + } + try selector.toCss(W, dest); + } + } + + try dest.whitespace(); + try dest.writeChar('{'); + dest.indent(); + + var i: usize = 0; + const len = this.declarations.len() + this.rules.items.len; + + const DECLS = .{ "declarations", "important_declarations" }; + inline for (DECLS) |decl_field_name| { + const decls: *const ArrayList(css.Property) = &@field(this.declarations, decl_field_name); + const important = comptime std.mem.eql(u8, decl_field_name, "important_declarations"); + for (decls.items) |*decl| { + try dest.newline(); + try decl.toCss(W, dest, important); + if (i != len - 1 or !dest.minify) { + try dest.writeChar(';'); + } + i += 1; + } + } + + if (this.rules.items.len > 0) { + if (!dest.minify and this.declarations.len() > 0) { + try dest.writeChar('\n'); + } + try dest.newline(); + + var first = true; + for (this.rules.items) |*rule| { + if (first) { + first = false; + } else { + if (!dest.minify) { + try dest.writeChar('\n'); + } + try dest.newline(); + } + try rule.toCss(W, dest); + } + } + + dest.dedent(); + try dest.newline(); + try dest.writeChar('}'); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) This { + return css.implementDeepClone(@This(), this, allocator); + } +}; + +/// A page pseudo class within an `@page` selector. +/// +/// See [PageSelector](PageSelector). +pub const PagePseudoClass = enum { + /// The `:left` pseudo class. + left, + /// The `:right` pseudo class. + right, + /// The `:first` pseudo class. + first, + /// The `:last` pseudo class. + last, + /// The `:blank` pseudo class. + blank, + + pub fn asStr(this: *const @This()) []const u8 { + return css.enum_property_util.asStr(@This(), this); + } + + pub fn parse(input: *css.Parser) Result(@This()) { + return css.enum_property_util.parse(@This(), input); + } + + pub fn toCss(this: *const @This(), comptime W: type, dest: *Printer(W)) PrintErr!void { + return css.enum_property_util.toCss(@This(), this, W, dest); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } +}; + +/// A [page margin box](https://www.w3.org/TR/css-page-3/#margin-boxes). 
+pub const PageMarginBox = enum { + /// A fixed-size box defined by the intersection of the top and left margins of the page box. + @"top-left-corner", + /// A variable-width box filling the top page margin between the top-left-corner and top-center page-margin boxes. + @"top-left", + /// A variable-width box centered horizontally between the page’s left and right border edges and filling the + /// page top margin between the top-left and top-right page-margin boxes. + @"top-center", + /// A variable-width box filling the top page margin between the top-center and top-right-corner page-margin boxes. + @"top-right", + /// A fixed-size box defined by the intersection of the top and right margins of the page box. + @"top-right-corner", + /// A variable-height box filling the left page margin between the top-left-corner and left-middle page-margin boxes. + @"left-top", + /// A variable-height box centered vertically between the page’s top and bottom border edges and filling the + /// left page margin between the left-top and left-bottom page-margin boxes. + @"left-middle", + /// A variable-height box filling the left page margin between the left-middle and bottom-left-corner page-margin boxes. + @"left-bottom", + /// A variable-height box filling the right page margin between the top-right-corner and right-middle page-margin boxes. + @"right-top", + /// A variable-height box centered vertically between the page’s top and bottom border edges and filling the right + /// page margin between the right-top and right-bottom page-margin boxes. + @"right-middle", + /// A variable-height box filling the right page margin between the right-middle and bottom-right-corner page-margin boxes. + @"right-bottom", + /// A fixed-size box defined by the intersection of the bottom and left margins of the page box. + @"bottom-left-corner", + /// A variable-width box filling the bottom page margin between the bottom-left-corner and bottom-center page-margin boxes. + @"bottom-left", + /// A variable-width box centered horizontally between the page’s left and right border edges and filling the bottom + /// page margin between the bottom-left and bottom-right page-margin boxes. + @"bottom-center", + /// A variable-width box filling the bottom page margin between the bottom-center and bottom-right-corner page-margin boxes. + @"bottom-right", + /// A fixed-size box defined by the intersection of the bottom and right margins of the page box. 
+ @"bottom-right-corner", + + pub fn asStr(this: *const @This()) []const u8 { + return css.enum_property_util.asStr(@This(), this); + } + + pub fn parse(input: *css.Parser) Result(@This()) { + return css.enum_property_util.parse(@This(), input); + } + + pub fn toCss(this: *const @This(), comptime W: type, dest: *Printer(W)) PrintErr!void { + return css.enum_property_util.toCss(@This(), this, W, dest); + } +}; + +pub const PageRuleParser = struct { + declarations: *css.DeclarationBlock, + rules: *ArrayList(PageMarginRule), + options: *const css.ParserOptions, + + const This = @This(); + + pub const DeclarationParser = struct { + pub const Declaration = void; + + pub fn parseValue(this: *This, name: []const u8, input: *css.Parser) Result(Declaration) { + return css.declaration.parse_declaration( + name, + input, + &this.declarations.declarations, + &this.declarations.important_declarations, + this.options, + ); + } + }; + + pub const RuleBodyItemParser = struct { + pub fn parseQualified(_: *This) bool { + return false; + } + + pub fn parseDeclarations(_: *This) bool { + return true; + } + }; + + pub const AtRuleParser = struct { + pub const Prelude = PageMarginBox; + pub const AtRule = void; + + pub fn parsePrelude(_: *This, name: []const u8, input: *css.Parser) Result(Prelude) { + const loc = input.currentSourceLocation(); + return switch (css.parse_utility.parseString( + input.allocator(), + PageMarginBox, + name, + PageMarginBox.parse, + )) { + .result => |v| return .{ .result = v }, + .err => { + return .{ .err = loc.newCustomError(css.ParserError{ .at_rule_invalid = name }) }; + }, + }; + } + + pub fn parseBlock(this: *This, prelude: AtRuleParser.Prelude, start: *const css.ParserState, input: *css.Parser) Result(AtRuleParser.AtRule) { + const loc = start.sourceLocation(); + const declarations = switch (css.DeclarationBlock.parse(input, this.options)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + this.rules.append(input.allocator(), PageMarginRule{ + .margin_box = prelude, + .declarations = declarations, + .loc = Location{ + .source_index = this.options.source_index, + .line = loc.line, + .column = loc.column, + }, + }) catch bun.outOfMemory(); + return Result(AtRuleParser.AtRule).success; + } + + pub fn ruleWithoutBlock(_: *This, _: AtRuleParser.Prelude, _: *const css.ParserState) css.Maybe(AtRuleParser.AtRule, void) { + return .{ .err = {} }; + } + }; + + pub const QualifiedRuleParser = struct { + pub const Prelude = void; + pub const QualifiedRule = void; + + pub fn parsePrelude(_: *This, input: *css.Parser) Result(Prelude) { + return .{ .err = input.newError(css.BasicParseErrorKind.qualified_rule_invalid) }; + } + + pub fn parseBlock(_: *This, _: Prelude, _: *const css.ParserState, input: *css.Parser) Result(QualifiedRule) { + return .{ .err = input.newError(css.BasicParseErrorKind.qualified_rule_invalid) }; + } + }; +}; diff --git a/src/css/rules/property.zig b/src/css/rules/property.zig new file mode 100644 index 0000000000..b3044d1836 --- /dev/null +++ b/src/css/rules/property.zig @@ -0,0 +1,229 @@ +const std = @import("std"); +pub const css = @import("../css_parser.zig"); +const bun = @import("root").bun; +const Result = css.Result; +const ArrayList = std.ArrayListUnmanaged; +const MediaList = css.MediaList; +const CustomMedia = css.CustomMedia; +const Printer = css.Printer; +const Maybe = css.Maybe; +const PrinterError = css.PrinterError; +const PrintErr = css.PrintErr; +const Location = css.css_rules.Location; +const style = css.css_rules.style; +const 
SyntaxString = css.css_values.syntax.SyntaxString; +const ParsedComponent = css.css_values.syntax.ParsedComponent; + +pub const PropertyRule = struct { + name: css.css_values.ident.DashedIdent, + syntax: SyntaxString, + inherits: bool, + initial_value: ?css.css_values.syntax.ParsedComponent, + loc: Location, + + pub fn parse(name: css.css_values.ident.DashedIdent, input: *css.Parser, loc: Location) Result(PropertyRule) { + var p = PropertyRuleDeclarationParser{ + .syntax = null, + .inherits = null, + .initial_value = null, + }; + + var decl_parser = css.RuleBodyParser(PropertyRuleDeclarationParser).new(input, &p); + while (decl_parser.next()) |decl| { + if (decl.asErr()) |e| { + return .{ .err = e }; + } + } + + // `syntax` and `inherits` are always required. + const parser = decl_parser.parser; + // TODO(zack): source clones these two, but I omitted here becaues it seems 100% unnecessary + const syntax: SyntaxString = parser.syntax orelse return .{ .err = decl_parser.input.newCustomError(css.ParserError.at_rule_body_invalid) }; + const inherits: bool = parser.inherits orelse return .{ .err = decl_parser.input.newCustomError(css.ParserError.at_rule_body_invalid) }; + + // `initial-value` is required unless the syntax is a universal definition. + const initial_value = switch (syntax) { + .universal => if (parser.initial_value) |val| brk: { + var i = css.ParserInput.new(input.allocator(), val); + var p2 = css.Parser.new(&i, null); + + if (p2.isExhausted()) { + break :brk ParsedComponent{ + .token_list = css.TokenList{ + .v = .{}, + }, + }; + } + break :brk switch (syntax.parseValue(&p2)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + } else null, + else => brk: { + const val = parser.initial_value orelse return .{ .err = input.newCustomError(css.ParserError.at_rule_body_invalid) }; + var i = css.ParserInput.new(input.allocator(), val); + var p2 = css.Parser.new(&i, null); + break :brk switch (syntax.parseValue(&p2)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + }, + }; + + return .{ + .result = PropertyRule{ + .name = name, + .syntax = syntax, + .inherits = inherits, + .initial_value = initial_value, + .loc = loc, + }, + }; + } + + const This = @This(); + + pub fn toCss(this: *const This, comptime W: type, dest: *Printer(W)) PrintErr!void { + // #[cfg(feature = "sourcemap")] + // dest.add_mapping(self.loc); + + try dest.writeStr("@property "); + try css.css_values.ident.DashedIdentFns.toCss(&this.name, W, dest); + try dest.whitespace(); + try dest.writeChar('{'); + dest.indent(); + try dest.newline(); + + try dest.writeStr("syntax:"); + try dest.whitespace(); + try this.syntax.toCss(W, dest); + try dest.writeChar(';'); + try dest.newline(); + + try dest.writeStr("inherits:"); + try dest.whitespace(); + if (this.inherits) { + try dest.writeStr("true"); + } else { + try dest.writeStr("false"); + } + + if (this.initial_value) |*initial_value| { + try dest.writeChar(';'); + try dest.newline(); + + try dest.writeStr("initial-value:"); + try dest.whitespace(); + try initial_value.toCss(W, dest); + + if (!dest.minify) { + try dest.writeChar(';'); + } + } + + dest.dedent(); + try dest.newline(); + try dest.writeChar(';'); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) This { + return css.implementDeepClone(@This(), this, allocator); + } +}; + +pub const PropertyRuleDeclarationParser = struct { + syntax: ?SyntaxString, + inherits: ?bool, + initial_value: ?[]const u8, + + const This = @This(); + + pub const DeclarationParser = 
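+    // The declaration parser below only understands the three @property descriptors, e.g.:
+    //   @property --brand-color {
+    //     syntax: "<color>";
+    //     inherits: false;
+    //     initial-value: #0066ff;
+    //   }
+    // `syntax` and `inherits` are required; `initial-value` is required unless the syntax is the
+    // universal definition `*` (see PropertyRule.parse above).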
struct { + pub const Declaration = void; + const Map = bun.ComptimeStringMap(std.meta.FieldEnum(PropertyRuleDeclarationParser), .{ + .{ "syntax", .syntax }, + .{ "inherits", .inherits }, + .{ "initial-value", .initial_value }, + }); + + pub fn parseValue(this: *This, name: []const u8, input: *css.Parser) Result(Declaration) { + // todo_stuff.match_ignore_ascii_case + + // if (Map.getASCIIICaseInsensitive( + // name)) |field| { + // return switch (field) { + // .syntax => |syntax| { + + if (bun.strings.eqlCaseInsensitiveASCIIICheckLength("syntax", name)) { + const syntax = switch (SyntaxString.parse(input)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + this.syntax = syntax; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength("inherits", name)) { + const location = input.currentSourceLocation(); + const ident = switch (input.expectIdent()) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + const inherits = if (bun.strings.eqlCaseInsensitiveASCIIICheckLength("true", ident)) + true + else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength("false", ident)) + false + else + return .{ .err = location.newUnexpectedTokenError(.{ .ident = ident }) }; + this.inherits = inherits; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength("initial-value", name)) { + // Buffer the value into a string. We will parse it later. + const start = input.position(); + while (input.next().isOk()) {} + const initial_value = input.sliceFrom(start); + this.initial_value = initial_value; + } else { + return .{ .err = input.newCustomError(css.ParserError.invalid_declaration) }; + } + + return .{ .result = {} }; + } + }; + + pub const RuleBodyItemParser = struct { + pub fn parseQualified(_: *This) bool { + return false; + } + + pub fn parseDeclarations(_: *This) bool { + return true; + } + }; + + pub const AtRuleParser = struct { + pub const Prelude = void; + pub const AtRule = void; + + pub fn parsePrelude(_: *This, name: []const u8, input: *css.Parser) Result(Prelude) { + return .{ .err = input.newError(css.BasicParseErrorKind{ .at_rule_invalid = name }) }; + } + + pub fn parseBlock(_: *This, _: AtRuleParser.Prelude, _: *const css.ParserState, input: *css.Parser) Result(AtRuleParser.AtRule) { + return .{ .err = input.newError(css.BasicParseErrorKind.at_rule_body_invalid) }; + } + + pub fn ruleWithoutBlock(_: *This, _: AtRuleParser.Prelude, _: *const css.ParserState) css.Maybe(AtRuleParser.AtRule, void) { + return .{ .err = {} }; + } + }; + + pub const QualifiedRuleParser = struct { + pub const Prelude = void; + pub const QualifiedRule = void; + + pub fn parsePrelude(_: *This, input: *css.Parser) Result(Prelude) { + return .{ .err = input.newError(css.BasicParseErrorKind.qualified_rule_invalid) }; + } + + pub fn parseBlock(_: *This, _: Prelude, _: *const css.ParserState, input: *css.Parser) Result(QualifiedRule) { + return .{ .err = input.newError(css.BasicParseErrorKind.qualified_rule_invalid) }; + } + }; +}; diff --git a/src/css/rules/rules.zig b/src/css/rules/rules.zig new file mode 100644 index 0000000000..a66ebd6cd2 --- /dev/null +++ b/src/css/rules/rules.zig @@ -0,0 +1,677 @@ +const std = @import("std"); +pub const css = @import("../css_parser.zig"); +const bun = @import("root").bun; + +const Error = css.Error; +const ArrayList = std.ArrayListUnmanaged; +const MediaList = css.MediaList; +const CustomMedia = css.CustomMedia; +const Printer = css.Printer; +const Maybe = css.Maybe; +const PrinterError = css.PrinterError; +const PrintErr = css.PrintErr; +const 
Dependency = css.Dependency; +const dependencies = css.dependencies; +const Url = css.css_values.url.Url; +const Size2D = css.css_values.size.Size2D; +const fontprops = css.css_properties.font; + +pub const import = @import("./import.zig"); +pub const layer = @import("./layer.zig"); +pub const style = @import("./style.zig"); +pub const keyframes = @import("./keyframes.zig"); +pub const font_face = @import("./font_face.zig"); +pub const font_palette_values = @import("./font_palette_values.zig"); +pub const page = @import("./page.zig"); +pub const supports = @import("./supports.zig"); +pub const counter_style = @import("./counter_style.zig"); +pub const custom_media = @import("./custom_media.zig"); +pub const namespace = @import("./namespace.zig"); +pub const unknown = @import("./unknown.zig"); +pub const document = @import("./document.zig"); +pub const nesting = @import("./nesting.zig"); +pub const viewport = @import("./viewport.zig"); +pub const property = @import("./property.zig"); +pub const container = @import("./container.zig"); +pub const scope = @import("./scope.zig"); +pub const media = @import("./media.zig"); +pub const starting_style = @import("./starting_style.zig"); + +pub const tailwind = @import("./tailwind.zig"); + +const debug = bun.Output.scoped(.CSS_MINIFY, false); + +pub fn CssRule(comptime Rule: type) type { + return union(enum) { + /// A `@media` rule. + media: media.MediaRule(Rule), + /// An `@import` rule. + import: import.ImportRule, + /// A style rule. + style: style.StyleRule(Rule), + /// A `@keyframes` rule. + keyframes: keyframes.KeyframesRule, + /// A `@font-face` rule. + font_face: font_face.FontFaceRule, + /// A `@font-palette-values` rule. + font_palette_values: font_palette_values.FontPaletteValuesRule, + /// A `@page` rule. + page: page.PageRule, + /// A `@supports` rule. + supports: supports.SupportsRule(Rule), + /// A `@counter-style` rule. + counter_style: counter_style.CounterStyleRule, + /// A `@namespace` rule. + namespace: namespace.NamespaceRule, + /// A `@-moz-document` rule. + moz_document: document.MozDocumentRule(Rule), + /// A `@nest` rule. + nesting: nesting.NestingRule(Rule), + /// A `@viewport` rule. + viewport: viewport.ViewportRule, + /// A `@custom-media` rule. + custom_media: CustomMedia, + /// A `@layer` statement rule. + layer_statement: layer.LayerStatementRule, + /// A `@layer` block rule. + layer_block: layer.LayerBlockRule(Rule), + /// A `@property` rule. + property: property.PropertyRule, + /// A `@container` rule. + container: container.ContainerRule(Rule), + /// A `@scope` rule. + scope: scope.ScopeRule(Rule), + /// A `@starting-style` rule. + starting_style: starting_style.StartingStyleRule(Rule), + /// A placeholder for a rule that was removed. + ignored, + /// An unknown at-rule. + unknown: unknown.UnknownAtRule, + /// A custom at-rule. 
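+        // For example, parsing `@media print { a { color: #000 } }` produces a `.media` variant whose
+        // nested rule list holds a `.style` rule; an at-rule the parser does not recognize becomes
+        // `.unknown`, and rules dropped during minification are replaced with `.ignored`.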
+ custom: Rule, + + const This = @This(); + + pub fn toCss(this: *const This, comptime W: type, dest: *Printer(W)) PrintErr!void { + return switch (this.*) { + .media => |x| x.toCss(W, dest), + .import => |x| x.toCss(W, dest), + .style => |x| x.toCss(W, dest), + .keyframes => |x| x.toCss(W, dest), + .font_face => |x| x.toCss(W, dest), + .font_palette_values => |x| x.toCss(W, dest), + .page => |x| x.toCss(W, dest), + .supports => |x| x.toCss(W, dest), + .counter_style => |x| x.toCss(W, dest), + .namespace => |x| x.toCss(W, dest), + .moz_document => |x| x.toCss(W, dest), + .nesting => |x| x.toCss(W, dest), + .viewport => |x| x.toCss(W, dest), + .custom_media => |x| x.toCss(W, dest), + .layer_statement => |x| x.toCss(W, dest), + .layer_block => |x| x.toCss(W, dest), + .property => |x| x.toCss(W, dest), + .starting_style => |x| x.toCss(W, dest), + .container => |x| x.toCss(W, dest), + .scope => |x| x.toCss(W, dest), + .unknown => |x| x.toCss(W, dest), + .custom => |x| x.toCss(W, dest) catch return dest.addFmtError(), + .ignored => {}, + }; + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) This { + return css.implementDeepClone(@This(), this, allocator); + } + }; +} + +pub fn CssRuleList(comptime AtRule: type) type { + return struct { + v: ArrayList(CssRule(AtRule)) = .{}, + + const This = @This(); + + pub fn minify(this: *This, context: *MinifyContext, parent_is_unused: bool) css.MinifyErr!void { + // var keyframe_rules: keyframes.KeyframesName.HashMap(usize) = .{}; + // _ = keyframe_rules; // autofix + // const layer_rules: layer.LayerName.HashMap(usize) = .{}; + // _ = layer_rules; // autofix + // const property_rules: css.css_values.ident.DashedIdent.HashMap(usize) = .{}; + // _ = property_rules; // autofix + var style_rules = StyleRuleKey(AtRule).HashMap(usize){}; + // _ = style_rules; // autofix + var rules = ArrayList(CssRule(AtRule)){}; + + for (this.v.items) |*rule| { + // NOTE Anytime you append to `rules` with this `rule`, you must set `moved_rule` to true. + var moved_rule = false; + defer if (moved_rule) { + // PERF calling deinit here might allow mimalloc to reuse the freed memory + rule.* = .ignored; + }; + + switch (rule.*) { + .keyframes => |*keyframez| { + _ = keyframez; // autofix + // if (context.unused_symbols.contains(switch (keyframez.name) { + // .ident => |ident| ident.v, + // .custom => |custom| custom, + // })) { + // continue; + // } + + // keyframez.minify(context); + + // // Merge @keyframes rules with the same name. + // if (keyframe_rules.get(keyframez.name)) |existing_idx| { + // if (existing_idx < rules.items.len and rules.items[existing_idx] == .keyframes) { + // var existing = &rules.items[existing_idx].keyframes; + // // If the existing rule has the same vendor prefixes, replace it with this rule. + // if (existing.vendor_prefix.eq(keyframez.vendor_prefix)) { + // existing.* = keyframez.clone(context.allocator); + // continue; + // } + // // Otherwise, if the keyframes are identical, merge the prefixes. 
+ // if (existing.keyframes == keyframez.keyframes) { + // existing.vendor_prefix |= keyframez.vendor_prefix; + // existing.vendor_prefix = context.targets.prefixes(existing.vendor_prefix, css.prefixes.Feature.at_keyframes); + // continue; + // } + // } + // } + + // keyframez.vendor_prefix = context.targets.prefixes(keyframez.vendor_prefix, css.prefixes.Feature.at_keyframes); + // keyframe_rules.put(context.allocator, keyframez.name, rules.items.len) catch bun.outOfMemory(); + + // const fallbacks = keyframez.getFallbacks(AtRule, context.targets); + // moved_rule = true; + // rules.append(context.allocator, rule.*) catch bun.outOfMemory(); + // rules.appendSlice(context.allocator, fallbacks) catch bun.outOfMemory(); + // continue; + debug("TODO: KeyframesRule", .{}); + }, + .custom_media => { + if (context.custom_media != null) { + continue; + } + }, + .media => |*med| { + moved_rule = false; + if (rules.items.len > 0 and rules.items[rules.items.len - 1] == .media) { + var last_rule = &rules.items[rules.items.len - 1].media; + if (last_rule.query.eql(&med.query)) { + last_rule.rules.v.appendSlice(context.allocator, med.rules.v.items) catch bun.outOfMemory(); + _ = try last_rule.minify(context, parent_is_unused); + continue; + } + + if (try med.minify(context, parent_is_unused)) { + continue; + } + } + }, + .supports => |*supp| { + if (rules.items.len > 0 and rules.items[rules.items.len - 1] == .supports) { + var last_rule = &rules.items[rules.items.len - 1].supports; + if (last_rule.condition.eql(&supp.condition)) { + continue; + } + } + + try supp.minify(context, parent_is_unused); + if (supp.rules.v.items.len == 0) continue; + }, + .container => |*cont| { + _ = cont; // autofix + debug("TODO: ContainerRule", .{}); + }, + .layer_block => |*lay| { + _ = lay; // autofix + debug("TODO: LayerBlockRule", .{}); + }, + .layer_statement => |*lay| { + _ = lay; // autofix + debug("TODO: LayerStatementRule", .{}); + }, + .moz_document => |*doc| { + _ = doc; // autofix + debug("TODO: MozDocumentRule", .{}); + }, + .style => |*sty| { + const Selector = css.selector.Selector; + const SelectorList = css.selector.SelectorList; + const Component = css.selector.Component; + debug("Input style:\n Selectors: {}\n Decls: {}\n", .{ sty.selectors.debug(), sty.declarations.debug() }); + if (parent_is_unused or try sty.minify(context, parent_is_unused)) { + continue; + } + + // If some of the selectors in this rule are not compatible with the targets, + // we need to either wrap in :is() or split them into multiple rules. + var incompatible: css.SmallList(css.selector.parser.Selector, 1) = if (sty.selectors.v.len() > 1 and + context.targets.shouldCompileSelectors() and + !sty.isCompatible(context.targets.*)) + incompatible: { + debug("Making incompatible!\n", .{}); + // The :is() selector accepts a forgiving selector list, so use that if possible. + // Note that :is() does not allow pseudo elements, so we need to check for that. + // In addition, :is() takes the highest specificity of its arguments, so if the selectors + // have different weights, we need to split them into separate rules as well. 
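+                        // For example, if the targets do not support `::backdrop`, the rule
+                        // `dialog, ::backdrop { overscroll-behavior: contain }` cannot be left as-is, because a
+                        // single unsupported selector invalidates the whole list in the browser. Since `::backdrop`
+                        // is a pseudo element it cannot be wrapped in `:is()`, so it is split out into its own rule below.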
+ if (context.targets.isCompatible(css.compat.Feature.is_selector) and !sty.selectors.anyHasPseudoElement() and sty.selectors.specifitiesAllEqual()) { + const component = Component{ .is = sty.selectors.v.toOwnedSlice(context.allocator) }; + var list = css.SmallList(css.selector.parser.Selector, 1){}; + list.append(context.allocator, Selector.fromComponent(context.allocator, component)); + sty.selectors = SelectorList{ + .v = list, + }; + break :incompatible css.SmallList(Selector, 1){}; + } else { + // Otherwise, partition the selectors and keep the compatible ones in this rule. + // We will generate additional rules for incompatible selectors later. + var incompatible = css.SmallList(Selector, 1){}; + var i: u32 = 0; + while (i < sty.selectors.v.len()) { + if (css.selector.isCompatible(sty.selectors.v.slice()[i .. i + 1], context.targets.*)) { + i += 1; + } else { + // Move the selector to the incompatible list. + incompatible.append( + context.allocator, + sty.selectors.v.orderedRemove(i), + ); + } + } + break :incompatible incompatible; + } + } else .{}; + + sty.updatePrefix(context); + + // Attempt to merge the new rule with the last rule we added. + var merged = false; + const ZACK_REMOVE_THIS = false; + _ = ZACK_REMOVE_THIS; // autofix + if (rules.items.len > 0 and rules.items[rules.items.len - 1] == .style) { + const last_style_rule = &rules.items[rules.items.len - 1].style; + if (mergeStyleRules(AtRule, sty, last_style_rule, context)) { + // If that was successful, then the last rule has been updated to include the + // selectors/declarations of the new rule. This might mean that we can merge it + // with the previous rule, so continue trying while we have style rules available. + while (rules.items.len >= 2) { + const len = rules.items.len; + var a, var b = bun.splitAtMut(CssRule(AtRule), rules.items, len - 1); + if (b[0] == .style and a[len - 2] == .style) { + if (mergeStyleRules(AtRule, &b[0].style, &a[len - 2].style, context)) { + // If we were able to merge the last rule into the previous one, remove the last. + const popped = rules.pop(); + _ = popped; // autofix + // TODO: deinit? + // popped.deinit(contet.allocator); + continue; + } + } + // If we didn't see a style rule, or were unable to merge, stop. + break; + } + merged = true; + } + } + + // Create additional rules for logical properties, @supports overrides, and incompatible selectors. + const supps = context.handler_context.getSupportsRules(AtRule, sty); + const logical = context.handler_context.getAdditionalRules(AtRule, sty); + debug("LOGICAL: {d}\n", .{logical.items.len}); + const StyleRule = style.StyleRule(AtRule); + + const IncompatibleRuleEntry = struct { rule: StyleRule, supports: ArrayList(css.CssRule(AtRule)), logical: ArrayList(css.CssRule(AtRule)) }; + var incompatible_rules: css.SmallList(IncompatibleRuleEntry, 1) = incompatible_rules: { + var incompatible_rules = css.SmallList(IncompatibleRuleEntry, 1).initCapacity( + context.allocator, + incompatible.len(), + ); + + for (incompatible.slice_mut()) |sel| { + // Create a clone of the rule with only the one incompatible selector. + const list = SelectorList{ .v = css.SmallList(Selector, 1).withOne(sel) }; + var clone: StyleRule = .{ + .selectors = list, + .vendor_prefix = sty.vendor_prefix, + .declarations = sty.declarations.deepClone(context.allocator), + .rules = sty.rules.deepClone(context.allocator), + .loc = sty.loc, + }; + clone.updatePrefix(context); + + // Also add rules for logical properties and @supports overrides. 
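+                        // For example (illustrative): logical declarations such as `inset-inline-start: 0` that were
+                        // lowered earlier may come back from the handler context as additional direction-specific rules,
+                        // and some compiled values come back as `@supports` override rules; both get appended after this
+                        // rule further below.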
+ const s = context.handler_context.getSupportsRules(AtRule, &clone); + const l = context.handler_context.getAdditionalRules(AtRule, &clone); + incompatible_rules.append(context.allocator, IncompatibleRuleEntry{ + .rule = clone, + .supports = s, + .logical = l, + }); + } + + break :incompatible_rules incompatible_rules; + }; + debug("Incompatible rules: {d}\n", .{incompatible_rules.len()}); + defer incompatible.deinit(context.allocator); + defer incompatible_rules.deinit(context.allocator); + + context.handler_context.reset(); + + // If the rule has nested rules, and we have extra rules to insert such as for logical properties, + // we need to split the rule in two so we can insert the extra rules in between the declarations from + // the main rule and the nested rules. + const nested_rule: ?StyleRule = if (sty.rules.v.items.len > 0 and + // can happen if there are no compatible rules, above. + sty.selectors.v.len() > 0 and + (logical.items.len > 0 or supps.items.len > 0 or !incompatible_rules.isEmpty())) + brk: { + var rulesss: CssRuleList(AtRule) = .{}; + std.mem.swap(CssRuleList(AtRule), &sty.rules, &rulesss); + break :brk StyleRule{ + .selectors = sty.selectors.deepClone(context.allocator), + .declarations = css.DeclarationBlock{}, + .rules = rulesss, + .vendor_prefix = sty.vendor_prefix, + .loc = sty.loc, + }; + } else null; + + if (!merged and !sty.isEmpty()) { + const source_index = sty.loc.source_index; + const has_no_rules = sty.rules.v.items.len == 0; + const idx = rules.items.len; + + rules.append(context.allocator, rule.*) catch bun.outOfMemory(); + moved_rule = true; + + // Check if this rule is a duplicate of an earlier rule, meaning it has + // the same selectors and defines the same properties. If so, remove the + // earlier rule because this one completely overrides it. + if (has_no_rules) { + const key = StyleRuleKey(AtRule).new(&rules, idx); + if (idx > 0) { + if (style_rules.fetchSwapRemove(key)) |i_| { + const i = i_.value; + if (i < rules.items.len and rules.items[i] == .style) { + const other = &rules.items[i].style; + // Don't remove the rule if this is a CSS module and the other rule came from a different file. + if (!context.css_modules or source_index == other.loc.source_index) { + // Only mark the rule as ignored so we don't need to change all of the indices. 
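+                                        // For example, if `.btn { color: red }` was emitted earlier and `.btn { color: blue }`
+                                        // appears later with the same selectors and the same set of properties, the earlier copy
+                                        // is marked `.ignored` here, since the later rule completely overrides it.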
+ rules.items[i] = .ignored; + } + } + } + } + + style_rules.put(context.allocator, key, idx) catch bun.outOfMemory(); + } + } + + if (logical.items.len > 0) { + debug("Adding logical: {}\n", .{logical.items[0].style.selectors.debug()}); + var log = CssRuleList(AtRule){ .v = logical }; + try log.minify(context, parent_is_unused); + rules.appendSlice(context.allocator, log.v.items) catch bun.outOfMemory(); + } + rules.appendSlice(context.allocator, supps.items) catch bun.outOfMemory(); + for (incompatible_rules.slice_mut()) |incompatible_entry| { + if (!incompatible_entry.rule.isEmpty()) { + rules.append(context.allocator, .{ .style = incompatible_entry.rule }) catch bun.outOfMemory(); + } + if (incompatible_entry.logical.items.len > 0) { + var log = CssRuleList(AtRule){ .v = incompatible_entry.logical }; + try log.minify(context, parent_is_unused); + rules.appendSlice(context.allocator, log.v.items) catch bun.outOfMemory(); + } + rules.appendSlice(context.allocator, incompatible_entry.supports.items) catch bun.outOfMemory(); + } + if (nested_rule) |nested| { + rules.append(context.allocator, .{ .style = nested }) catch bun.outOfMemory(); + } + + continue; + }, + .counter_style => |*cntr| { + _ = cntr; // autofix + debug("TODO: CounterStyleRule", .{}); + }, + .scope => |*scpe| { + _ = scpe; // autofix + debug("TODO: ScopeRule", .{}); + }, + .nesting => |*nst| { + _ = nst; // autofix + debug("TODO: NestingRule", .{}); + }, + .starting_style => |*rl| { + _ = rl; // autofix + debug("TODO: StartingStyleRule", .{}); + }, + .font_palette_values => |*f| { + _ = f; // autofix + debug("TODO: FontPaletteValuesRule", .{}); + }, + .property => |*prop| { + _ = prop; // autofix + debug("TODO: PropertyRule", .{}); + }, + else => {}, + } + + rules.append(context.allocator, rule.*) catch bun.outOfMemory(); + } + + // MISSING SHIT HERE + + css.deepDeinit(CssRule(AtRule), context.allocator, &this.v); + this.v = rules; + return; + } + + pub fn toCss(this: *const This, comptime W: type, dest: *Printer(W)) PrintErr!void { + var first = true; + var last_without_block = false; + + for (this.v.items) |*rule| { + if (rule.* == .ignored) continue; + + // Skip @import rules if collecting dependencies. + if (rule.* == .import) { + if (dest.remove_imports) { + const dep = if (dest.dependencies != null) Dependency{ + .import = dependencies.ImportDependency.new(dest.allocator, &rule.import, dest.filename()), + } else null; + + if (dest.dependencies) |*deps| { + deps.append(dest.allocator, dep.?) 
catch unreachable; + continue; + } + } + } + + if (first) { + first = false; + } else { + if (!dest.minify and + !(last_without_block and + (rule.* == .import or rule.* == .namespace or rule.* == .layer_statement))) + { + try dest.writeChar('\n'); + } + try dest.newline(); + } + try rule.toCss(W, dest); + last_without_block = rule.* == .import or rule.* == .namespace or rule.* == .layer_statement; + } + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + }; +} + +pub const MinifyContext = struct { + /// NOTE: this should the same allocator the AST was allocated with + allocator: std.mem.Allocator, + targets: *const css.targets.Targets, + handler: *css.DeclarationHandler, + important_handler: *css.DeclarationHandler, + handler_context: css.PropertyHandlerContext, + unused_symbols: *const std.StringArrayHashMapUnmanaged(void), + custom_media: ?std.StringArrayHashMapUnmanaged(custom_media.CustomMediaRule), + css_modules: bool, + err: ?css.MinifyError = null, +}; + +pub const Location = struct { + /// The index of the source file within the source map. + source_index: u32, + /// The line number, starting at 0. + line: u32, + /// The column number within a line, starting at 1 for first the character of the line. + /// Column numbers are counted in UTF-16 code units. + column: u32, + + pub fn dummy() Location { + return .{ + .source_index = std.math.maxInt(u32), + .line = std.math.maxInt(u32), + .column = std.math.maxInt(u32), + }; + } +}; + +pub const StyleContext = struct { + selectors: *const css.SelectorList, + parent: ?*const StyleContext, +}; + +/// A key to a StyleRule meant for use in a HashMap for quickly detecting duplicates. +/// It stores a reference to a list and an index so it can access items without cloning +/// even when the list is reallocated. A hash is also pre-computed for fast lookups. +pub fn StyleRuleKey(comptime R: type) type { + return struct { + list: *const ArrayList(CssRule(R)), + index: usize, + // TODO: store in the hashmap by setting `store_hash` to true + hash: u64, + + const This = @This(); + + pub fn HashMap(comptime V: type) type { + return std.ArrayHashMapUnmanaged( + StyleRuleKey(R), + V, + struct { + pub fn hash(_: @This(), key: This) u32 { + return @truncate(key.hash); + } + + pub fn eql(_: @This(), a: This, b: This, _: usize) bool { + return a.eql(&b); + } + }, + // TODO: make this true + false, + ); + } + + pub fn new(list: *const ArrayList(CssRule(R)), index: usize) This { + const rule = &list.items[index].style; + return This{ + .list = list, + .index = index, + .hash = rule.hashKey(), + }; + } + + pub fn eql(this: *const This, other: *const This) bool { + const rule = if (this.index < this.list.items.len and this.list.items[this.index] == .style) + &this.list.items[this.index].style + else + return false; + + const other_rule = if (other.index < other.list.items.len and other.list.items[other.index] == .style) + &other.list.items[other.index].style + else + return false; + + return rule.isDuplicate(other_rule); + } + }; +} + +fn mergeStyleRules( + comptime T: type, + sty: *style.StyleRule(T), + last_style_rule: *style.StyleRule(T), + context: *MinifyContext, +) bool { + // Merge declarations if the selectors are equivalent, and both are compatible with all targets. 
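+    // For example, `.a { color: red }` directly followed by `.a { background: #fff }` merges into
+    // `.a { color: red; background: #fff }` (this branch), while `.a { color: red }` followed by
+    // `.b { color: red }` merges into `.a, .b { color: red }` (the equal-declarations branch below).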
+ if (sty.selectors.eql(&last_style_rule.selectors) and + sty.isCompatible(context.targets.*) and + last_style_rule.isCompatible(context.targets.*) and + sty.rules.v.items.len == 0 and + last_style_rule.rules.v.items.len == 0 and + (!context.css_modules or sty.loc.source_index == last_style_rule.loc.source_index)) + { + last_style_rule.declarations.declarations.appendSlice( + context.allocator, + sty.declarations.declarations.items, + ) catch bun.outOfMemory(); + sty.declarations.declarations.clearRetainingCapacity(); + + last_style_rule.declarations.important_declarations.appendSlice( + context.allocator, + sty.declarations.important_declarations.items, + ) catch bun.outOfMemory(); + sty.declarations.important_declarations.clearRetainingCapacity(); + + last_style_rule.declarations.minify( + context.handler, + context.important_handler, + &context.handler_context, + ); + return true; + } else if (sty.declarations.eql(&last_style_rule.declarations) and + sty.rules.v.items.len == 0 and + last_style_rule.rules.v.items.len == 0) + { + // If both selectors are potentially vendor prefixable, and they are + // equivalent minus prefixes, add the prefix to the last rule. + if (!sty.vendor_prefix.isEmpty() and + !last_style_rule.vendor_prefix.isEmpty() and + css.selector.isEquivalent(sty.selectors.v.slice(), last_style_rule.selectors.v.slice())) + { + // If the new rule is unprefixed, replace the prefixes of the last rule. + // Otherwise, add the new prefix. + if (sty.vendor_prefix.contains(css.VendorPrefix{ .none = true }) and context.targets.shouldCompileSelectors()) { + last_style_rule.vendor_prefix = sty.vendor_prefix; + } else { + last_style_rule.vendor_prefix.insert(sty.vendor_prefix); + } + return true; + } + + // Append the selectors to the last rule if the declarations are the same, and all selectors are compatible. + if (sty.isCompatible(context.targets.*) and last_style_rule.isCompatible(context.targets.*)) { + last_style_rule.selectors.v.appendSlice( + context.allocator, + sty.selectors.v.slice(), + ); + sty.selectors.v.clearRetainingCapacity(); + if (sty.vendor_prefix.contains(css.VendorPrefix{ .none = true }) and context.targets.shouldCompileSelectors()) { + last_style_rule.vendor_prefix = sty.vendor_prefix; + } else { + last_style_rule.vendor_prefix.insert(sty.vendor_prefix); + } + return true; + } + } + return false; +} diff --git a/src/css/rules/scope.zig b/src/css/rules/scope.zig new file mode 100644 index 0000000000..51436f416a --- /dev/null +++ b/src/css/rules/scope.zig @@ -0,0 +1,82 @@ +const std = @import("std"); +pub const css = @import("../css_parser.zig"); +const bun = @import("root").bun; +const Error = css.Error; +const ArrayList = std.ArrayListUnmanaged; +const MediaList = css.MediaList; +const CustomMedia = css.CustomMedia; +const Printer = css.Printer; +const Maybe = css.Maybe; +const PrinterError = css.PrinterError; +const PrintErr = css.PrintErr; +const Location = css.css_rules.Location; +const style = css.css_rules.style; +const CssRuleList = css.CssRuleList; + +/// A [@scope](https://drafts.csswg.org/css-cascade-6/#scope-atrule) rule. +/// +/// @scope () [to ()]? { +/// +/// } +pub fn ScopeRule(comptime R: type) type { + return struct { + /// A selector list used to identify the scoping root(s). + scope_start: ?css.selector.parser.SelectorList, + /// A selector list used to identify any scoping limits. + scope_end: ?css.selector.parser.SelectorList, + /// Nested rules within the `@scope` rule. 
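+        // For example, `@scope (.card) to (.card__content) { img { border: 1px solid } }` scopes the
+        // nested `img` rule to elements inside `.card` but outside `.card__content`; both the scope
+        // start and scope end selector lists are optional.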
+ rules: css.CssRuleList(R), + /// The location of the rule in the source file. + loc: Location, + + const This = @This(); + + pub fn toCss(this: *const This, comptime W: type, dest: *Printer(W)) PrintErr!void { + // #[cfg(feature = "sourcemap")] + // dest.add_mapping(self.loc); + + try dest.writeStr("@scope"); + try dest.whitespace(); + if (this.scope_start) |*scope_start| { + try dest.writeChar('('); + // try scope_start.toCss(W, dest); + try css.selector.serialize.serializeSelectorList(scope_start.v.slice(), W, dest, dest.context(), false); + try dest.writeChar(')'); + try dest.whitespace(); + } + if (this.scope_end) |*scope_end| { + if (dest.minify) { + try dest.writeChar(' '); + } + try dest.writeStr("to ("); + // is treated as an ancestor of scope end. + // https://drafts.csswg.org/css-nesting/#nesting-at-scope + if (this.scope_start) |*scope_start| { + try dest.withContext(scope_start, scope_end, struct { + pub fn toCssFn(scope_end_: *const css.selector.parser.SelectorList, comptime WW: type, d: *Printer(WW)) PrintErr!void { + return css.selector.serialize.serializeSelectorList(scope_end_.v.slice(), WW, d, d.context(), false); + } + }.toCssFn); + } else { + return css.selector.serialize.serializeSelectorList(scope_end.v.slice(), W, dest, dest.context(), false); + } + try dest.writeChar(')'); + try dest.whitespace(); + } + try dest.writeChar('{'); + dest.indent(); + try dest.newline(); + // Nested style rules within @scope are implicitly relative to the + // so clear our style context while printing them to avoid replacing & ourselves. + // https://drafts.csswg.org/css-cascade-6/#scoped-rules + try dest.withClearedContext(&this.rules, CssRuleList(R).toCss); + dest.dedent(); + try dest.newline(); + try dest.writeChar('}'); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) This { + return css.implementDeepClone(@This(), this, allocator); + } + }; +} diff --git a/src/css/rules/starting_style.zig b/src/css/rules/starting_style.zig new file mode 100644 index 0000000000..f86a656931 --- /dev/null +++ b/src/css/rules/starting_style.zig @@ -0,0 +1,45 @@ +const std = @import("std"); +pub const css = @import("../css_parser.zig"); +const bun = @import("root").bun; +const Error = css.Error; +const ArrayList = std.ArrayListUnmanaged; +const MediaList = css.MediaList; +const CustomMedia = css.CustomMedia; +const Printer = css.Printer; +const Maybe = css.Maybe; +const PrinterError = css.PrinterError; +const PrintErr = css.PrintErr; +const Location = css.css_rules.Location; +const style = css.css_rules.style; +const CssRuleList = css.CssRuleList; + +/// A [@starting-style](https://drafts.csswg.org/css-transitions-2/#defining-before-change-style-the-starting-style-rule) rule. +pub fn StartingStyleRule(comptime R: type) type { + return struct { + /// Nested rules within the `@starting-style` rule. + rules: css.CssRuleList(R), + /// The location of the rule in the source file. 
+ loc: Location, + + const This = @This(); + + pub fn toCss(this: *const This, comptime W: type, dest: *Printer(W)) PrintErr!void { + // #[cfg(feature = "sourcemap")] + // dest.add_mapping(self.loc); + + try dest.writeStr("@starting-style"); + try dest.whitespace(); + try dest.writeChar('{'); + dest.indent(); + try dest.newline(); + try this.rules.toCss(W, dest); + dest.dedent(); + try dest.newline(); + try dest.writeChar('}'); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) This { + return css.implementDeepClone(@This(), this, allocator); + } + }; +} diff --git a/src/css/rules/style.zig b/src/css/rules/style.zig new file mode 100644 index 0000000000..a972865541 --- /dev/null +++ b/src/css/rules/style.zig @@ -0,0 +1,260 @@ +const std = @import("std"); +pub const css = @import("../css_parser.zig"); +const bun = @import("root").bun; +const ArrayList = std.ArrayListUnmanaged; +const MediaList = css.MediaList; +const CustomMedia = css.CustomMedia; +const Printer = css.Printer; +const Maybe = css.Maybe; +const PrinterError = css.PrinterError; +const PrintErr = css.PrintErr; +const Dependency = css.Dependency; +const dependencies = css.dependencies; +const Url = css.css_values.url.Url; +const Size2D = css.css_values.size.Size2D; +const fontprops = css.css_properties.font; +const LayerName = css.css_rules.layer.LayerName; +const SupportsCondition = css.css_rules.supports.SupportsCondition; +const Location = css.css_rules.Location; + +pub fn StyleRule(comptime R: type) type { + return struct { + /// The selectors for the style rule. + selectors: css.selector.parser.SelectorList, + /// A vendor prefix override, used during selector printing. + vendor_prefix: css.VendorPrefix, + /// The declarations within the style rule. + declarations: css.DeclarationBlock, + /// Nested rules within the style rule. + rules: css.CssRuleList(R), + /// The location of the rule in the source file. + loc: css.Location, + + const This = @This(); + + /// Returns whether the rule is empty. + pub fn isEmpty(this: *const This) bool { + return this.selectors.v.isEmpty() or (this.declarations.isEmpty() and this.rules.v.items.len == 0); + } + + /// Returns a hash of this rule for use when deduplicating. + /// Includes the selectors and properties. 
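+        /// Declaration values are not part of the hash (only property ids are), so a
+        /// matching hash is confirmed via `isDuplicate` when used through `StyleRuleKey`.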
+ pub fn hashKey(this: *const This) u64 { + var hasher = std.hash.Wyhash.init(0); + this.selectors.hash(&hasher); + this.declarations.hashPropertyIds(&hasher); + return hasher.final(); + } + + pub fn deepClone(this: *const This, allocator: std.mem.Allocator) This { + return This{ + .selectors = this.selectors.deepClone(allocator), + .vendor_prefix = this.vendor_prefix, + .declarations = this.declarations.deepClone(allocator), + .rules = this.rules.deepClone(allocator), + .loc = this.loc, + }; + } + + pub fn updatePrefix(this: *This, context: *css.MinifyContext) void { + this.vendor_prefix = css.selector.getPrefix(&this.selectors); + if (this.vendor_prefix.contains(css.VendorPrefix{ .none = true }) and + context.targets.shouldCompileSelectors()) + { + this.vendor_prefix = css.selector.downlevelSelectors(context.allocator, this.selectors.v.slice_mut(), context.targets.*); + } + } + + pub fn isCompatible(this: *const This, targets: css.targets.Targets) bool { + return css.selector.isCompatible(this.selectors.v.slice(), targets); + } + + pub fn toCss(this: *const This, comptime W: type, dest: *Printer(W)) PrintErr!void { + if (this.vendor_prefix.isEmpty()) { + try this.toCssBase(W, dest); + } else { + var first_rule = true; + inline for (css.VendorPrefix.FIELDS) |field| { + if (@field(this.vendor_prefix, field)) { + if (first_rule) { + first_rule = false; + } else { + if (!dest.minify) { + try dest.writeChar('\n'); // no indent + } + try dest.newline(); + } + + const prefix = css.VendorPrefix.fromName(field); + dest.vendor_prefix = prefix; + try this.toCssBase(W, dest); + } + } + + dest.vendor_prefix = css.VendorPrefix.empty(); + } + } + + fn toCssBase(this: *const This, comptime W: type, dest: *Printer(W)) PrintErr!void { + // If supported, or there are no targets, preserve nesting. Otherwise, write nested rules after parent. + const supports_nesting = this.rules.v.items.len == 0 or + !css.Targets.shouldCompileSame( + &dest.targets, + .nesting, + ); + + const len = this.declarations.declarations.items.len + this.declarations.important_declarations.items.len; + const has_declarations = supports_nesting or len > 0 or this.rules.v.items.len == 0; + + if (has_declarations) { + // #[cfg(feature = "sourcemap")] + // dest.add_mapping(self.loc); + + try css.selector.serialize.serializeSelectorList(this.selectors.v.slice(), W, dest, dest.context(), false); + try dest.whitespace(); + try dest.writeChar('{'); + dest.indent(); + + var i: usize = 0; + const DECLS = .{ "declarations", "important_declarations" }; + inline for (DECLS) |decl_field_name| { + const important = comptime std.mem.eql(u8, decl_field_name, "important_declarations"); + const decls: *const ArrayList(css.Property) = &@field(this.declarations, decl_field_name); + + for (decls.items) |*decl| { + // The CSS modules `composes` property is handled specially, and omitted during printing. + // We need to add the classes it references to the list for the selectors in this rule. 
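+                        // For example, with CSS modules enabled,
+                        //   .button { composes: base; color: red }
+                        // prints only `color: red`; "base" is recorded for this rule's
+                        // selectors via `handleComposes` below rather than being emitted.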
+ if (decl.* == .composes) { + const composes = &decl.composes; + if (dest.isNested() and dest.css_module != null) { + return dest.newError(css.PrinterErrorKind.invalid_composes_nesting, composes.loc); + } + + if (dest.css_module) |*css_module| { + if (css_module.handleComposes( + dest.allocator, + &this.selectors, + composes, + this.loc.source_index, + ).asErr()) |error_kind| { + return dest.newError(error_kind, composes.loc); + } + continue; + } + } + + try dest.newline(); + try decl.toCss(W, dest, important); + if (i != len - 1 or !dest.minify or (supports_nesting and this.rules.v.items.len > 0)) { + try dest.writeChar(';'); + } + + i += 1; + } + } + } + + const Helpers = struct { + pub fn newline( + self: *const This, + comptime W2: type, + d: *Printer(W2), + supports_nesting2: bool, + len1: usize, + ) PrintErr!void { + if (!d.minify and (supports_nesting2 or len1 > 0) and self.rules.v.items.len > 0) { + if (len1 > 0) { + try d.writeChar('\n'); + } + try d.newline(); + } + } + + pub fn end(comptime W2: type, d: *Printer(W2), has_decls: bool) PrintErr!void { + if (has_decls) { + d.dedent(); + try d.newline(); + try d.writeChar('}'); + } + } + }; + + // Write nested rules after the parent. + if (supports_nesting) { + try Helpers.newline(this, W, dest, supports_nesting, len); + try this.rules.toCss(W, dest); + try Helpers.end(W, dest, has_declarations); + } else { + try Helpers.end(W, dest, has_declarations); + try Helpers.newline(this, W, dest, supports_nesting, len); + try dest.withContext(&this.selectors, this, struct { + pub fn toCss(self: *const This, WW: type, d: *Printer(WW)) PrintErr!void { + return self.rules.toCss(WW, d); + } + }.toCss); + } + } + + pub fn minify(this: *This, context: *css.MinifyContext, parent_is_unused: bool) css.MinifyErr!bool { + var unused = false; + if (context.unused_symbols.count() > 0) { + if (css.selector.isUnused(this.selectors.v.slice(), context.unused_symbols, parent_is_unused)) { + if (this.rules.v.items.len == 0) { + return true; + } + + this.declarations.declarations.clearRetainingCapacity(); + this.declarations.important_declarations.clearRetainingCapacity(); + unused = true; + } + } + + // TODO: this + // let pure_css_modules = context.pure_css_modules; + // if context.pure_css_modules { + // if !self.selectors.0.iter().all(is_pure_css_modules_selector) { + // return Err(MinifyError { + // kind: crate::error::MinifyErrorKind::ImpureCSSModuleSelector, + // loc: self.loc, + // }); + // } + + // // Parent rule contained id or class, so child rules don't need to. + // context.pure_css_modules = false; + // } + + context.handler_context.context = .style_rule; + this.declarations.minify(context.handler, context.important_handler, &context.handler_context); + context.handler_context.context = .none; + + if (this.rules.v.items.len > 0) { + var handler_context = context.handler_context.child(.style_rule); + std.mem.swap(css.PropertyHandlerContext, &context.handler_context, &handler_context); + try this.rules.minify(context, unused); + if (unused and this.rules.v.items.len == 0) { + return true; + } + } + + return false; + } + + /// Returns whether this rule is a duplicate of another rule. + /// This means it has the same selectors and properties. 
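+        /// Declarations are compared by property id only (values may still differ),
+        /// mirroring the property-id hashing in `hashKey`.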
+ pub inline fn isDuplicate(this: *const This, other: *const This) bool { + return this.declarations.len() == other.declarations.len() and + this.selectors.eql(&other.selectors) and + brk: { + var len = @min(this.declarations.declarations.items.len, other.declarations.declarations.items.len); + for (this.declarations.declarations.items[0..len], other.declarations.declarations.items[0..len]) |*a, *b| { + if (!a.propertyId().eql(&b.propertyId())) break :brk false; + } + len = @min(this.declarations.important_declarations.items.len, other.declarations.important_declarations.items.len); + for (this.declarations.important_declarations.items[0..len], other.declarations.important_declarations.items[0..len]) |*a, *b| { + if (!a.propertyId().eql(&b.propertyId())) break :brk false; + } + break :brk true; + }; + } + }; +} diff --git a/src/css/rules/supports.zig b/src/css/rules/supports.zig new file mode 100644 index 0000000000..a07a78494e --- /dev/null +++ b/src/css/rules/supports.zig @@ -0,0 +1,408 @@ +const std = @import("std"); +pub const css = @import("../css_parser.zig"); +const bun = @import("root").bun; +const Result = css.Result; +const ArrayList = std.ArrayListUnmanaged; +const MediaList = css.MediaList; +const CustomMedia = css.CustomMedia; +const Printer = css.Printer; +const Maybe = css.Maybe; +const PrinterError = css.PrinterError; +const PrintErr = css.PrintErr; +const Dependency = css.Dependency; +const dependencies = css.dependencies; +const Url = css.css_values.url.Url; +const Size2D = css.css_values.size.Size2D; +const fontprops = css.css_properties.font; +const LayerName = css.css_rules.layer.LayerName; +const Location = css.css_rules.Location; +const Angle = css.css_values.angle.Angle; +const FontStyleProperty = css.css_properties.font.FontStyle; +const FontFamily = css.css_properties.font.FontFamily; +const FontWeight = css.css_properties.font.FontWeight; +const FontStretch = css.css_properties.font.FontStretch; +const CustomProperty = css.css_properties.custom.CustomProperty; +const CustomPropertyName = css.css_properties.custom.CustomPropertyName; +const DashedIdent = css.css_values.ident.DashedIdent; + +/// A [``](https://drafts.csswg.org/css-conditional-3/#typedef-supports-condition), +/// as used in the `@supports` and `@import` rules. +pub const SupportsCondition = union(enum) { + /// A `not` expression. + not: *SupportsCondition, + + /// An `and` expression. + @"and": ArrayList(SupportsCondition), + + /// An `or` expression. + @"or": ArrayList(SupportsCondition), + + /// A declaration to evaluate. + declaration: struct { + /// The property id for the declaration. + property_id: css.PropertyId, + /// The raw value of the declaration. + value: []const u8, + + pub fn eql(this: *const @This(), other: *const @This()) bool { + return css.implementEql(@This(), this, other); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + }, + + /// A selector to evaluate. + selector: []const u8, + + /// An unknown condition. 
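+    /// The raw text is preserved and written back out verbatim when serialized.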
+ unknown: []const u8, + + pub fn eql(this: *const SupportsCondition, other: *const SupportsCondition) bool { + return css.implementEql(SupportsCondition, this, other); + } + + pub fn deepClone(this: *const SupportsCondition, allocator: std.mem.Allocator) SupportsCondition { + return css.implementDeepClone(SupportsCondition, this, allocator); + } + + fn needsParens(this: *const SupportsCondition, parent: *const SupportsCondition) bool { + return switch (this.*) { + .not => true, + .@"and" => parent.* != .@"and", + .@"or" => parent.* != .@"or", + else => false, + }; + } + + const SeenDeclKey = struct { + css.PropertyId, + []const u8, + }; + + pub fn parse(input: *css.Parser) Result(SupportsCondition) { + if (input.tryParse(css.Parser.expectIdentMatching, .{"not"}).isOk()) { + const in_parens = switch (SupportsCondition.parseInParens(input)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + return .{ + .result = .{ + .not = bun.create( + input.allocator(), + SupportsCondition, + in_parens, + ), + }, + }; + } + + const in_parens: SupportsCondition = switch (SupportsCondition.parseInParens(input)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + var expected_type: ?i32 = null; + var conditions = ArrayList(SupportsCondition){}; + const mapalloc: std.mem.Allocator = input.allocator(); + var seen_declarations = std.ArrayHashMap( + SeenDeclKey, + usize, + struct { + pub fn hash(self: @This(), s: SeenDeclKey) u32 { + _ = self; // autofix + return std.array_hash_map.hashString(s[1]) +% @intFromEnum(s[0]); + } + pub fn eql(self: @This(), a: SeenDeclKey, b: SeenDeclKey, b_index: usize) bool { + _ = self; // autofix + _ = b_index; // autofix + return seenDeclKeyEql(a, b); + } + + pub inline fn seenDeclKeyEql(this: SeenDeclKey, that: SeenDeclKey) bool { + return @intFromEnum(this[0]) == @intFromEnum(that[0]) and bun.strings.eql(this[1], that[1]); + } + }, + false, + ).init(mapalloc); + defer seen_declarations.deinit(); + + while (true) { + const Closure = struct { + expected_type: *?i32, + pub fn tryParseFn(i: *css.Parser, this: *@This()) Result(SupportsCondition) { + const location = i.currentSourceLocation(); + const s = switch (i.expectIdent()) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + const found_type: i32 = found_type: { + // todo_stuff.match_ignore_ascii_case + if (bun.strings.eqlCaseInsensitiveASCIIICheckLength("and", s)) break :found_type 1; + if (bun.strings.eqlCaseInsensitiveASCIIICheckLength("or", s)) break :found_type 2; + return .{ .err = location.newUnexpectedTokenError(.{ .ident = s }) }; + }; + + if (this.expected_type.*) |expected| { + if (found_type != expected) { + return .{ .err = location.newUnexpectedTokenError(.{ .ident = s }) }; + } + } else { + this.expected_type.* = found_type; + } + + return SupportsCondition.parseInParens(i); + } + }; + var closure = Closure{ + .expected_type = &expected_type, + }; + const _condition = input.tryParse(Closure.tryParseFn, .{&closure}); + + switch (_condition) { + .result => |condition| { + if (conditions.items.len == 0) { + conditions.append(input.allocator(), in_parens.deepClone(input.allocator())) catch bun.outOfMemory(); + if (in_parens == .declaration) { + const property_id = in_parens.declaration.property_id; + const value = in_parens.declaration.value; + seen_declarations.put( + .{ property_id.withPrefix(css.VendorPrefix{ .none = true }), value }, + 0, + ) catch bun.outOfMemory(); + } + } + + if (condition == .declaration) { + // Merge multiple declarations with the same property 
id (minus prefix) and value together. + const property_id_ = condition.declaration.property_id; + const value = condition.declaration.value; + + const property_id = property_id_.withPrefix(css.VendorPrefix{ .none = true }); + const key = SeenDeclKey{ property_id, value }; + if (seen_declarations.get(key)) |index| { + const cond = &conditions.items[index]; + if (cond.* == .declaration) { + cond.declaration.property_id.addPrefix(property_id.prefix()); + } + } else { + seen_declarations.put(key, conditions.items.len) catch bun.outOfMemory(); + conditions.append(input.allocator(), SupportsCondition{ .declaration = .{ + .property_id = property_id, + .value = value, + } }) catch bun.outOfMemory(); + } + } else { + conditions.append( + input.allocator(), + condition, + ) catch bun.outOfMemory(); + } + }, + else => break, + } + } + + if (conditions.items.len == 1) { + const ret = conditions.pop(); + defer conditions.deinit(input.allocator()); + return .{ .result = ret }; + } + + if (expected_type == 1) return .{ .result = .{ .@"and" = conditions } }; + if (expected_type == 2) return .{ .result = .{ .@"or" = conditions } }; + return .{ .result = in_parens }; + } + + pub fn parseDeclaration(input: *css.Parser) Result(SupportsCondition) { + const property_id = switch (css.PropertyId.parse(input)) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + if (input.expectColon().asErr()) |e| return .{ .err = e }; + input.skipWhitespace(); + const pos = input.position(); + if (input.expectNoErrorToken().asErr()) |e| return .{ .err = e }; + return .{ .result = SupportsCondition{ + .declaration = .{ + .property_id = property_id, + .value = input.sliceFrom(pos), + }, + } }; + } + + fn parseInParens(input: *css.Parser) Result(SupportsCondition) { + input.skipWhitespace(); + const location = input.currentSourceLocation(); + const pos = input.position(); + const tok = switch (input.next()) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + switch (tok.*) { + .function => |f| { + if (bun.strings.eqlCaseInsensitiveASCIIICheckLength("selector", f)) { + const Fn = struct { + pub fn tryParseFn(i: *css.Parser) Result(SupportsCondition) { + return i.parseNestedBlock(SupportsCondition, {}, @This().parseNestedBlockFn); + } + pub fn parseNestedBlockFn(_: void, i: *css.Parser) Result(SupportsCondition) { + const p = i.position(); + if (i.expectNoErrorToken().asErr()) |e| return .{ .err = e }; + return .{ .result = SupportsCondition{ .selector = i.sliceFrom(p) } }; + } + }; + const res = input.tryParse(Fn.tryParseFn, .{}); + if (res.isOk()) return res; + } + }, + .open_paren => { + const res = input.tryParse(struct { + pub fn parseFn(i: *css.Parser) Result(SupportsCondition) { + return i.parseNestedBlock(SupportsCondition, {}, css.voidWrap(SupportsCondition, parse)); + } + }.parseFn, .{}); + if (res.isOk()) return res; + }, + else => return .{ .err = location.newUnexpectedTokenError(tok.*) }, + } + + if (input.parseNestedBlock(void, {}, struct { + pub fn parseFn(_: void, i: *css.Parser) Result(void) { + return i.expectNoErrorToken(); + } + }.parseFn).asErr()) |err| { + return .{ .err = err }; + } + + return .{ .result = SupportsCondition{ .unknown = input.sliceFrom(pos) } }; + } + + pub fn toCss(this: *const SupportsCondition, comptime W: type, dest: *css.Printer(W)) css.PrintErr!void { + switch (this.*) { + .not => |condition| { + try dest.writeStr(" not "); + try condition.toCssWithParensIfNeeded(W, dest, condition.needsParens(this)); + }, + .@"and" => |conditions| { + var first = true; + for 
(conditions.items) |*cond| { + if (first) { + first = false; + } else { + try dest.writeStr(" and "); + } + try cond.toCssWithParensIfNeeded(W, dest, cond.needsParens(this)); + } + }, + .@"or" => |conditions| { + var first = true; + for (conditions.items) |*cond| { + if (first) { + first = false; + } else { + try dest.writeStr(" or "); + } + try cond.toCssWithParensIfNeeded(W, dest, cond.needsParens(this)); + } + }, + .declaration => |decl| { + const property_id = decl.property_id; + const value = decl.value; + + try dest.writeChar('('); + + const prefix: css.VendorPrefix = property_id.prefix().orNone(); + if (!prefix.eq(css.VendorPrefix{ .none = true })) { + try dest.writeChar('('); + } + + const name = property_id.name(); + var first = true; + inline for (css.VendorPrefix.FIELDS) |field| { + if (@field(prefix, field)) { + if (first) { + first = false; + } else { + try dest.writeStr(") or ("); + } + + var p = css.VendorPrefix{}; + @field(p, field) = true; + css.serializer.serializeName(name, dest) catch return dest.addFmtError(); + try dest.delim(':', false); + try dest.writeStr(value); + } + } + + if (!prefix.eq(css.VendorPrefix{ .none = true })) { + try dest.writeChar(')'); + } + try dest.writeChar(')'); + }, + .selector => |sel| { + try dest.writeStr("selector("); + try dest.writeStr(sel); + try dest.writeChar(')'); + }, + .unknown => |unk| { + try dest.writeStr(unk); + }, + } + } + + pub fn toCssWithParensIfNeeded( + this: *const SupportsCondition, + comptime W: type, + dest: *css.Printer( + W, + ), + needs_parens: bool, + ) css.PrintErr!void { + if (needs_parens) try dest.writeStr("("); + try this.toCss(W, dest); + if (needs_parens) try dest.writeStr(")"); + } +}; + +/// A [@supports](https://drafts.csswg.org/css-conditional-3/#at-supports) rule. +pub fn SupportsRule(comptime R: type) type { + return struct { + /// The supports condition. + condition: SupportsCondition, + /// The rules within the `@supports` rule. + rules: css.CssRuleList(R), + /// The location of the rule in the source file. 
+ loc: Location, + + const This = @This(); + + pub fn toCss(this: *const This, comptime W: type, dest: *Printer(W)) PrintErr!void { + // #[cfg(feature = "sourcemap")] + // dest.add_mapping(self.loc); + + try dest.writeStr("@supports "); + try this.condition.toCss(W, dest); + try dest.whitespace(); + try dest.writeChar('{'); + dest.indent(); + try dest.newline(); + try this.rules.toCss(W, dest); + dest.dedent(); + try dest.newline(); + try dest.writeChar('}'); + } + + pub fn minify(this: *This, context: *css.MinifyContext, parent_is_unused: bool) css.MinifyErr!void { + _ = this; // autofix + _ = context; // autofix + _ = parent_is_unused; // autofix + // TODO: Implement this + return; + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) This { + return css.implementDeepClone(@This(), this, allocator); + } + }; +} diff --git a/src/css/rules/tailwind.zig b/src/css/rules/tailwind.zig new file mode 100644 index 0000000000..b3e15e3e1b --- /dev/null +++ b/src/css/rules/tailwind.zig @@ -0,0 +1,60 @@ +const std = @import("std"); +const Allocator = std.mem.Allocator; +const bun = @import("root").bun; +const logger = bun.logger; +const Log = logger.Log; + +pub const css = @import("../css_parser.zig"); +pub const css_values = @import("../values/values.zig"); +pub const Error = css.Error; +const Printer = css.Printer; +const PrintErr = css.PrintErr; + +/// @tailwind +/// https://github.com/tailwindlabs/tailwindcss.com/blob/4d6ac11425d96bc963f936e0157df460a364c43b/src/pages/docs/functions-and-directives.mdx?plain=1#L13 +pub const TailwindAtRule = struct { + style_name: TailwindStyleName, + /// The location of the rule in the source file. + loc: css.Location, + + pub fn toCss(this: *const @This(), comptime W: type, dest: *Printer(W)) PrintErr!void { + try dest.writeStr("@tailwind"); + try dest.whitespace(); + try this.style_name.toCss(W, dest); + try dest.writeChar(';'); + } + + pub fn deepClone(this: *const @This(), _: std.mem.Allocator) @This() { + return this.*; + } +}; + +pub const TailwindStyleName = enum { + /// This injects Tailwind's base styles and any base styles registered by + /// plugins. + base, + /// This injects Tailwind's component classes and any component classes + /// registered by plugins. + components, + /// This injects Tailwind's utility classes and any utility classes registered + /// by plugins. + utilities, + /// Use this directive to control where Tailwind injects the hover, focus, + /// responsive, dark mode, and other variants of each class. + /// + /// If omitted, Tailwind will append these classes to the very end of + /// your stylesheet by default. 
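+    /// Printed as `@tailwind variants;`.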
+ variants, + + pub fn asStr(this: *const @This()) []const u8 { + return css.enum_property_util.asStr(@This(), this); + } + + pub fn parse(input: *css.Parser) css.Result(@This()) { + return css.enum_property_util.parse(@This(), input); + } + + pub fn toCss(this: *const @This(), comptime W: type, dest: *Printer(W)) PrintErr!void { + return css.enum_property_util.toCss(@This(), this, W, dest); + } +}; diff --git a/src/css/rules/unknown.zig b/src/css/rules/unknown.zig new file mode 100644 index 0000000000..a1ab9408ff --- /dev/null +++ b/src/css/rules/unknown.zig @@ -0,0 +1,55 @@ +const std = @import("std"); +const Allocator = std.mem.Allocator; +const bun = @import("root").bun; +const logger = bun.logger; +const Log = logger.Log; + +pub const css = @import("../css_parser.zig"); +pub const css_values = @import("../values/values.zig"); +pub const Error = css.Error; +const Printer = css.Printer; +const PrintErr = css.PrintErr; + +/// An unknown at-rule, stored as raw tokens. +pub const UnknownAtRule = struct { + /// The name of the at-rule (without the @). + name: []const u8, + /// The prelude of the rule. + prelude: css.TokenList, + /// The contents of the block, if any. + block: ?css.TokenList, + /// The location of the rule in the source file. + loc: css.Location, + + const This = @This(); + + pub fn toCss(this: *const This, comptime W: type, dest: *Printer(W)) PrintErr!void { + // #[cfg(feature = "sourcemap")] + // dest.add_mapping(self.loc); + + try dest.writeChar('@'); + try dest.writeStr(this.name); + + if (this.prelude.v.items.len > 0) { + try dest.writeChar(' '); + try this.prelude.toCss(W, dest, false); + } + + if (this.block) |*block| { + try dest.whitespace(); + try dest.writeChar('{'); + dest.indent(); + try dest.newline(); + try block.toCss(W, dest, false); + dest.dedent(); + try dest.newline(); + try dest.writeChar('}'); + } else { + try dest.writeChar(';'); + } + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) This { + return css.implementDeepClone(@This(), this, allocator); + } +}; diff --git a/src/css/rules/viewport.zig b/src/css/rules/viewport.zig new file mode 100644 index 0000000000..03f88aa8c5 --- /dev/null +++ b/src/css/rules/viewport.zig @@ -0,0 +1,38 @@ +const std = @import("std"); +pub const css = @import("../css_parser.zig"); +const bun = @import("root").bun; +const Error = css.Error; +const ArrayList = std.ArrayListUnmanaged; +const MediaList = css.MediaList; +const CustomMedia = css.CustomMedia; +const Printer = css.Printer; +const Maybe = css.Maybe; +const PrinterError = css.PrinterError; +const PrintErr = css.PrintErr; +const Location = css.css_rules.Location; +const style = css.css_rules.style; + +/// A [@viewport](https://drafts.csswg.org/css-device-adapt/#atviewport-rule) rule. +pub const ViewportRule = struct { + /// The vendor prefix for this rule, e.g. `@-ms-viewport`. + vendor_prefix: css.VendorPrefix, + /// The declarations within the `@viewport` rule. + declarations: css.DeclarationBlock, + /// The location of the rule in the source file. 
+ loc: Location, + + const This = @This(); + + pub fn toCss(this: *const This, comptime W: type, dest: *Printer(W)) PrintErr!void { + // #[cfg(feature = "sourcemap")] + // dest.add_mapping(self.loc); + try dest.writeChar('@'); + try this.vendor_prefix.toCss(W, dest); + try dest.writeStr("viewport"); + try this.declarations.toCssBlock(W, dest); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) This { + return css.implementDeepClone(@This(), this, allocator); + } +}; diff --git a/src/css/selectors/builder.zig b/src/css/selectors/builder.zig new file mode 100644 index 0000000000..e07aef3eb7 --- /dev/null +++ b/src/css/selectors/builder.zig @@ -0,0 +1,215 @@ +//! This is the selector builder module ported from the copypasted implementation from +//! servo in lightningcss. +//! +//! -- original comment from servo -- +//! Helper module to build up a selector safely and efficiently. +//! +//! Our selector representation is designed to optimize matching, and has +//! several requirements: +//! * All simple selectors and combinators are stored inline in the same buffer +//! as Component instances. +//! * We store the top-level compound selectors from right to left, i.e. in +//! matching order. +//! * We store the simple selectors for each combinator from left to right, so +//! that we match the cheaper simple selectors first. +//! +//! Meeting all these constraints without extra memmove traffic during parsing +//! is non-trivial. This module encapsulates those details and presents an +//! easy-to-use API for the parser. +const std = @import("std"); +const Allocator = std.mem.Allocator; +const bun = @import("root").bun; +const logger = bun.logger; +const Log = logger.Log; + +pub const css = @import("../css_parser.zig"); +const CSSString = css.CSSString; +const CSSStringFns = css.CSSStringFns; + +pub const Printer = css.Printer; +pub const PrintErr = css.PrintErr; + +const Result = css.Result; +const PrintResult = css.PrintResult; + +const ArrayList = std.ArrayListUnmanaged; + +const parser = css.selector.parser; + +const ValidSelectorImpl = parser.ValidSelectorImpl; +const GenericComponent = parser.GenericComponent; +const Combinator = parser.Combinator; +const SpecifityAndFlags = parser.SpecifityAndFlags; +const compute_specifity = parser.compute_specifity; +const SelectorFlags = parser.SelectorFlags; + +/// Top-level SelectorBuilder struct. This should be stack-allocated by the +/// consumer and never moved (because it contains a lot of inline data that +/// would be slow to memmov). +/// +/// After instantiation, callers may call the push_simple_selector() and +/// push_combinator() methods to append selector data as it is encountered +/// (from left to right). Once the process is complete, callers should invoke +/// build(), which transforms the contents of the SelectorBuilder into a heap- +/// allocated Selector and leaves the builder in a drained state. +pub fn SelectorBuilder(comptime Impl: type) type { + ValidSelectorImpl(Impl); + + return struct { + /// The entire sequence of simple selectors, from left to right, without combinators. + /// + /// We make this large because the result of parsing a selector is fed into a new + /// Arc-ed allocation, so any spilled vec would be a wasted allocation. Also, + /// Components are large enough that we don't have much cache locality benefit + /// from reserving stack space for fewer of them. 
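+        /// The inline capacity of 32 used below is a heuristic; compound selectors
+        /// with more components than that spill to a heap allocation.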
+ /// + simple_selectors: css.SmallList(GenericComponent(Impl), 32) = .{}, + + /// The combinators, and the length of the compound selector to their left. + /// + combinators: css.SmallList(struct { Combinator, usize }, 32) = .{}, + + /// The length of the current compound selector. + current_len: usize = 0, + + allocator: Allocator, + + const This = @This(); + + const BuildResult = struct { + specifity_and_flags: SpecifityAndFlags, + components: ArrayList(GenericComponent(Impl)), + }; + + pub inline fn init(allocator: Allocator) This { + return This{ + .allocator = allocator, + }; + } + + /// Returns true if combinators have ever been pushed to this builder. + pub inline fn hasCombinators(this: *This) bool { + return this.combinators.len() > 0; + } + + /// Completes the current compound selector and starts a new one, delimited + /// by the given combinator. + pub inline fn pushCombinator(this: *This, combinator: Combinator) void { + this.combinators.append(this.allocator, .{ combinator, this.current_len }); + this.current_len = 0; + } + + /// Pushes a simple selector onto the current compound selector. + pub fn pushSimpleSelector(this: *This, ss: GenericComponent(Impl)) void { + bun.assert(!ss.isCombinator()); + this.simple_selectors.append(this.allocator, ss); + this.current_len += 1; + } + + pub fn addNestingPrefix(this: *This) void { + this.combinators.insert(this.allocator, 0, .{ Combinator.descendant, 1 }); + this.simple_selectors.insert(this.allocator, 0, .nesting); + } + + pub fn deinit(this: *This) void { + this.simple_selectors.deinit(this.allocator); + this.combinators.deinit(this.allocator); + } + + /// Consumes the builder, producing a Selector. + /// + /// *NOTE*: This will free all allocated memory in the builder + pub fn build( + this: *This, + parsed_pseudo: bool, + parsed_slotted: bool, + parsed_part: bool, + ) BuildResult { + const specifity = compute_specifity(Impl, this.simple_selectors.slice()); + var flags = SelectorFlags.empty(); + // PERF: is it faster to do these ORs all at once + if (parsed_pseudo) { + flags.has_pseudo = true; + } + if (parsed_slotted) { + flags.has_slotted = true; + } + if (parsed_part) { + flags.has_part = true; + } + // `buildWithSpecificityAndFlags()` will + defer this.deinit(); + return this.buildWithSpecificityAndFlags(SpecifityAndFlags{ .specificity = specifity, .flags = flags }); + } + + /// Builds a selector with the given specifity and flags. + /// + /// PERF: + /// Recall that this code is ported from servo, which optimizes for matching speed, so + /// the final AST has the components of the selector stored in reverse order, which is + /// optimized for matching. + /// + /// We don't really care about matching selectors, and storing the components in reverse + /// order requires additional allocations, and undoing the reversal when serializing the + /// selector. So we could just change this code to store the components in the same order + /// as the source. + pub fn buildWithSpecificityAndFlags(this: *This, spec: SpecifityAndFlags) BuildResult { + const T = GenericComponent(Impl); + const rest: []const T, const current: []const T = splitFromEnd(T, this.simple_selectors.slice(), this.current_len); + const combinators = this.combinators.slice(); + defer { + // This function should take every component from `this.simple_selectors` + // and place it into `components` and return it. + // + // This means that we shouldn't leak any `GenericComponent(Impl)`, so + // it is safe to just set the length to 0. 
+ // + // Combinators don't need to be deinitialized because they are simple enums. + this.simple_selectors.setLen(0); + this.combinators.setLen(0); + } + + var components = ArrayList(T){}; + + var current_simple_selectors_i: usize = 0; + var combinator_i: i64 = @as(i64, @intCast(this.combinators.len())) - 1; + var rest_of_simple_selectors = rest; + var current_simple_selectors = current; + + while (true) { + if (current_simple_selectors_i < current_simple_selectors.len) { + components.append( + this.allocator, + current_simple_selectors[current_simple_selectors_i], + ) catch unreachable; + current_simple_selectors_i += 1; + } else { + if (combinator_i >= 0) { + const combo: Combinator, const len: usize = combinators[@intCast(combinator_i)]; + const rest2, const current2 = splitFromEnd(GenericComponent(Impl), rest_of_simple_selectors, len); + rest_of_simple_selectors = rest2; + current_simple_selectors_i = 0; + current_simple_selectors = current2; + combinator_i -= 1; + components.append( + this.allocator, + .{ .combinator = combo }, + ) catch unreachable; + continue; + } + break; + } + } + + return .{ .specifity_and_flags = spec, .components = components }; + } + + pub fn splitFromEnd(comptime T: type, s: []const T, at: usize) struct { []const T, []const T } { + const midpoint = s.len - at; + return .{ + s[0..midpoint], + s[midpoint..], + }; + } + }; +} diff --git a/src/css/selectors/parser.zig b/src/css/selectors/parser.zig new file mode 100644 index 0000000000..a89dd6345a --- /dev/null +++ b/src/css/selectors/parser.zig @@ -0,0 +1,3664 @@ +const std = @import("std"); +const Allocator = std.mem.Allocator; +const bun = @import("root").bun; +const logger = bun.logger; +const Log = logger.Log; + +pub const css = @import("../css_parser.zig"); +const CSSString = css.CSSString; +const CSSStringFns = css.CSSStringFns; + +pub const Printer = css.Printer; +pub const PrintErr = css.PrintErr; + +const Result = css.Result; +const PrintResult = css.PrintResult; +const SmallList = css.SmallList; +const ArrayList = std.ArrayListUnmanaged; + +const impl = css.selector.impl; +const serialize = css.selector.serialize; + +/// Instantiation of generic selector structs using our implementation of the `SelectorImpl` trait. +pub const Component = GenericComponent(impl.Selectors); +pub const Selector = GenericSelector(impl.Selectors); +pub const SelectorList = GenericSelectorList(impl.Selectors); + +pub const ToCssCtx = enum { + lightning, + servo, +}; + +/// The definition of whitespace per CSS Selectors Level 3 § 4. 
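+/// That is: space, tab, line feed, carriage return, and form feed (0x0C).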
+pub const SELECTOR_WHITESPACE: []const u8 = &[_]u8{ ' ', '\t', '\n', '\r', 0x0C }; + +pub fn ValidSelectorImpl(comptime T: type) void { + _ = T.SelectorImpl.ExtraMatchingData; + _ = T.SelectorImpl.AttrValue; + _ = T.SelectorImpl.Identifier; + _ = T.SelectorImpl.LocalName; + _ = T.SelectorImpl.NamespaceUrl; + _ = T.SelectorImpl.NamespacePrefix; + _ = T.SelectorImpl.BorrowedNamespaceUrl; + _ = T.SelectorImpl.BorrowedLocalName; + + _ = T.SelectorImpl.NonTSPseudoClass; + _ = T.SelectorImpl.VendorPrefix; + _ = T.SelectorImpl.PseudoElement; +} + +const selector_builder = @import("./builder.zig"); + +pub const attrs = struct { + pub fn NamespaceUrl(comptime Impl: type) type { + return struct { + prefix: Impl.SelectorImpl.NamespacePrefix, + url: Impl.SelectorImpl.NamespaceUrl, + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn deepClone(this: *const @This(), allocator: Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn hash(this: *const @This(), hasher: *std.hash.Wyhash) void { + return css.implementHash(@This(), this, hasher); + } + }; + } + + pub fn AttrSelectorWithOptionalNamespace(comptime Impl: type) type { + return struct { + namespace: ?NamespaceConstraint(NamespaceUrl(Impl)), + local_name: Impl.SelectorImpl.LocalName, + local_name_lower: Impl.SelectorImpl.LocalName, + operation: ParsedAttrSelectorOperation(Impl.SelectorImpl.AttrValue), + never_matches: bool, + + pub fn toCss(this: *const @This(), comptime W: type, dest: *Printer(W)) PrintErr!void { + try dest.writeChar('['); + if (this.namespace) |nsp| switch (nsp) { + .specific => |v| { + try css.IdentFns.toCss(&v.prefix, W, dest); + try dest.writeChar('|'); + }, + .any => { + try dest.writeStr("*|"); + }, + }; + try css.IdentFns.toCss(&this.local_name, W, dest); + switch (this.operation) { + .exists => {}, + .with_value => |v| { + try v.operator.toCss(W, dest); + // try v.expected_value.toCss(dest); + try CSSStringFns.toCss(&v.expected_value, W, dest); + switch (v.case_sensitivity) { + .case_sensitive, .ascii_case_insensitive_if_in_html_element_in_html_document => {}, + .ascii_case_insensitive => { + try dest.writeStr(" i"); + }, + .explicit_case_sensitive => { + try dest.writeStr(" s"); + }, + } + }, + } + return dest.writeChar(']'); + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn deepClone(this: *const @This(), allocator: Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn hash(this: *const @This(), hasher: *std.hash.Wyhash) void { + return css.implementHash(@This(), this, hasher); + } + }; + } + + pub fn NamespaceConstraint(comptime NamespaceUrl_: type) type { + return union(enum) { + any, + /// Empty string for no namespace + specific: NamespaceUrl_, + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn hash(this: *const @This(), hasher: *std.hash.Wyhash) void { + return css.implementHash(@This(), this, hasher); + } + + pub fn deepClone(this: *const @This(), allocator: Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + }; + } + + pub fn ParsedAttrSelectorOperation(comptime AttrValue: type) type { + return union(enum) { + exists, + with_value: struct { + operator: AttrSelectorOperator, + case_sensitivity: ParsedCaseSensitivity, + expected_value: AttrValue, + + pub fn __generateEql() void {} + 
pub fn __generateDeepClone() void {} + pub fn __generateHash() void {} + }, + + pub fn deepClone(this: *const @This(), allocator: Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn hash(this: *const @This(), hasher: *std.hash.Wyhash) void { + return css.implementHash(@This(), this, hasher); + } + }; + } + + pub const AttrSelectorOperator = enum { + equal, + includes, + dash_match, + prefix, + substring, + suffix, + + const This = @This(); + pub fn toCss(this: *const This, comptime W: type, dest: *Printer(W)) PrintErr!void { + // https://drafts.csswg.org/cssom/#serializing-selectors + // See "attribute selector". + return dest.writeStr(switch (this.*) { + .equal => "=", + .includes => "~=", + .dash_match => "|=", + .prefix => "^=", + .substring => "*=", + .suffix => "$=", + }); + } + + pub fn hash(this: *const @This(), hasher: *std.hash.Wyhash) void { + return css.implementHash(@This(), this, hasher); + } + }; + + pub const AttrSelectorOperation = enum { + equal, + includes, + dash_match, + prefix, + substring, + suffix, + }; + + pub const ParsedCaseSensitivity = enum { + // 's' was specified. + explicit_case_sensitive, + // 'i' was specified. + ascii_case_insensitive, + // No flags were specified and HTML says this is a case-sensitive attribute. + case_sensitive, + // No flags were specified and HTML says this is a case-insensitive attribute. + ascii_case_insensitive_if_in_html_element_in_html_document, + }; +}; + +pub const Specifity = struct { + id_selectors: u32 = 0, + class_like_selectors: u32 = 0, + element_selectors: u32 = 0, + + const MAX_10BIT: u32 = (1 << 10) - 1; + + pub fn toU32(this: Specifity) u32 { + return @as(u32, @as(u32, @min(this.id_selectors, MAX_10BIT)) << @as(u32, 20)) | + @as(u32, @as(u32, @min(this.class_like_selectors, MAX_10BIT)) << @as(u32, 10)) | + @min(this.element_selectors, MAX_10BIT); + } + + pub fn fromU32(value: u32) Specifity { + bun.assert(value <= MAX_10BIT << 20 | MAX_10BIT << 10 | MAX_10BIT); + return Specifity{ + .id_selectors = value >> 20, + .class_like_selectors = (value >> 10) & MAX_10BIT, + .element_selectors = value & MAX_10BIT, + }; + } + + pub fn add(lhs: *Specifity, rhs: Specifity) void { + lhs.id_selectors += rhs.id_selectors; + lhs.element_selectors += rhs.element_selectors; + lhs.class_like_selectors += rhs.class_like_selectors; + } +}; + +pub fn compute_specifity(comptime Impl: type, iter: []const GenericComponent(Impl)) u32 { + const spec = compute_complex_selector_specifity(Impl, iter); + return spec.toU32(); +} + +fn compute_complex_selector_specifity(comptime Impl: type, iter: []const GenericComponent(Impl)) Specifity { + var specifity: Specifity = .{}; + + for (iter) |*simple_selector| { + compute_simple_selector_specifity(Impl, simple_selector, &specifity); + } + + return specifity; +} + +fn compute_simple_selector_specifity( + comptime Impl: type, + simple_selector: *const GenericComponent(Impl), + specifity: *Specifity, +) void { + switch (simple_selector.*) { + .combinator => { + bun.unreachablePanic("Found combinator in simple selectors vector?", .{}); + }, + .part, .pseudo_element, .local_name => { + specifity.element_selectors += 1; + }, + .slotted => |selector| { + specifity.element_selectors += 1; + // Note that due to the way ::slotted works we only compete with + // other ::slotted rules, so the above rule doesn't really + // matter, but we do it still for 
consistency with other + // pseudo-elements. + // + // See: https://github.com/w3c/csswg-drafts/issues/1915 + specifity.add(Specifity.fromU32(selector.specifity())); + }, + .host => |maybe_selector| { + specifity.class_like_selectors += 1; + if (maybe_selector) |*selector| { + // See: https://github.com/w3c/csswg-drafts/issues/1915 + specifity.add(Specifity.fromU32(selector.specifity())); + } + }, + .id => { + specifity.id_selectors += 1; + }, + .class, + .attribute_in_no_namespace, + .attribute_in_no_namespace_exists, + .attribute_other, + .root, + .empty, + .scope, + .nth, + .non_ts_pseudo_class, + => { + specifity.class_like_selectors += 1; + }, + .nth_of => |nth_of_data| { + // https://drafts.csswg.org/selectors/#specificity-rules: + // + // The specificity of the :nth-last-child() pseudo-class, + // like the :nth-child() pseudo-class, combines the + // specificity of a regular pseudo-class with that of its + // selector argument S. + specifity.class_like_selectors += 1; + var max: u32 = 0; + for (nth_of_data.selectors) |*selector| { + max = @max(selector.specifity(), max); + } + specifity.add(Specifity.fromU32(max)); + }, + .negation, .is, .any => { + // https://drafts.csswg.org/selectors/#specificity-rules: + // + // The specificity of an :is() pseudo-class is replaced by the + // specificity of the most specific complex selector in its + // selector list argument. + const list: []GenericSelector(Impl) = switch (simple_selector.*) { + .negation => |list| list, + .is => |list| list, + .any => |a| a.selectors, + else => unreachable, + }; + var max: u32 = 0; + for (list) |*selector| { + max = @max(selector.specifity(), max); + } + specifity.add(Specifity.fromU32(max)); + }, + .where, + .has, + .explicit_universal_type, + .explicit_any_namespace, + .explicit_no_namespace, + .default_namespace, + .namespace, + => { + // Does not affect specifity + }, + .nesting => { + // TODO + }, + } +} + +const SelectorBuilder = selector_builder.SelectorBuilder; + +/// Build up a Selector. +/// selector : simple_selector_sequence [ combinator simple_selector_sequence ]* ; +/// +/// `Err` means invalid selector. +fn parse_selector( + comptime Impl: type, + parser: *SelectorParser, + input: *css.Parser, + state: *SelectorParsingState, + nesting_requirement: NestingRequirement, +) Result(GenericSelector(Impl)) { + if (nesting_requirement == .prefixed) { + const parser_state = input.state(); + if (!input.expectDelim('&').isOk()) { + return .{ .err = input.newCustomError(SelectorParseErrorKind.intoDefaultParserError(.missing_nesting_prefix)) }; + } + input.reset(&parser_state); + } + + // PERF: allocations here + var builder = selector_builder.SelectorBuilder(Impl){ + .allocator = input.allocator(), + }; + + outer_loop: while (true) { + // Parse a sequence of simple selectors. 
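+        // A compound selector is a run of simple selectors with no combinator in
+        // between, e.g. `a.button:hover` and `nav` in `nav > a.button:hover`.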
+ const empty = switch (parse_compound_selector(Impl, parser, state, input, &builder)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + if (empty) { + const kind: SelectorParseErrorKind = if (builder.hasCombinators()) + .dangling_combinator + else + .empty_selector; + + return .{ .err = input.newCustomError(kind.intoDefaultParserError()) }; + } + + if (state.intersects(SelectorParsingState.AFTER_PSEUDO)) { + const source_location = input.currentSourceLocation(); + if (input.next().asValue()) |next| { + return .{ .err = source_location.newCustomError(SelectorParseErrorKind.intoDefaultParserError(.{ .unexpected_selector_after_pseudo_element = next.* })) }; + } + break; + } + + // Parse a combinator + var combinator: Combinator = undefined; + var any_whitespace = false; + while (true) { + const before_this_token = input.state(); + const tok: *css.Token = switch (input.nextIncludingWhitespace()) { + .result => |vv| vv, + .err => break :outer_loop, + }; + switch (tok.*) { + .whitespace => { + any_whitespace = true; + continue; + }, + .delim => |d| { + switch (d) { + '>' => { + if (parser.deepCombinatorEnabled() and input.tryParse(struct { + pub fn parseFn(i: *css.Parser) Result(void) { + if (i.expectDelim('>').asErr()) |e| return .{ .err = e }; + return i.expectDelim('>'); + } + }.parseFn, .{}).isOk()) { + combinator = Combinator.deep_descendant; + } else { + combinator = Combinator.child; + } + break; + }, + '+' => { + combinator = .next_sibling; + break; + }, + '~' => { + combinator = .later_sibling; + break; + }, + '/' => { + if (parser.deepCombinatorEnabled()) { + if (input.tryParse(struct { + pub fn parseFn(i: *css.Parser) Result(void) { + if (i.expectIdentMatching("deep").asErr()) |e| return .{ .err = e }; + return i.expectDelim('/'); + } + }.parseFn, .{}).isOk()) { + combinator = .deep; + break; + } else { + break :outer_loop; + } + } + }, + else => {}, + } + }, + else => {}, + } + + input.reset(&before_this_token); + + if (any_whitespace) { + combinator = .descendant; + break; + } else { + break :outer_loop; + } + } + + if (!state.allowsCombinators()) { + return .{ .err = input.newCustomError(SelectorParseErrorKind.intoDefaultParserError(.invalid_state)) }; + } + + builder.pushCombinator(combinator); + } + + if (!state.contains(SelectorParsingState{ .after_nesting = true })) { + switch (nesting_requirement) { + .implicit => { + builder.addNestingPrefix(); + }, + .contained, .prefixed => { + return .{ .err = input.newCustomError(SelectorParseErrorKind.intoDefaultParserError(.missing_nesting_selector)) }; + }, + else => {}, + } + } + + const has_pseudo_element = state.intersects(SelectorParsingState{ + .after_pseudo_element = true, + .after_unknown_pseudo_element = true, + }); + const slotted = state.intersects(SelectorParsingState{ .after_slotted = true }); + const part = state.intersects(SelectorParsingState{ .after_part = true }); + const result = builder.build(has_pseudo_element, slotted, part); + return .{ .result = Selector{ + .specifity_and_flags = result.specifity_and_flags, + .components = result.components, + } }; +} + +/// simple_selector_sequence +/// : [ type_selector | universal ] [ HASH | class | attrib | pseudo | negation ]* +/// | [ HASH | class | attrib | pseudo | negation ]+ +/// +/// `Err(())` means invalid selector. 
+/// `Ok(true)` is an empty selector +fn parse_compound_selector( + comptime Impl: type, + parser: *SelectorParser, + state: *SelectorParsingState, + input: *css.Parser, + builder: *SelectorBuilder(Impl), +) Result(bool) { + input.skipWhitespace(); + + var empty: bool = true; + if (parser.isNestingAllowed() and if (input.tryParse(css.Parser.expectDelim, .{'&'}).isOk()) true else false) { + state.insert(SelectorParsingState{ .after_nesting = true }); + builder.pushSimpleSelector(.nesting); + empty = false; + } + + if (parse_type_selector(Impl, parser, input, state.*, builder).asValue()) |_| { + empty = false; + } + + while (true) { + const result: SimpleSelectorParseResult(Impl) = result: { + const ret = switch (parse_one_simple_selector(Impl, parser, input, state)) { + .result => |r| r, + .err => |e| return .{ .err = e }, + }; + if (ret) |result| { + break :result result; + } + break; + }; + + if (empty) { + if (parser.defaultNamespace()) |url| { + // If there was no explicit type selector, but there is a + // default namespace, there is an implicit "|*" type + // selector. Except for :host() or :not() / :is() / :where(), + // where we ignore it. + // + // https://drafts.csswg.org/css-scoping/#host-element-in-tree: + // + // When considered within its own shadow trees, the shadow + // host is featureless. Only the :host, :host(), and + // :host-context() pseudo-classes are allowed to match it. + // + // https://drafts.csswg.org/selectors-4/#featureless: + // + // A featureless element does not match any selector at all, + // except those it is explicitly defined to match. If a + // given selector is allowed to match a featureless element, + // it must do so while ignoring the default namespace. + // + // https://drafts.csswg.org/selectors-4/#matches + // + // Default namespace declarations do not affect the compound + // selector representing the subject of any selector within + // a :is() pseudo-class, unless that compound selector + // contains an explicit universal selector or type selector. 
+ // + // (Similar quotes for :where() / :not()) + // + const ignore_default_ns = state.intersects(SelectorParsingState{ .skip_default_namespace = true }) or + (result == .simple_selector and result.simple_selector == .host); + if (!ignore_default_ns) { + builder.pushSimpleSelector(.{ .default_namespace = url }); + } + } + } + + empty = false; + + switch (result) { + .simple_selector => { + builder.pushSimpleSelector(result.simple_selector); + }, + .part_pseudo => { + const selector = result.part_pseudo; + state.insert(SelectorParsingState{ .after_part = true }); + builder.pushCombinator(.part); + builder.pushSimpleSelector(.{ .part = selector }); + }, + .slotted_pseudo => |selector| { + state.insert(.{ .after_slotted = true }); + builder.pushCombinator(.slot_assignment); + builder.pushSimpleSelector(.{ .slotted = selector }); + }, + .pseudo_element => |p| { + if (!p.isUnknown()) { + state.insert(SelectorParsingState{ .after_pseudo_element = true }); + builder.pushCombinator(.pseudo_element); + } else { + state.insert(.{ .after_unknown_pseudo_element = true }); + } + + if (!p.acceptsStatePseudoClasses()) { + state.insert(.{ .after_non_stateful_pseudo_element = true }); + } + + if (p.isWebkitScrollbar()) { + state.insert(.{ .after_webkit_scrollbar = true }); + } + + if (p.isViewTransition()) { + state.insert(.{ .after_view_transition = true }); + } + + builder.pushSimpleSelector(.{ .pseudo_element = p }); + }, + } + } + + return .{ .result = empty }; +} + +fn parse_relative_selector( + comptime Impl: type, + parser: *SelectorParser, + input: *css.Parser, + state: *SelectorParsingState, + nesting_requirement_: NestingRequirement, +) Result(GenericSelector(Impl)) { + // https://www.w3.org/TR/selectors-4/#parse-relative-selector + var nesting_requirement = nesting_requirement_; + const s = input.state(); + + const combinator: ?Combinator = combinator: { + const tok = switch (input.next()) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + switch (tok.*) { + .delim => |c| { + switch (c) { + '>' => break :combinator Combinator.child, + '+' => break :combinator Combinator.next_sibling, + '~' => break :combinator Combinator.later_sibling, + else => {}, + } + }, + else => {}, + } + input.reset(&s); + break :combinator null; + }; + + const scope: GenericComponent(Impl) = if (nesting_requirement == .implicit) .nesting else .scope; + + if (combinator != null) { + nesting_requirement = .none; + } + + var selector = switch (parse_selector(Impl, parser, input, state, nesting_requirement)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + if (combinator) |wombo_combo| { + // https://www.w3.org/TR/selectors/#absolutizing + selector.components.append( + parser.allocator, + .{ .combinator = wombo_combo }, + ) catch unreachable; + selector.components.append( + parser.allocator, + scope, + ) catch unreachable; + } + + return .{ .result = selector }; +} + +pub fn ValidSelectorParser(comptime T: type) type { + ValidSelectorImpl(T.SelectorParser.Impl); + + // Whether to parse the `::slotted()` pseudo-element. 
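+    // (Like the other `_ = ...` accesses below, this is a comptime existence check:
+    // a missing declaration on `T.SelectorParser` fails compilation.)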
+ _ = T.SelectorParser.parseSlotted; + + _ = T.SelectorParser.parsePart; + + _ = T.SelectorParser.parseIsAndWhere; + + _ = T.SelectorParser.isAndWhereErrorRecovery; + + _ = T.SelectorParser.parseAnyPrefix; + + _ = T.SelectorParser.parseHost; + + _ = T.SelectorParser.parseNonTsPseudoClass; + + _ = T.SelectorParser.parseNonTsFunctionalPseudoClass; + + _ = T.SelectorParser.parsePseudoElement; + + _ = T.SelectorParser.parseFunctionalPseudoElement; + + _ = T.SelectorParser.defaultNamespace; + + _ = T.SelectorParser.namespaceForPrefix; + + _ = T.SelectorParser.isNestingAllowed; + + _ = T.SelectorParser.deepCombinatorEnabled; +} + +/// The [:dir()](https://drafts.csswg.org/selectors-4/#the-dir-pseudo) pseudo class. +pub const Direction = enum { + /// Left to right + ltr, + /// Right to left + rtl, + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn asStr(this: *const @This()) []const u8 { + return css.enum_property_util.asStr(@This(), this); + } + + pub fn parse(input: *css.Parser) Result(@This()) { + return css.enum_property_util.parse(@This(), input); + } + + pub fn toCss(this: *const @This(), comptime W: type, dest: *Printer(W)) PrintErr!void { + return css.enum_property_util.toCss(@This(), this, W, dest); + } +}; + +/// A pseudo class. +pub const PseudoClass = union(enum) { + /// https://drafts.csswg.org/selectors-4/#linguistic-pseudos + /// The [:lang()](https://drafts.csswg.org/selectors-4/#the-lang-pseudo) pseudo class. + lang: struct { + /// A list of language codes. + languages: ArrayList([]const u8), + + pub fn __generateEql() void {} + pub fn __generateDeepClone() void {} + pub fn __generateHash() void {} + }, + /// The [:dir()](https://drafts.csswg.org/selectors-4/#the-dir-pseudo) pseudo class. + dir: struct { + /// A direction. + direction: Direction, + + pub fn __generateEql() void {} + pub fn __generateDeepClone() void {} + pub fn __generateHash() void {} + }, + + // https://drafts.csswg.org/selectors-4/#useraction-pseudos + /// The [:hover](https://drafts.csswg.org/selectors-4/#the-hover-pseudo) pseudo class. + hover, + /// The [:active](https://drafts.csswg.org/selectors-4/#the-active-pseudo) pseudo class. + active, + /// The [:focus](https://drafts.csswg.org/selectors-4/#the-focus-pseudo) pseudo class. + focus, + /// The [:focus-visible](https://drafts.csswg.org/selectors-4/#the-focus-visible-pseudo) pseudo class. + focus_visible, + /// The [:focus-within](https://drafts.csswg.org/selectors-4/#the-focus-within-pseudo) pseudo class. + focus_within, + + /// https://drafts.csswg.org/selectors-4/#time-pseudos + /// The [:current](https://drafts.csswg.org/selectors-4/#the-current-pseudo) pseudo class. + current, + /// The [:past](https://drafts.csswg.org/selectors-4/#the-past-pseudo) pseudo class. + past, + /// The [:future](https://drafts.csswg.org/selectors-4/#the-future-pseudo) pseudo class. + future, + + /// https://drafts.csswg.org/selectors-4/#resource-pseudos + /// The [:playing](https://drafts.csswg.org/selectors-4/#selectordef-playing) pseudo class. + playing, + /// The [:paused](https://drafts.csswg.org/selectors-4/#selectordef-paused) pseudo class. + paused, + /// The [:seeking](https://drafts.csswg.org/selectors-4/#selectordef-seeking) pseudo class. + seeking, + /// The [:buffering](https://drafts.csswg.org/selectors-4/#selectordef-buffering) pseudo class. + buffering, + /// The [:stalled](https://drafts.csswg.org/selectors-4/#selectordef-stalled) pseudo class. 
+ stalled, + /// The [:muted](https://drafts.csswg.org/selectors-4/#selectordef-muted) pseudo class. + muted, + /// The [:volume-locked](https://drafts.csswg.org/selectors-4/#selectordef-volume-locked) pseudo class. + volume_locked, + + /// The [:fullscreen](https://fullscreen.spec.whatwg.org/#:fullscreen-pseudo-class) pseudo class. + fullscreen: css.VendorPrefix, + + /// https://drafts.csswg.org/selectors/#display-state-pseudos + /// The [:open](https://drafts.csswg.org/selectors/#selectordef-open) pseudo class. + open, + /// The [:closed](https://drafts.csswg.org/selectors/#selectordef-closed) pseudo class. + closed, + /// The [:modal](https://drafts.csswg.org/selectors/#modal-state) pseudo class. + modal, + /// The [:picture-in-picture](https://drafts.csswg.org/selectors/#pip-state) pseudo class. + picture_in_picture, + + /// https://html.spec.whatwg.org/multipage/semantics-other.html#selector-popover-open + /// The [:popover-open](https://html.spec.whatwg.org/multipage/semantics-other.html#selector-popover-open) pseudo class. + popover_open, + + /// The [:defined](https://drafts.csswg.org/selectors-4/#the-defined-pseudo) pseudo class. + defined, + + /// https://drafts.csswg.org/selectors-4/#location + /// The [:any-link](https://drafts.csswg.org/selectors-4/#the-any-link-pseudo) pseudo class. + any_link: css.VendorPrefix, + /// The [:link](https://drafts.csswg.org/selectors-4/#link-pseudo) pseudo class. + link, + /// The [:local-link](https://drafts.csswg.org/selectors-4/#the-local-link-pseudo) pseudo class. + local_link, + /// The [:target](https://drafts.csswg.org/selectors-4/#the-target-pseudo) pseudo class. + target, + /// The [:target-within](https://drafts.csswg.org/selectors-4/#the-target-within-pseudo) pseudo class. + target_within, + /// The [:visited](https://drafts.csswg.org/selectors-4/#visited-pseudo) pseudo class. + visited, + + /// https://drafts.csswg.org/selectors-4/#input-pseudos + /// The [:enabled](https://drafts.csswg.org/selectors-4/#enabled-pseudo) pseudo class. + enabled, + /// The [:disabled](https://drafts.csswg.org/selectors-4/#disabled-pseudo) pseudo class. + disabled, + /// The [:read-only](https://drafts.csswg.org/selectors-4/#read-only-pseudo) pseudo class. + read_only: css.VendorPrefix, + /// The [:read-write](https://drafts.csswg.org/selectors-4/#read-write-pseudo) pseudo class. + read_write: css.VendorPrefix, + /// The [:placeholder-shown](https://drafts.csswg.org/selectors-4/#placeholder) pseudo class. + placeholder_shown: css.VendorPrefix, + /// The [:default](https://drafts.csswg.org/selectors-4/#the-default-pseudo) pseudo class. + default, + /// The [:checked](https://drafts.csswg.org/selectors-4/#checked) pseudo class. + checked, + /// The [:indeterminate](https://drafts.csswg.org/selectors-4/#indeterminate) pseudo class. + indeterminate, + /// The [:blank](https://drafts.csswg.org/selectors-4/#blank) pseudo class. + blank, + /// The [:valid](https://drafts.csswg.org/selectors-4/#valid-pseudo) pseudo class. + valid, + /// The [:invalid](https://drafts.csswg.org/selectors-4/#invalid-pseudo) pseudo class. + invalid, + /// The [:in-range](https://drafts.csswg.org/selectors-4/#in-range-pseudo) pseudo class. + in_range, + /// The [:out-of-range](https://drafts.csswg.org/selectors-4/#out-of-range-pseudo) pseudo class. + out_of_range, + /// The [:required](https://drafts.csswg.org/selectors-4/#required-pseudo) pseudo class. + required, + /// The [:optional](https://drafts.csswg.org/selectors-4/#optional-pseudo) pseudo class. 
+ optional, + /// The [:user-valid](https://drafts.csswg.org/selectors-4/#user-valid-pseudo) pseudo class. + user_valid, + /// The [:used-invalid](https://drafts.csswg.org/selectors-4/#user-invalid-pseudo) pseudo class. + user_invalid, + + /// The [:autofill](https://html.spec.whatwg.org/multipage/semantics-other.html#selector-autofill) pseudo class. + autofill: css.VendorPrefix, + + // CSS modules + /// The CSS modules :local() pseudo class. + local: struct { + /// A local selector. + selector: *Selector, + + pub fn __generateEql() void {} + pub fn __generateDeepClone() void {} + pub fn __generateHash() void {} + }, + /// The CSS modules :global() pseudo class. + global: struct { + /// A global selector. + selector: *Selector, + + pub fn __generateEql() void {} + pub fn __generateDeepClone() void {} + pub fn __generateHash() void {} + }, + + /// A [webkit scrollbar](https://webkit.org/blog/363/styling-scrollbars/) pseudo class. + // https://webkit.org/blog/363/styling-scrollbars/ + webkit_scrollbar: WebKitScrollbarPseudoClass, + /// An unknown pseudo class. + custom: struct { + /// The pseudo class name. + name: []const u8, + + pub fn __generateEql() void {} + pub fn __generateDeepClone() void {} + pub fn __generateHash() void {} + }, + /// An unknown functional pseudo class. + custom_function: struct { + /// The pseudo class name. + name: []const u8, + /// The arguments of the pseudo class function. + arguments: css.TokenList, + + pub fn __generateEql() void {} + pub fn __generateDeepClone() void {} + pub fn __generateHash() void {} + }, + + pub fn isEquivalent(this: *const PseudoClass, other: *const PseudoClass) bool { + if (this.* == .fullscreen and other.* == .fullscreen) return true; + if (this.* == .any_link and other.* == .any_link) return true; + if (this.* == .read_only and other.* == .read_only) return true; + if (this.* == .read_write and other.* == .read_write) return true; + if (this.* == .placeholder_shown and other.* == .placeholder_shown) return true; + if (this.* == .autofill and other.* == .autofill) return true; + return this.eql(other); + } + + pub fn toCss(this: *const PseudoClass, comptime W: type, dest: *Printer(W)) PrintErr!void { + var s = ArrayList(u8){}; + // PERF(alloc): I don't like making these little allocations + const writer = s.writer(dest.allocator); + const W2 = @TypeOf(writer); + const scratchbuf = std.ArrayList(u8).init(dest.allocator); + var printer = Printer(W2).new(dest.allocator, scratchbuf, writer, css.PrinterOptions{}, dest.import_records); + try serialize.serializePseudoClass(this, W2, &printer, null); + return dest.writeStr(s.items); + } + + pub fn eql(lhs: *const PseudoClass, rhs: *const PseudoClass) bool { + return css.implementEql(PseudoClass, lhs, rhs); + } + + pub fn hash(this: *const @This(), hasher: *std.hash.Wyhash) void { + return css.implementHash(@This(), this, hasher); + } + + pub fn deepClone(this: *const @This(), allocator: Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn getPrefix(this: *const PseudoClass) css.VendorPrefix { + return switch (this.*) { + inline .fullscreen, .any_link, .read_only, .read_write, .placeholder_shown, .autofill => |p| p, + else => css.VendorPrefix.empty(), + }; + } + + pub fn getNecessaryPrefixes(this: *PseudoClass, targets: css.targets.Targets) css.VendorPrefix { + const F = css.prefixes.Feature; + const p: *css.VendorPrefix, const feature: F = switch (this.*) { + .fullscreen => |*p| .{ p, F.pseudo_class_fullscreen }, + .any_link => |*p| .{ p, 
F.pseudo_class_any_link }, + .read_only => |*p| .{ p, F.pseudo_class_read_only }, + .read_write => |*p| .{ p, F.pseudo_class_read_write }, + .placeholder_shown => |*p| .{ p, F.pseudo_class_placeholder_shown }, + .autofill => |*p| .{ p, F.pseudo_class_autofill }, + else => return css.VendorPrefix.empty(), + }; + p.* = targets.prefixes(p.*, feature); + return p.*; + } + + pub fn isUserActionState(this: *const PseudoClass) bool { + return switch (this.*) { + .active, .hover, .focus, .focus_within, .focus_visible => true, + else => false, + }; + } + + pub fn isValidBeforeWebkitScrollbar(this: *const PseudoClass) bool { + return !switch (this.*) { + .webkit_scrollbar => true, + else => false, + }; + } + + pub fn isValidAfterWebkitScrollbar(this: *const PseudoClass) bool { + return switch (this.*) { + .webkit_scrollbar, .enabled, .disabled, .hover, .active => true, + else => false, + }; + } +}; + +/// A [webkit scrollbar](https://webkit.org/blog/363/styling-scrollbars/) pseudo class. +pub const WebKitScrollbarPseudoClass = enum { + /// :horizontal + horizontal, + /// :vertical + vertical, + /// :decrement + decrement, + /// :increment + increment, + /// :start + start, + /// :end + end, + /// :double-button + double_button, + /// :single-button + single_button, + /// :no-button + no_button, + /// :corner-present + corner_present, + /// :window-inactive + window_inactive, +}; + +/// A [webkit scrollbar](https://webkit.org/blog/363/styling-scrollbars/) pseudo element. +pub const WebKitScrollbarPseudoElement = enum { + /// ::-webkit-scrollbar + scrollbar, + /// ::-webkit-scrollbar-button + button, + /// ::-webkit-scrollbar-track + track, + /// ::-webkit-scrollbar-track-piece + track_piece, + /// ::-webkit-scrollbar-thumb + thumb, + /// ::-webkit-scrollbar-corner + corner, + /// ::-webkit-resizer + resizer, + + pub inline fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return lhs.* == rhs.*; + } +}; + +pub const SelectorParser = struct { + is_nesting_allowed: bool, + options: *const css.ParserOptions, + allocator: Allocator, + + pub const Impl = impl.Selectors; + + pub fn namespaceForPrefix(this: *SelectorParser, prefix: css.css_values.ident.Ident) ?[]const u8 { + _ = this; // autofix + return prefix.v; + } + + pub fn parseFunctionalPseudoElement(this: *SelectorParser, name: []const u8, input: *css.Parser) Result(Impl.SelectorImpl.PseudoElement) { + _ = this; // autofix + _ = name; // autofix + _ = input; // autofix + @panic(css.todo_stuff.depth); + } + + fn parseIsAndWhere(this: *const SelectorParser) bool { + _ = this; // autofix + return true; + } + + /// Whether the given function name is an alias for the `:is()` function. 
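+ /// For example, the legacy `:-webkit-any()` and `:-moz-any()` pseudo-classes are prefixed
+ /// aliases of `:is()`; returning a vendor prefix marks `name` as such an alias, while the
+ /// implementation below recognizes none and always returns null.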
+ fn parseAnyPrefix(this: *const SelectorParser, name: []const u8) ?css.VendorPrefix { + _ = this; // autofix + _ = name; // autofix + return null; + } + + pub fn parseNonTsPseudoClass( + this: *SelectorParser, + loc: css.SourceLocation, + name: []const u8, + ) Result(PseudoClass) { + // @compileError(css.todo_stuff.match_ignore_ascii_case); + const pseudo_class: PseudoClass = pseudo_class: { + if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "hover")) { + // https://drafts.csswg.org/selectors-4/#useraction-pseudos + break :pseudo_class .hover; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "active")) { + // https://drafts.csswg.org/selectors-4/#useraction-pseudos + break :pseudo_class .active; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "focus")) { + // https://drafts.csswg.org/selectors-4/#useraction-pseudos + break :pseudo_class .focus; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "focus-visible")) { + // https://drafts.csswg.org/selectors-4/#useraction-pseudos + break :pseudo_class .focus_visible; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "focus-within")) { + // https://drafts.csswg.org/selectors-4/#useraction-pseudos + break :pseudo_class .focus_within; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "current")) { + // https://drafts.csswg.org/selectors-4/#time-pseudos + break :pseudo_class .current; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "past")) { + // https://drafts.csswg.org/selectors-4/#time-pseudos + break :pseudo_class .past; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "future")) { + // https://drafts.csswg.org/selectors-4/#time-pseudos + break :pseudo_class .future; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "playing")) { + // https://drafts.csswg.org/selectors-4/#resource-pseudos + break :pseudo_class .playing; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "paused")) { + // https://drafts.csswg.org/selectors-4/#resource-pseudos + break :pseudo_class .paused; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "seeking")) { + // https://drafts.csswg.org/selectors-4/#resource-pseudos + break :pseudo_class .seeking; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "buffering")) { + // https://drafts.csswg.org/selectors-4/#resource-pseudos + break :pseudo_class .buffering; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "stalled")) { + // https://drafts.csswg.org/selectors-4/#resource-pseudos + break :pseudo_class .stalled; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "muted")) { + // https://drafts.csswg.org/selectors-4/#resource-pseudos + break :pseudo_class .muted; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "volume-locked")) { + // https://drafts.csswg.org/selectors-4/#resource-pseudos + break :pseudo_class .volume_locked; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "fullscreen")) { + // https://fullscreen.spec.whatwg.org/#:fullscreen-pseudo-class + break :pseudo_class .{ .fullscreen = css.VendorPrefix{ .none = true } }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "-webkit-full-screen")) { + // https://fullscreen.spec.whatwg.org/#:fullscreen-pseudo-class + break :pseudo_class .{ .fullscreen = css.VendorPrefix{ .webkit = true } }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "-moz-full-screen")) { + // 
https://fullscreen.spec.whatwg.org/#:fullscreen-pseudo-class + break :pseudo_class .{ .fullscreen = css.VendorPrefix{ .moz = true } }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "-ms-fullscreen")) { + // https://fullscreen.spec.whatwg.org/#:fullscreen-pseudo-class + break :pseudo_class .{ .fullscreen = css.VendorPrefix{ .ms = true } }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "open")) { + // https://drafts.csswg.org/selectors/#display-state-pseudos + break :pseudo_class .open; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "closed")) { + // https://drafts.csswg.org/selectors/#display-state-pseudos + break :pseudo_class .closed; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "modal")) { + // https://drafts.csswg.org/selectors/#display-state-pseudos + break :pseudo_class .modal; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "picture-in-picture")) { + // https://drafts.csswg.org/selectors/#display-state-pseudos + break :pseudo_class .picture_in_picture; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "popover-open")) { + // https://html.spec.whatwg.org/multipage/semantics-other.html#selector-popover-open + break :pseudo_class .popover_open; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "defined")) { + // https://drafts.csswg.org/selectors-4/#the-defined-pseudo + break :pseudo_class .defined; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "any-link")) { + // https://drafts.csswg.org/selectors-4/#location + break :pseudo_class .{ .any_link = css.VendorPrefix{ .none = true } }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "-webkit-any-link")) { + // https://drafts.csswg.org/selectors-4/#location + break :pseudo_class .{ .any_link = css.VendorPrefix{ .webkit = true } }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "-moz-any-link")) { + // https://drafts.csswg.org/selectors-4/#location + break :pseudo_class .{ .any_link = css.VendorPrefix{ .moz = true } }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "link")) { + // https://drafts.csswg.org/selectors-4/#location + break :pseudo_class .link; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "local-link")) { + // https://drafts.csswg.org/selectors-4/#location + break :pseudo_class .local_link; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "target")) { + // https://drafts.csswg.org/selectors-4/#location + break :pseudo_class .target; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "target-within")) { + // https://drafts.csswg.org/selectors-4/#location + break :pseudo_class .target_within; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "visited")) { + // https://drafts.csswg.org/selectors-4/#location + break :pseudo_class .visited; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "enabled")) { + // https://drafts.csswg.org/selectors-4/#input-pseudos + break :pseudo_class .enabled; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "disabled")) { + // https://drafts.csswg.org/selectors-4/#input-pseudos + break :pseudo_class .disabled; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "read-only")) { + // https://drafts.csswg.org/selectors-4/#input-pseudos + break :pseudo_class .{ .read_only = css.VendorPrefix{ .none = true } }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, 
"-moz-read-only")) { + // https://drafts.csswg.org/selectors-4/#input-pseudos + break :pseudo_class .{ .read_only = css.VendorPrefix{ .moz = true } }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "read-write")) { + // https://drafts.csswg.org/selectors-4/#input-pseudos + break :pseudo_class .{ .read_write = css.VendorPrefix{ .none = true } }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "-moz-read-write")) { + // https://drafts.csswg.org/selectors-4/#input-pseudos + break :pseudo_class .{ .read_write = css.VendorPrefix{ .moz = true } }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "placeholder-shown")) { + // https://drafts.csswg.org/selectors-4/#input-pseudos + break :pseudo_class .{ .placeholder_shown = css.VendorPrefix{ .none = true } }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "-moz-placeholder-shown")) { + // https://drafts.csswg.org/selectors-4/#input-pseudos + break :pseudo_class .{ .placeholder_shown = css.VendorPrefix{ .moz = true } }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "-ms-placeholder-shown")) { + // https://drafts.csswg.org/selectors-4/#input-pseudos + break :pseudo_class .{ .placeholder_shown = css.VendorPrefix{ .ms = true } }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "default")) { + // https://drafts.csswg.org/selectors-4/#input-pseudos + break :pseudo_class .default; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "checked")) { + // https://drafts.csswg.org/selectors-4/#input-pseudos + break :pseudo_class .checked; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "indeterminate")) { + // https://drafts.csswg.org/selectors-4/#input-pseudos + break :pseudo_class .indeterminate; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "blank")) { + // https://drafts.csswg.org/selectors-4/#input-pseudos + break :pseudo_class .blank; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "valid")) { + // https://drafts.csswg.org/selectors-4/#input-pseudos + break :pseudo_class .valid; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "invalid")) { + // https://drafts.csswg.org/selectors-4/#input-pseudos + break :pseudo_class .invalid; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "in-range")) { + // https://drafts.csswg.org/selectors-4/#input-pseudos + break :pseudo_class .in_range; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "out-of-range")) { + // https://drafts.csswg.org/selectors-4/#input-pseudos + break :pseudo_class .out_of_range; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "required")) { + // https://drafts.csswg.org/selectors-4/#input-pseudos + break :pseudo_class .required; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "optional")) { + // https://drafts.csswg.org/selectors-4/#input-pseudos + break :pseudo_class .optional; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "user-valid")) { + // https://drafts.csswg.org/selectors-4/#input-pseudos + break :pseudo_class .user_valid; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "user-invalid")) { + // https://drafts.csswg.org/selectors-4/#input-pseudos + break :pseudo_class .user_invalid; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "autofill")) { + // https://html.spec.whatwg.org/multipage/semantics-other.html#selector-autofill + break :pseudo_class .{ 
.autofill = css.VendorPrefix{ .none = true } }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "-webkit-autofill")) { + // https://html.spec.whatwg.org/multipage/semantics-other.html#selector-autofill + break :pseudo_class .{ .autofill = css.VendorPrefix{ .webkit = true } }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "-o-autofill")) { + // https://html.spec.whatwg.org/multipage/semantics-other.html#selector-autofill + break :pseudo_class .{ .autofill = css.VendorPrefix{ .o = true } }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "horizontal")) { + // https://webkit.org/blog/363/styling-scrollbars/ + break :pseudo_class .{ .webkit_scrollbar = .horizontal }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "vertical")) { + // https://webkit.org/blog/363/styling-scrollbars/ + break :pseudo_class .{ .webkit_scrollbar = .vertical }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "decrement")) { + // https://webkit.org/blog/363/styling-scrollbars/ + break :pseudo_class .{ .webkit_scrollbar = .decrement }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "increment")) { + // https://webkit.org/blog/363/styling-scrollbars/ + break :pseudo_class .{ .webkit_scrollbar = .increment }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "start")) { + // https://webkit.org/blog/363/styling-scrollbars/ + break :pseudo_class .{ .webkit_scrollbar = .start }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "end")) { + // https://webkit.org/blog/363/styling-scrollbars/ + break :pseudo_class .{ .webkit_scrollbar = .end }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "double-button")) { + // https://webkit.org/blog/363/styling-scrollbars/ + break :pseudo_class .{ .webkit_scrollbar = .double_button }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "single-button")) { + // https://webkit.org/blog/363/styling-scrollbars/ + break :pseudo_class .{ .webkit_scrollbar = .single_button }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "no-button")) { + // https://webkit.org/blog/363/styling-scrollbars/ + break :pseudo_class .{ .webkit_scrollbar = .no_button }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "corner-present")) { + // https://webkit.org/blog/363/styling-scrollbars/ + break :pseudo_class .{ .webkit_scrollbar = .corner_present }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "window-inactive")) { + // https://webkit.org/blog/363/styling-scrollbars/ + break :pseudo_class .{ .webkit_scrollbar = .window_inactive }; + } else { + if (bun.strings.startsWithChar(name, '_')) { + this.options.warn(loc.newCustomError(SelectorParseErrorKind{ .unsupported_pseudo_class_or_element = name })); + } + return .{ .result = PseudoClass{ .custom = .{ .name = name } } }; + } + }; + + return .{ .result = pseudo_class }; + } + + pub fn parseNonTsFunctionalPseudoClass( + this: *SelectorParser, + name: []const u8, + parser: *css.Parser, + ) Result(PseudoClass) { + + // todo_stuff.match_ignore_ascii_case + const pseudo_class = pseudo_class: { + if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "lang")) { + const languages = switch (parser.parseCommaSeparated([]const u8, css.Parser.expectIdentOrString)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + return .{ .result = PseudoClass{ + .lang = .{ .languages = languages }, + } }; + } else if 
(bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "dir")) { + break :pseudo_class PseudoClass{ + .dir = .{ + .direction = switch (Direction.parse(parser)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }, + }, + }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "local") and this.options.css_modules != null) { + break :pseudo_class PseudoClass{ + .local = .{ + .selector = brk: { + const selector = switch (Selector.parse(this, parser)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + + break :brk bun.create(this.allocator, Selector, selector); + }, + }, + }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "global") and this.options.css_modules != null) { + break :pseudo_class PseudoClass{ + .global = .{ + .selector = brk: { + const selector = switch (Selector.parse(this, parser)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + + break :brk bun.create(this.allocator, Selector, selector); + }, + }, + }; + } else { + if (!bun.strings.startsWithChar(name, '-')) { + this.options.warn(parser.newCustomError(SelectorParseErrorKind.intoDefaultParserError(.{ .unsupported_pseudo_class_or_element = name }))); + } + var args = ArrayList(css.css_properties.custom.TokenOrValue){}; + _ = switch (css.TokenListFns.parseRaw(parser, &args, this.options, 0)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + break :pseudo_class PseudoClass{ + .custom_function = .{ + .name = name, + .arguments = css.TokenList{ .v = args }, + }, + }; + } + }; + + return .{ .result = pseudo_class }; + } + + pub fn isNestingAllowed(this: *SelectorParser) bool { + return this.is_nesting_allowed; + } + + pub fn deepCombinatorEnabled(this: *SelectorParser) bool { + return this.options.flags.contains(css.ParserFlags{ .deep_selector_combinator = true }); + } + + pub fn defaultNamespace(this: *SelectorParser) ?impl.Selectors.SelectorImpl.NamespaceUrl { + _ = this; // autofix + return null; + } + + pub fn parsePart(this: *SelectorParser) bool { + _ = this; // autofix + return true; + } + + pub fn parseSlotted(this: *SelectorParser) bool { + _ = this; // autofix + return true; + } + + /// The error recovery that selector lists inside :is() and :where() have. 
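+ /// For example, with `.ignore_invalid_selector` (returned below), `:is(.a, %, .b)` keeps
+ /// `.a` and `.b` and drops only the invalid `%` part, whereas a plain top-level list
+ /// `.a, %, .b` is rejected as a whole under `.discard_list`.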
+ fn isAndWhereErrorRecovery(this: *SelectorParser) ParseErrorRecovery { + _ = this; // autofix + return .ignore_invalid_selector; + } + + pub fn parsePseudoElement(this: *SelectorParser, loc: css.SourceLocation, name: []const u8) Result(PseudoElement) { + const Map = comptime bun.ComptimeStringMap(PseudoElement, .{ + .{ "before", PseudoElement.before }, + .{ "after", PseudoElement.after }, + .{ "first-line", PseudoElement.first_line }, + .{ "first-letter", PseudoElement.first_letter }, + .{ "cue", PseudoElement.cue }, + .{ "cue-region", PseudoElement.cue_region }, + .{ "selection", PseudoElement{ .selection = css.VendorPrefix{ .none = true } } }, + .{ "-moz-selection", PseudoElement{ .selection = css.VendorPrefix{ .moz = true } } }, + .{ "placeholder", PseudoElement{ .placeholder = css.VendorPrefix{ .none = true } } }, + .{ "-webkit-input-placeholder", PseudoElement{ .placeholder = css.VendorPrefix{ .webkit = true } } }, + .{ "-moz-placeholder", PseudoElement{ .placeholder = css.VendorPrefix{ .moz = true } } }, + .{ "-ms-input-placeholder", PseudoElement{ .placeholder = css.VendorPrefix{ .ms = true } } }, + .{ "marker", PseudoElement.marker }, + .{ "backdrop", PseudoElement{ .backdrop = css.VendorPrefix{ .none = true } } }, + .{ "-webkit-backdrop", PseudoElement{ .backdrop = css.VendorPrefix{ .webkit = true } } }, + .{ "file-selector-button", PseudoElement{ .file_selector_button = css.VendorPrefix{ .none = true } } }, + .{ "-webkit-file-upload-button", PseudoElement{ .file_selector_button = css.VendorPrefix{ .webkit = true } } }, + .{ "-ms-browse", PseudoElement{ .file_selector_button = css.VendorPrefix{ .ms = true } } }, + .{ "-webkit-scrollbar", PseudoElement{ .webkit_scrollbar = .scrollbar } }, + .{ "-webkit-scrollbar-button", PseudoElement{ .webkit_scrollbar = .button } }, + .{ "-webkit-scrollbar-track", PseudoElement{ .webkit_scrollbar = .track } }, + .{ "-webkit-scrollbar-track-piece", PseudoElement{ .webkit_scrollbar = .track_piece } }, + .{ "-webkit-scrollbar-thumb", PseudoElement{ .webkit_scrollbar = .thumb } }, + .{ "-webkit-scrollbar-corner", PseudoElement{ .webkit_scrollbar = .corner } }, + .{ "-webkit-resizer", PseudoElement{ .webkit_scrollbar = .resizer } }, + .{ "view-transition", PseudoElement.view_transition }, + }); + + const pseudo_element = Map.getCaseInsensitiveWithEql(name, bun.strings.eqlComptimeIgnoreLen) orelse brk: { + if (!bun.strings.startsWithChar(name, '-')) { + this.options.warn(loc.newCustomError(SelectorParseErrorKind{ .unsupported_pseudo_class_or_element = name })); + } + break :brk PseudoElement{ .custom = .{ .name = name } }; + }; + + return .{ .result = pseudo_element }; + } +}; + +pub fn GenericSelectorList(comptime Impl: type) type { + ValidSelectorImpl(Impl); + + const SelectorT = GenericSelector(Impl); + return struct { + // PERF: make this equivalent to SmallVec<[Selector; 1]> + v: css.SmallList(SelectorT, 1) = .{}, + + const This = @This(); + + const DebugFmt = struct { + this: *const This, + + pub fn format(this: @This(), comptime fmt: []const u8, options: std.fmt.FormatOptions, writer: anytype) !void { + if (comptime !bun.Environment.isDebug) return; + _ = fmt; // autofix + _ = options; // autofix + try writer.print("SelectorList[\n", .{}); + const last = this.this.v.len() -| 1; + for (this.this.v.slice(), 0..) 
|*sel, i| { + if (i != last) { + try writer.print(" {}\n", .{sel.debug()}); + } else { + try writer.print(" {},\n", .{sel.debug()}); + } + } + try writer.print("]\n", .{}); + } + }; + + pub fn debug(this: *const @This()) DebugFmt { + return DebugFmt{ .this = this }; + } + + pub fn anyHasPseudoElement(this: *const This) bool { + for (this.v.slice()) |*sel| { + if (sel.hasPseudoElement()) return true; + } + return false; + } + + pub fn specifitiesAllEqual(this: *const This) bool { + if (this.v.len() == 0) return true; + if (this.v.len() == 1) return true; + + const value = this.v.at(0).specifity(); + for (this.v.slice()[1..]) |*sel| { + if (sel.specifity() != value) return false; + } + return true; + } + + pub fn toCss(this: *const This, comptime W: type, dest: *Printer(W)) PrintErr!void { + _ = this; // autofix + _ = dest; // autofix + @compileError("Do not call this! Use `serializer.serializeSelectorList()` or `tocss_servo.toCss_SelectorList()` instead."); + } + + pub fn parseWithOptions(input: *css.Parser, options: *const css.ParserOptions) Result(This) { + var parser = SelectorParser{ + .options = options, + .is_nesting_allowed = true, + }; + return parse(&parser, input, .discard_list, .none); + } + + pub fn parse( + parser: *SelectorParser, + input: *css.Parser, + error_recovery: ParseErrorRecovery, + nesting_requirement: NestingRequirement, + ) Result(This) { + var state = SelectorParsingState.empty(); + return parseWithState(parser, input, &state, error_recovery, nesting_requirement); + } + + pub fn parseRelative( + parser: *SelectorParser, + input: *css.Parser, + error_recovery: ParseErrorRecovery, + nesting_requirement: NestingRequirement, + ) Result(This) { + var state = SelectorParsingState.empty(); + return parseRelativeWithState(parser, input, &state, error_recovery, nesting_requirement); + } + + pub fn parseWithState( + parser: *SelectorParser, + input: *css.Parser, + state: *SelectorParsingState, + recovery: ParseErrorRecovery, + nesting_requirement: NestingRequirement, + ) Result(This) { + const original_state = state.*; + // TODO: Think about deinitialization in error cases + var values = SmallList(SelectorT, 1){}; + + while (true) { + const Closure = struct { + outer_state: *SelectorParsingState, + original_state: SelectorParsingState, + nesting_requirement: NestingRequirement, + parser: *SelectorParser, + + pub fn parsefn(this: *@This(), input2: *css.Parser) Result(SelectorT) { + var selector_state = this.original_state; + const result = parse_selector(Impl, this.parser, input2, &selector_state, this.nesting_requirement); + if (selector_state.after_nesting) { + this.outer_state.after_nesting = true; + } + return result; + } + }; + var closure = Closure{ + .outer_state = state, + .original_state = original_state, + .nesting_requirement = nesting_requirement, + .parser = parser, + }; + const selector = input.parseUntilBefore(css.Delimiters{ .comma = true }, SelectorT, &closure, Closure.parsefn); + + const was_ok = selector.isOk(); + switch (selector) { + .result => |sel| { + values.append(input.allocator(), sel); + }, + .err => |e| { + switch (recovery) { + .discard_list => return .{ .err = e }, + .ignore_invalid_selector => {}, + } + }, + } + + while (true) { + if (input.next().asValue()) |tok| { + if (tok.* == .comma) break; + // Shouldn't have got a selector if getting here. 
+ bun.debugAssert(!was_ok); + } + return .{ .result = .{ .v = values } }; + } + } + } + + // TODO: this looks exactly the same as `parseWithState()` except it uses `parse_relative_selector()` instead of `parse_selector()` + pub fn parseRelativeWithState( + parser: *SelectorParser, + input: *css.Parser, + state: *SelectorParsingState, + recovery: ParseErrorRecovery, + nesting_requirement: NestingRequirement, + ) Result(This) { + const original_state = state.*; + // TODO: Think about deinitialization in error cases + var values = SmallList(SelectorT, 1){}; + + while (true) { + const Closure = struct { + outer_state: *SelectorParsingState, + original_state: SelectorParsingState, + nesting_requirement: NestingRequirement, + parser: *SelectorParser, + + pub fn parsefn(this: *@This(), input2: *css.Parser) Result(SelectorT) { + var selector_state = this.original_state; + const result = parse_relative_selector(Impl, this.parser, input2, &selector_state, this.nesting_requirement); + if (selector_state.after_nesting) { + this.outer_state.after_nesting = true; + } + return result; + } + }; + var closure = Closure{ + .outer_state = state, + .original_state = original_state, + .nesting_requirement = nesting_requirement, + .parser = parser, + }; + const selector = input.parseUntilBefore(css.Delimiters{ .comma = true }, SelectorT, &closure, Closure.parsefn); + + const was_ok = selector.isOk(); + switch (selector) { + .result => |sel| { + values.append(input.allocator(), sel); + }, + .err => |e| { + switch (recovery) { + .discard_list => return .{ .err = e }, + .ignore_invalid_selector => {}, + } + }, + } + + while (true) { + if (input.next().asValue()) |tok| { + if (tok.* == .comma) break; + // Shouldn't have got a selector if getting here. + bun.debugAssert(!was_ok); + } + return .{ .result = .{ .v = values } }; + } + } + } + + pub fn fromSelector(allocator: Allocator, selector: GenericSelector(Impl)) This { + var result = This{}; + result.v.append(allocator, selector); + return result; + } + + pub fn deepClone(this: *const @This(), allocator: Allocator) This { + return .{ .v = this.v.deepClone(allocator) }; + } + + pub fn eql(lhs: *const This, rhs: *const This) bool { + return lhs.v.eql(&rhs.v); + } + + pub fn hash(this: *const @This(), hasher: *std.hash.Wyhash) void { + return css.implementHash(@This(), this, hasher); + } + }; +} + +/// -- original comment from servo -- +/// A Selector stores a sequence of simple selectors and combinators. The +/// iterator classes allow callers to iterate at either the raw sequence level or +/// at the level of sequences of simple selectors separated by combinators. Most +/// callers want the higher-level iterator. +/// +/// We store compound selectors internally right-to-left (in matching order). +/// Additionally, we invert the order of top-level compound selectors so that +/// each one matches left-to-right. This is because matching namespace, local name, +/// id, and class are all relatively cheap, whereas matching pseudo-classes might +/// be expensive (depending on the pseudo-class). Since authors tend to put the +/// pseudo-classes on the right, it's faster to start matching on the left. +/// +/// This reordering doesn't change the semantics of selector matching, and we +/// handle it in to_css to make it invisible to serialization. 
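+///
+/// For example, within the compound `a.button:hover`, the cheap `a` and `.button` checks
+/// are evaluated before the potentially expensive `:hover` check.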
+pub fn GenericSelector(comptime Impl: type) type { + ValidSelectorImpl(Impl); + + return struct { + specifity_and_flags: SpecifityAndFlags, + components: ArrayList(GenericComponent(Impl)), + + const This = @This(); + + const DebugFmt = struct { + this: *const This, + + pub fn format(this: @This(), comptime fmt: []const u8, options: std.fmt.FormatOptions, writer: anytype) !void { + if (comptime !bun.Environment.isDebug) return; + _ = fmt; // autofix + _ = options; // autofix + try writer.print("Selector(", .{}); + var arraylist = ArrayList(u8){}; + const w = arraylist.writer(bun.default_allocator); + defer arraylist.deinit(bun.default_allocator); + var printer = css.Printer(@TypeOf(w)).new(bun.default_allocator, std.ArrayList(u8).init(bun.default_allocator), w, .{}, null); + defer printer.deinit(); + css.selector.tocss_servo.toCss_Selector(this.this, @TypeOf(w), &printer) catch |e| return try writer.print("\n", .{@errorName(e)}); + try writer.writeAll(arraylist.items); + } + }; + + pub fn debug(this: *const This) DebugFmt { + return DebugFmt{ .this = this }; + } + + /// Parse a selector, without any pseudo-element. + pub fn parse(parser: *SelectorParser, input: *css.Parser) Result(This) { + var state = SelectorParsingState.empty(); + return parse_selector(Impl, parser, input, &state, .none); + } + + pub fn toCss(this: *const This, comptime W: type, dest: *Printer(W)) PrintErr!void { + _ = this; // autofix + _ = dest; // autofix + @compileError("Do not call this! Use `serializer.serializeSelector()` or `tocss_servo.toCss_Selector()` instead."); + } + + pub fn append(this: *This, allocator: Allocator, component: GenericComponent(Impl)) void { + const index = index: { + for (this.components.items, 0..) |*comp, i| { + switch (comp.*) { + .combinator, .pseudo_element => break :index i, + else => {}, + } + } + break :index this.components.items.len; + }; + this.components.insert(allocator, index, component) catch bun.outOfMemory(); + } + + pub fn deepClone(this: *const @This(), allocator: Allocator) This { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(this: *const This, other: *const This) bool { + return css.implementEql(This, this, other); + } + + pub fn hasCombinator(this: *const This) bool { + for (this.components.items) |*c| { + if (c.* == .combinator and c.combinator.isTreeCombinator()) return true; + } + return false; + } + + pub fn hasPseudoElement(this: *const This) bool { + return this.specifity_and_flags.hasPseudoElement(); + } + + /// Returns count of simple selectors and combinators in the Selector. 
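+ /// For example, `div > p.a` yields four components (assuming no default namespace is in
+ /// effect): the simple selectors `div`, `p`, and `.a`, plus one child combinator.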
+ pub fn len(this: *const This) usize { + return this.components.items.len; + } + + pub fn fromComponent(allocator: Allocator, component: GenericComponent(Impl)) This { + var builder = SelectorBuilder(Impl).init(allocator); + if (component.asCombinator()) |combinator| { + builder.pushCombinator(combinator); + } else { + builder.pushSimpleSelector(component); + } + const result = builder.build(false, false, false); + return This{ + .specifity_and_flags = result.specifity_and_flags, + .components = result.components, + }; + } + + pub fn specifity(this: *const This) u32 { + return this.specifity_and_flags.specificity; + } + + pub fn parseWithOptions(input: *css.Parser, options: *const css.ParserOptions) Result(This) { + var selector_parser = SelectorParser{ + .is_nesting_allowed = true, + .options = options, + }; + return parse(&selector_parser, input); + } + + /// Returns an iterator over the sequence of simple selectors and + /// combinators, in parse order (from left to right), starting from + /// `offset`. + pub fn iterRawParseOrderFrom(this: *const This, offset: usize) RawParseOrderFromIter { + return RawParseOrderFromIter{ + .slice = this.components.items[0 .. this.components.items.len - offset], + }; + } + + const RawParseOrderFromIter = struct { + slice: []const GenericComponent(Impl), + i: usize = 0, + + pub fn next(this: *@This()) ?GenericComponent(Impl) { + if (!(this.i < this.slice.len)) return null; + const result = this.slice[this.slice.len - 1 - this.i]; + this.i += 1; + return result; + } + }; + + pub fn hash(this: *const @This(), hasher: *std.hash.Wyhash) void { + return css.implementHash(@This(), this, hasher); + } + }; +} + +/// A CSS simple selector or combinator. We store both in the same enum for +/// optimal packing and cache performance, see [1]. +/// +/// [1] https://bugzilla.mozilla.org/show_bug.cgi?id=1357973 +pub fn GenericComponent(comptime Impl: type) type { + ValidSelectorImpl(Impl); + + return union(enum) { + combinator: Combinator, + + explicit_any_namespace, + explicit_no_namespace, + default_namespace: Impl.SelectorImpl.NamespaceUrl, + namespace: struct { + prefix: Impl.SelectorImpl.NamespacePrefix, + url: Impl.SelectorImpl.NamespaceUrl, + + pub fn __generateEql() void {} + pub fn __generateDeepClone() void {} + pub fn __generateHash() void {} + }, + + explicit_universal_type, + local_name: LocalName(Impl), + + id: Impl.SelectorImpl.Identifier, + class: Impl.SelectorImpl.Identifier, + + attribute_in_no_namespace_exists: struct { + local_name: Impl.SelectorImpl.LocalName, + local_name_lower: Impl.SelectorImpl.LocalName, + + pub fn __generateEql() void {} + pub fn __generateDeepClone() void {} + pub fn __generateHash() void {} + }, + /// Used only when local_name is already lowercase. + attribute_in_no_namespace: struct { + local_name: Impl.SelectorImpl.LocalName, + operator: attrs.AttrSelectorOperator, + value: Impl.SelectorImpl.AttrValue, + case_sensitivity: attrs.ParsedCaseSensitivity, + never_matches: bool, + + pub fn __generateEql() void {} + pub fn __generateDeepClone() void {} + pub fn __generateHash() void {} + }, + /// Use a Box in the less common cases with more data to keep size_of::() small. 
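+ /// (Here that means storing a pointer, so the rarely used namespaced-attribute data does
+ /// not inflate the size of every component in this union.)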
+ attribute_other: *attrs.AttrSelectorWithOptionalNamespace(Impl), + + /// Pseudo-classes + negation: []GenericSelector(Impl), + root, + empty, + scope, + nth: NthSelectorData, + nth_of: NthOfSelectorData(Impl), + non_ts_pseudo_class: Impl.SelectorImpl.NonTSPseudoClass, + /// The ::slotted() pseudo-element: + /// + /// https://drafts.csswg.org/css-scoping/#slotted-pseudo + /// + /// The selector here is a compound selector, that is, no combinators. + /// + /// NOTE(emilio): This should support a list of selectors, but as of this + /// writing no other browser does, and that allows them to put ::slotted() + /// in the rule hash, so we do that too. + /// + /// See https://github.com/w3c/csswg-drafts/issues/2158 + slotted: GenericSelector(Impl), + /// The `::part` pseudo-element. + /// https://drafts.csswg.org/css-shadow-parts/#part + part: []Impl.SelectorImpl.Identifier, + /// The `:host` pseudo-class: + /// + /// https://drafts.csswg.org/css-scoping/#host-selector + /// + /// NOTE(emilio): This should support a list of selectors, but as of this + /// writing no other browser does, and that allows them to put :host() + /// in the rule hash, so we do that too. + /// + /// See https://github.com/w3c/csswg-drafts/issues/2158 + host: ?GenericSelector(Impl), + /// The `:where` pseudo-class. + /// + /// https://drafts.csswg.org/selectors/#zero-matches + /// + /// The inner argument is conceptually a SelectorList, but we move the + /// selectors to the heap to keep Component small. + where: []GenericSelector(Impl), + /// The `:is` pseudo-class. + /// + /// https://drafts.csswg.org/selectors/#matches-pseudo + /// + /// Same comment as above re. the argument. + is: []GenericSelector(Impl), + any: struct { + vendor_prefix: Impl.SelectorImpl.VendorPrefix, + selectors: []GenericSelector(Impl), + + pub fn __generateEql() void {} + pub fn __generateDeepClone() void {} + pub fn __generateHash() void {} + }, + /// The `:has` pseudo-class. + /// + /// https://www.w3.org/TR/selectors/#relational + has: []GenericSelector(Impl), + /// An implementation-dependent pseudo-element selector. + pseudo_element: Impl.SelectorImpl.PseudoElement, + /// A nesting selector: + /// + /// https://drafts.csswg.org/css-nesting-1/#nest-selector + /// + /// NOTE: This is a lightningcss addition. 
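+ /// For example, the `&` in `.card { &:hover { color: red; } }` parses to this component.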
+ nesting, + + const This = @This(); + + pub fn deepClone(this: *const @This(), allocator: Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(lhs: *const This, rhs: *const This) bool { + return css.implementEql(This, lhs, rhs); + } + + pub fn format(this: *const This, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { + switch (this.*) { + .local_name => return try writer.print("local_name={s}", .{this.local_name.name.v}), + .combinator => return try writer.print("combinator={}", .{this.combinator}), + .pseudo_element => return try writer.print("pseudo_element={}", .{this.pseudo_element}), + else => {}, + } + return writer.print("{s}", .{@tagName(this.*)}); + } + + pub fn asCombinator(this: *const This) ?Combinator { + if (this.* == .combinator) return this.combinator; + return null; + } + + pub fn convertHelper_is(s: []GenericSelector(Impl)) This { + return .{ .is = s }; + } + + pub fn convertHelper_where(s: []GenericSelector(Impl)) This { + return .{ .where = s }; + } + + pub fn convertHelper_any(s: []GenericSelector(Impl), prefix: Impl.SelectorImpl.VendorPrefix) This { + return .{ + .any = .{ + .vendor_prefix = prefix, + .selectors = s, + }, + }; + } + + /// Returns true if this is a combinator. + pub fn isCombinator(this: *const This) bool { + return this.* == .combinator; + } + + pub fn toCss(this: *const This, comptime W: type, dest: *Printer(W)) PrintErr!void { + _ = this; // autofix + _ = dest; // autofix + @compileError("Do not call this! Use `serializer.serializeComponent()` or `tocss_servo.toCss_Component()` instead."); + } + + pub fn hash(this: *const @This(), hasher: *std.hash.Wyhash) void { + return css.implementHash(@This(), this, hasher); + } + }; +} + +/// The properties that comprise an :nth- pseudoclass as of Selectors 3 (e.g., +/// nth-child(An+B)). 
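+/// For example, `:nth-child(2n+1)` is stored roughly as `.{ .ty = .child, .is_function = true, .a = 2, .b = 1 }`
+/// and serializes back as `odd`, while the non-functional `:first-child` uses A=0, B=1.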
+/// https://www.w3.org/TR/selectors-3/#nth-child-pseudo +pub const NthSelectorData = struct { + ty: NthType, + is_function: bool, + a: i32, + b: i32, + + /// Returns selector data for :only-{child,of-type} + pub fn only(of_type: bool) NthSelectorData { + return NthSelectorData{ + .ty = if (of_type) NthType.only_of_type else NthType.only_child, + .is_function = false, + .a = 0, + .b = 1, + }; + } + + /// Returns selector data for :first-{child,of-type} + pub fn first(of_type: bool) NthSelectorData { + return NthSelectorData{ + .ty = if (of_type) NthType.of_type else NthType.child, + .is_function = false, + .a = 0, + .b = 1, + }; + } + + /// Returns selector data for :last-{child,of-type} + pub fn last(of_type: bool) NthSelectorData { + return NthSelectorData{ + .ty = if (of_type) NthType.last_of_type else NthType.last_child, + .is_function = false, + .a = 0, + .b = 1, + }; + } + + pub fn writeStart(this: *const @This(), comptime W: type, dest: *Printer(W), is_function: bool) PrintErr!void { + try dest.writeStr(switch (this.ty) { + .child => if (is_function) ":nth-child(" else ":first-child", + .last_child => if (is_function) ":nth-last-child(" else ":last-child", + .of_type => if (is_function) ":nth-of-type(" else ":first-of-type", + .last_of_type => if (is_function) ":nth-last-of-type(" else ":last-of-type", + .only_child => if (is_function) ":nth-only-child(" else ":only-of-type", + .only_of_type => ":only-of-type", + .col => ":nth-col(", + .last_col => ":nth-last-col(", + }); + } + + pub fn isFunction(this: *const @This()) bool { + return this.a != 0 or this.b != 1; + } + + fn numberSign(num: i32) []const u8 { + if (num >= 0) return "+"; + return "-"; + } + + pub fn writeAffine(this: *const @This(), comptime W: type, dest: *Printer(W)) PrintErr!void { + // PERF: this could be made faster + if (this.a == 0 and this.b == 0) { + try dest.writeChar('0'); + } else if (this.a == 1 and this.b == 0) { + try dest.writeChar('n'); + } else if (this.a == -1 and this.b == 0) { + try dest.writeStr("-n"); + } else if (this.b == 0) { + try dest.writeFmt("{d}n", .{this.a}); + } else if (this.a == 2 and this.b == 1) { + try dest.writeStr("odd"); + } else if (this.a == 0) { + try dest.writeFmt("{d}", .{this.b}); + } else if (this.a == 1) { + try dest.writeFmt("n{s}{d}", .{ numberSign(this.b), this.b }); + } else if (this.a == -1) { + try dest.writeFmt("-n{s}{d}", .{ numberSign(this.b), this.b }); + } else { + try dest.writeFmt("{}n{s}{d}", .{ this.a, numberSign(this.b), this.b }); + } + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn hash(this: *const @This(), hasher: *std.hash.Wyhash) void { + return css.implementHash(@This(), this, hasher); + } +}; + +/// The properties that comprise an :nth- pseudoclass as of Selectors 4 (e.g., +/// nth-child(An+B [of S]?)). 
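+/// For example, `:nth-child(2 of .item)` stores A=0, B=2 in `data` alongside the parsed
+/// selector list for `.item` in `selectors`.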
+/// https://www.w3.org/TR/selectors-4/#nth-child-pseudo +pub fn NthOfSelectorData(comptime Impl: type) type { + return struct { + data: NthSelectorData, + selectors: []GenericSelector(Impl), + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn hash(this: *const @This(), hasher: *std.hash.Wyhash) void { + return css.implementHash(@This(), this, hasher); + } + + pub fn deepClone(this: *const @This(), allocator: Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn nthData(this: *const @This()) NthSelectorData { + return this.data; + } + + pub fn selectors(this: *const @This()) []GenericSelector(Impl) { + return this.selectors; + } + }; +} + +pub const SelectorParsingState = packed struct(u16) { + /// Whether we should avoid adding default namespaces to selectors that + /// aren't type or universal selectors. + skip_default_namespace: bool = false, + + /// Whether we've parsed a ::slotted() pseudo-element already. + /// + /// If so, then we can only parse a subset of pseudo-elements, and + /// whatever comes after them if so. + after_slotted: bool = false, + + /// Whether we've parsed a ::part() pseudo-element already. + /// + /// If so, then we can only parse a subset of pseudo-elements, and + /// whatever comes after them if so. + after_part: bool = false, + + /// Whether we've parsed a pseudo-element (as in, an + /// `Impl::PseudoElement` thus not accounting for `::slotted` or + /// `::part`) already. + /// + /// If so, then other pseudo-elements and most other selectors are + /// disallowed. + after_pseudo_element: bool = false, + + /// Whether we've parsed a non-stateful pseudo-element (again, as-in + /// `Impl::PseudoElement`) already. If so, then other pseudo-classes are + /// disallowed. If this flag is set, `AFTER_PSEUDO_ELEMENT` must be set + /// as well. + after_non_stateful_pseudo_element: bool = false, + + /// Whether we explicitly disallow combinators. + disallow_combinators: bool = false, + + /// Whether we explicitly disallow pseudo-element-like things. + disallow_pseudos: bool = false, + + /// Whether we have seen a nesting selector. + after_nesting: bool = false, + + after_webkit_scrollbar: bool = false, + after_view_transition: bool = false, + after_unknown_pseudo_element: bool = false, + __unused: u5 = 0, + + /// Whether we are after any of the pseudo-like things. 
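+ /// For example, once `::before` has been parsed, a state pseudo-class such as `:hover`
+ /// may still follow, but another pseudo-element or a class selector may not.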
+ pub const AFTER_PSEUDO = SelectorParsingState{ .after_part = true, .after_slotted = true, .after_pseudo_element = true }; + + pub usingnamespace css.Bitflags(@This()); + + pub fn allowsPseudos(this: SelectorParsingState) bool { + return !this.intersects(SelectorParsingState{ + .after_pseudo_element = true, + .disallow_pseudos = true, + }); + } + + pub fn allowsPart(this: SelectorParsingState) bool { + return !this.intersects(SelectorParsingState.AFTER_PSEUDO.bitwiseOr(SelectorParsingState{ .disallow_pseudos = true })); + } + + pub fn allowsSlotted(this: SelectorParsingState) bool { + return !this.intersects(SelectorParsingState.AFTER_PSEUDO.bitwiseOr(.{ .disallow_pseudos = true })); + } + + pub fn allowsTreeStructuralPseudoClasses(this: SelectorParsingState) bool { + return !this.intersects(SelectorParsingState.AFTER_PSEUDO); + } + + pub fn allowsNonFunctionalPseudoClasses(this: SelectorParsingState) bool { + return !this.intersects(SelectorParsingState{ .after_slotted = true, .after_non_stateful_pseudo_element = true }); + } + + pub fn allowsCombinators(this: SelectorParsingState) bool { + return !this.intersects(SelectorParsingState{ .disallow_combinators = true }); + } + + pub fn allowsCustomFunctionalPseudoClasses(this: SelectorParsingState) bool { + return !this.intersects(SelectorParsingState.AFTER_PSEUDO); + } +}; + +pub const SpecifityAndFlags = struct { + /// There are two free bits here, since we use ten bits for each specificity + /// kind (id, class, element). + specificity: u32, + /// There's padding after this field due to the size of the flags. + flags: SelectorFlags, + + pub fn eql(this: *const SpecifityAndFlags, other: *const SpecifityAndFlags) bool { + return this.specificity == other.specificity and this.flags.eql(other.flags); + } + + pub fn hasPseudoElement(this: *const SpecifityAndFlags) bool { + return this.flags.intersects(SelectorFlags{ .has_pseudo = true }); + } + + pub fn hash(this: *const @This(), hasher: *std.hash.Wyhash) void { + return css.implementHash(@This(), this, hasher); + } + + pub fn deepClone(this: *const @This(), allocator: Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } +}; + +pub const SelectorFlags = packed struct(u8) { + has_pseudo: bool = false, + has_slotted: bool = false, + has_part: bool = false, + __unused: u5 = 0, + + pub usingnamespace css.Bitflags(@This()); +}; + +/// How to treat invalid selectors in a selector list. +pub const ParseErrorRecovery = enum { + /// Discard the entire selector list, this is the default behavior for + /// almost all of CSS. + discard_list, + /// Ignore invalid selectors, potentially creating an empty selector list. + /// + /// This is the error recovery mode of :is() and :where() + ignore_invalid_selector, +}; + +pub const NestingRequirement = enum { + none, + prefixed, + contained, + implicit, +}; + +pub const Combinator = enum { + child, // > + descendant, // space + next_sibling, // + + later_sibling, // ~ + /// A dummy combinator we use to the left of pseudo-elements. + /// + /// It serializes as the empty string, and acts effectively as a child + /// combinator in most cases. If we ever actually start using a child + /// combinator for this, we will need to fix up the way hashes are computed + /// for revalidation selectors. + pseudo_element, + /// Another combinator used for ::slotted(), which represent the jump from + /// a node to its assigned slot. 
+ slot_assignment, + + /// Another combinator used for `::part()`, which represents the jump from + /// the part to the containing shadow host. + part, + + /// Non-standard Vue >>> combinator. + /// https://vue-loader.vuejs.org/guide/scoped-css.html#deep-selectors + deep_descendant, + /// Non-standard /deep/ combinator. + /// Appeared in early versions of the css-scoping-1 specification: + /// https://www.w3.org/TR/2014/WD-css-scoping-1-20140403/#deep-combinator + /// And still supported as an alias for >>> by Vue. + deep, + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return lhs.* == rhs.*; + } + + pub fn toCss(this: *const @This(), comptime W: type, dest: *Printer(W)) PrintErr!void { + _ = this; // autofix + _ = dest; // autofix + @compileError("Do not call this! Use `serializer.serializeCombinator()` or `tocss_servo.toCss_Combinator()` instead."); + } + + pub fn isTreeCombinator(this: *const @This()) bool { + return switch (this.*) { + .child, .descendant, .next_sibling, .later_sibling => true, + else => false, + }; + } + + pub fn format(this: *const Combinator, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { + return switch (this.*) { + .child => writer.print(">", .{}), + .descendant => writer.print("`descendant` (space)", .{}), + .next_sibling => writer.print("+", .{}), + .later_sibling => writer.print("~", .{}), + else => writer.print("{s}", .{@tagName(this.*)}), + }; + } +}; + +pub const SelectorParseErrorKind = union(enum) { + invalid_state, + class_needs_ident: css.Token, + pseudo_element_expected_ident: css.Token, + unsupported_pseudo_class_or_element: []const u8, + no_qualified_name_in_attribute_selector: css.Token, + unexpected_token_in_attribute_selector: css.Token, + unexpected_selector_after_pseudo_element: css.Token, + invalid_qual_name_in_attr: css.Token, + expected_bar_in_attr: css.Token, + empty_selector, + dangling_combinator, + invalid_pseudo_class_before_webkit_scrollbar, + invalid_pseudo_class_after_webkit_scrollbar, + invalid_pseudo_class_after_pseudo_element, + missing_nesting_selector, + missing_nesting_prefix, + expected_namespace: []const u8, + bad_value_in_attr: css.Token, + explicit_namespace_unexpected_token: css.Token, + unexpected_ident: []const u8, + + pub fn intoDefaultParserError(this: SelectorParseErrorKind) css.ParserError { + return css.ParserError{ + .selector_error = this.intoSelectorError(), + }; + } + + pub fn intoSelectorError(this: SelectorParseErrorKind) css.SelectorError { + return switch (this) { + .invalid_state => .invalid_state, + .class_needs_ident => |token| .{ .class_needs_ident = token }, + .pseudo_element_expected_ident => |token| .{ .pseudo_element_expected_ident = token }, + .unsupported_pseudo_class_or_element => |name| .{ .unsupported_pseudo_class_or_element = name }, + .no_qualified_name_in_attribute_selector => |token| .{ .no_qualified_name_in_attribute_selector = token }, + .unexpected_token_in_attribute_selector => |token| .{ .unexpected_token_in_attribute_selector = token }, + .invalid_qual_name_in_attr => |token| .{ .invalid_qual_name_in_attr = token }, + .expected_bar_in_attr => |token| .{ .expected_bar_in_attr = token }, + .empty_selector => .empty_selector, + .dangling_combinator => .dangling_combinator, + .invalid_pseudo_class_before_webkit_scrollbar => .invalid_pseudo_class_before_webkit_scrollbar, + .invalid_pseudo_class_after_webkit_scrollbar => .invalid_pseudo_class_after_webkit_scrollbar, + .invalid_pseudo_class_after_pseudo_element => 
.invalid_pseudo_class_after_pseudo_element, + .missing_nesting_selector => .missing_nesting_selector, + .missing_nesting_prefix => .missing_nesting_prefix, + .expected_namespace => |name| .{ .expected_namespace = name }, + .bad_value_in_attr => |token| .{ .bad_value_in_attr = token }, + .explicit_namespace_unexpected_token => |token| .{ .explicit_namespace_unexpected_token = token }, + .unexpected_ident => |ident| .{ .unexpected_ident = ident }, + .unexpected_selector_after_pseudo_element => |tok| .{ .unexpected_selector_after_pseudo_element = tok }, + }; + } +}; + +pub fn SimpleSelectorParseResult(comptime Impl: type) type { + ValidSelectorImpl(Impl); + + return union(enum) { + simple_selector: GenericComponent(Impl), + pseudo_element: Impl.SelectorImpl.PseudoElement, + slotted_pseudo: GenericSelector(Impl), + // todo_stuff.think_mem_mgmt + part_pseudo: []Impl.SelectorImpl.Identifier, + }; +} + +/// A pseudo element. +pub const PseudoElement = union(enum) { + /// The [::after](https://drafts.csswg.org/css-pseudo-4/#selectordef-after) pseudo element. + after, + /// The [::before](https://drafts.csswg.org/css-pseudo-4/#selectordef-before) pseudo element. + before, + /// The [::first-line](https://drafts.csswg.org/css-pseudo-4/#first-line-pseudo) pseudo element. + first_line, + /// The [::first-letter](https://drafts.csswg.org/css-pseudo-4/#first-letter-pseudo) pseudo element. + first_letter, + /// The [::selection](https://drafts.csswg.org/css-pseudo-4/#selectordef-selection) pseudo element. + selection: css.VendorPrefix, + /// The [::placeholder](https://drafts.csswg.org/css-pseudo-4/#placeholder-pseudo) pseudo element. + placeholder: css.VendorPrefix, + /// The [::marker](https://drafts.csswg.org/css-pseudo-4/#marker-pseudo) pseudo element. + marker, + /// The [::backdrop](https://fullscreen.spec.whatwg.org/#::backdrop-pseudo-element) pseudo element. + backdrop: css.VendorPrefix, + /// The [::file-selector-button](https://drafts.csswg.org/css-pseudo-4/#file-selector-button-pseudo) pseudo element. + file_selector_button: css.VendorPrefix, + /// A [webkit scrollbar](https://webkit.org/blog/363/styling-scrollbars/) pseudo element. + webkit_scrollbar: WebKitScrollbarPseudoElement, + /// The [::cue](https://w3c.github.io/webvtt/#the-cue-pseudo-element) pseudo element. + cue, + /// The [::cue-region](https://w3c.github.io/webvtt/#the-cue-region-pseudo-element) pseudo element. + cue_region, + /// The [::cue()](https://w3c.github.io/webvtt/#cue-selector) functional pseudo element. + cue_function: struct { + /// The selector argument. + selector: *Selector, + + pub fn __generateEql() void {} + pub fn __generateDeepClone() void {} + pub fn __generateHash() void {} + }, + /// The [::cue-region()](https://w3c.github.io/webvtt/#cue-region-selector) functional pseudo element. + cue_region_function: struct { + /// The selector argument. + selector: *Selector, + + pub fn __generateEql() void {} + pub fn __generateDeepClone() void {} + pub fn __generateHash() void {} + }, + /// The [::view-transition](https://w3c.github.io/csswg-drafts/css-view-transitions-1/#view-transition) pseudo element. + view_transition, + /// The [::view-transition-group()](https://w3c.github.io/csswg-drafts/css-view-transitions-1/#view-transition-group-pt-name-selector) functional pseudo element. + view_transition_group: struct { + /// A part name selector. 
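+ /// For example, the `card` in `::view-transition-group(card)`, or `*` for all groups.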
+ part_name: ViewTransitionPartName, + + pub fn __generateEql() void {} + pub fn __generateDeepClone() void {} + pub fn __generateHash() void {} + }, + /// The [::view-transition-image-pair()](https://w3c.github.io/csswg-drafts/css-view-transitions-1/#view-transition-image-pair-pt-name-selector) functional pseudo element. + view_transition_image_pair: struct { + /// A part name selector. + part_name: ViewTransitionPartName, + + pub fn __generateEql() void {} + pub fn __generateDeepClone() void {} + pub fn __generateHash() void {} + }, + /// The [::view-transition-old()](https://w3c.github.io/csswg-drafts/css-view-transitions-1/#view-transition-old-pt-name-selector) functional pseudo element. + view_transition_old: struct { + /// A part name selector. + part_name: ViewTransitionPartName, + + pub fn __generateEql() void {} + pub fn __generateDeepClone() void {} + pub fn __generateHash() void {} + }, + /// The [::view-transition-new()](https://w3c.github.io/csswg-drafts/css-view-transitions-1/#view-transition-new-pt-name-selector) functional pseudo element. + view_transition_new: struct { + /// A part name selector. + part_name: ViewTransitionPartName, + + pub fn __generateEql() void {} + pub fn __generateDeepClone() void {} + pub fn __generateHash() void {} + }, + /// An unknown pseudo element. + custom: struct { + /// The name of the pseudo element. + name: []const u8, + + pub fn __generateEql() void {} + pub fn __generateDeepClone() void {} + pub fn __generateHash() void {} + }, + /// An unknown functional pseudo element. + custom_function: struct { + /// The name of the pseudo element. + name: []const u8, + /// The arguments of the pseudo element function. + arguments: css.TokenList, + + pub fn __generateEql() void {} + pub fn __generateDeepClone() void {} + pub fn __generateHash() void {} + }, + + pub fn isEquivalent(this: *const PseudoElement, other: *const PseudoElement) bool { + if (this.* == .selection and other.* == .selection) return true; + if (this.* == .placeholder and other.* == .placeholder) return true; + if (this.* == .backdrop and other.* == .backdrop) return true; + if (this.* == .file_selector_button and other.* == .file_selector_button) return true; + return this.eql(other); + } + + pub fn eql(this: *const PseudoElement, other: *const PseudoElement) bool { + return css.implementEql(PseudoElement, this, other); + } + + pub fn hash(this: *const @This(), hasher: *std.hash.Wyhash) void { + return css.implementHash(@This(), this, hasher); + } + + pub fn deepClone(this: *const @This(), allocator: Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn getNecessaryPrefixes(this: *PseudoElement, targets: css.targets.Targets) css.VendorPrefix { + const F = css.prefixes.Feature; + const p: *css.VendorPrefix, const feature: F = switch (this.*) { + .selection => |*p| .{ p, F.pseudo_element_selection }, + .placeholder => |*p| .{ p, F.pseudo_element_placeholder }, + .backdrop => |*p| .{ p, F.pseudo_element_backdrop }, + .file_selector_button => |*p| .{ p, F.pseudo_element_file_selector_button }, + else => return css.VendorPrefix.empty(), + }; + + p.* = targets.prefixes(p.*, feature); + + return p.*; + } + + pub fn getPrefix(this: *const PseudoElement) css.VendorPrefix { + return switch (this.*) { + .selection, .placeholder, .backdrop, .file_selector_button => |p| p, + else => css.VendorPrefix.empty(), + }; + } + + pub fn format(this: *const PseudoElement, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { + try 
writer.print("{s}", .{@tagName(this.*)});
+ }
+
+ pub fn validAfterSlotted(this: *const PseudoElement) bool {
+ return switch (this.*) {
+ .before, .after, .marker, .placeholder, .file_selector_button => true,
+ else => false,
+ };
+ }
+
+ pub fn isUnknown(this: *const PseudoElement) bool {
+ return switch (this.*) {
+ .custom, .custom_function => true,
+ else => false,
+ };
+ }
+
+ pub fn acceptsStatePseudoClasses(this: *const PseudoElement) bool {
+ _ = this; // autofix
+ // Be lenient.
+ return true;
+ }
+
+ pub fn isWebkitScrollbar(this: *const PseudoElement) bool {
+ return this.* == .webkit_scrollbar;
+ }
+
+ pub fn isViewTransition(this: *const PseudoElement) bool {
+ return switch (this.*) {
+ .view_transition_group, .view_transition_image_pair, .view_transition_new, .view_transition_old => true,
+ else => false,
+ };
+ }
+
+ pub fn toCss(this: *const PseudoElement, comptime W: type, dest: *Printer(W)) PrintErr!void {
+ var s = ArrayList(u8){};
+ // PERF(alloc): I don't like making small allocations here for the string.
+ const writer = s.writer(dest.allocator);
+ const W2 = @TypeOf(writer);
+ const scratchbuf = std.ArrayList(u8).init(dest.allocator);
+ var printer = Printer(W2).new(dest.allocator, scratchbuf, writer, css.PrinterOptions{}, dest.import_records);
+ try serialize.serializePseudoElement(this, W2, &printer, null);
+ return dest.writeStr(s.items);
+ }
+};
+
+/// An enum for the different types of :nth- pseudoclasses
+pub const NthType = enum {
+ child,
+ last_child,
+ only_child,
+ of_type,
+ last_of_type,
+ only_of_type,
+ col,
+ last_col,
+
+ pub fn isOnly(self: NthType) bool {
+ return self == NthType.only_child or self == NthType.only_of_type;
+ }
+
+ pub fn isOfType(self: NthType) bool {
+ return self == NthType.of_type or self == NthType.last_of_type or self == NthType.only_of_type;
+ }
+
+ pub fn isFromEnd(self: NthType) bool {
+ return self == NthType.last_child or self == NthType.last_of_type or self == NthType.last_col;
+ }
+
+ pub fn allowsOfSelector(self: NthType) bool {
+ return self == NthType.child or self == NthType.last_child;
+ }
+};
+
+/// * `Err(())`: Invalid selector, abort
+/// * `Ok(false)`: Not a type selector, could be something else. `input` was not consumed.
+/// * `Ok(true)`: Length 0 (`*|*`), 1 (`*|E` or `ns|*`) or 2 (`|E` or `ns|E`) +pub fn parse_type_selector( + comptime Impl: type, + parser: *SelectorParser, + input: *css.Parser, + state: SelectorParsingState, + sink: *SelectorBuilder(Impl), +) Result(bool) { + const result = switch (parse_qualified_name( + Impl, + parser, + input, + false, + )) { + .result => |v| v, + .err => |e| { + if (e.kind == .basic and e.kind.basic == .end_of_input) { + return .{ .result = false }; + } + + return .{ .err = e }; + }, + }; + + if (result == .none) return .{ .result = false }; + + const namespace: QNamePrefix(Impl) = result.some[0]; + const local_name: ?[]const u8 = result.some[1]; + if (state.intersects(SelectorParsingState.AFTER_PSEUDO)) { + return .{ .err = input.newCustomError(SelectorParseErrorKind.intoDefaultParserError(.invalid_state)) }; + } + + switch (namespace) { + .implicit_any_namespace => {}, + .implicit_default_namespace => |url| { + sink.pushSimpleSelector(.{ .default_namespace = url }); + }, + .explicit_namespace => { + const prefix = namespace.explicit_namespace[0]; + const url = namespace.explicit_namespace[1]; + const component: GenericComponent(Impl) = component: { + if (parser.defaultNamespace()) |default_url| { + if (bun.strings.eql(url, default_url)) { + break :component .{ .default_namespace = url }; + } + } + break :component .{ + .namespace = .{ + .prefix = prefix, + .url = url, + }, + }; + }; + sink.pushSimpleSelector(component); + }, + .explicit_no_namespace => { + sink.pushSimpleSelector(.explicit_no_namespace); + }, + .explicit_any_namespace => { + // Element type selectors that have no namespace + // component (no namespace separator) represent elements + // without regard to the element's namespace (equivalent + // to "*|") unless a default namespace has been declared + // for namespaced selectors (e.g. in CSS, in the style + // sheet). If a default namespace has been declared, + // such selectors will represent only elements in the + // default namespace. + // -- Selectors § 6.1.1 + // So we'll have this act the same as the + // QNamePrefix::ImplicitAnyNamespace case. + // For lightning css this logic was removed, should be handled when matching. + sink.pushSimpleSelector(.explicit_any_namespace); + }, + .implicit_no_namespace => { + bun.unreachablePanic("Should not be returned with in_attr_selector = false", .{}); + }, + } + + if (local_name) |name| { + sink.pushSimpleSelector(.{ + .local_name = LocalName(Impl){ + .lower_name = brk: { + var lowercase = parser.allocator.alloc(u8, name.len) catch unreachable; // PERF: check if it's already lowercase + break :brk .{ .v = bun.strings.copyLowercase(name, lowercase[0..]) }; + }, + .name = .{ .v = name }, + }, + }); + } else { + sink.pushSimpleSelector(.explicit_universal_type); + } + + return .{ .result = true }; +} + +/// Parse a simple selector other than a type selector. +/// +/// * `Err(())`: Invalid selector, abort +/// * `Ok(None)`: Not a simple selector, could be something else. `input` was not consumed. 
+/// * `Ok(Some(_))`: Parsed a simple selector or pseudo-element +pub fn parse_one_simple_selector( + comptime Impl: type, + parser: *SelectorParser, + input: *css.Parser, + state: *SelectorParsingState, +) Result(?SimpleSelectorParseResult(Impl)) { + const S = SimpleSelectorParseResult(Impl); + + const start = input.state(); + const token = switch (input.nextIncludingWhitespace()) { + .result => |v| v.*, + .err => { + input.reset(&start); + return .{ .result = null }; + }, + }; + + switch (token) { + .idhash => |id| { + if (state.intersects(SelectorParsingState.AFTER_PSEUDO)) { + return .{ .err = input.newCustomError(SelectorParseErrorKind.intoDefaultParserError(.invalid_state)) }; + } + const component: GenericComponent(Impl) = .{ .id = .{ .v = id } }; + return .{ .result = S{ + .simple_selector = component, + } }; + }, + .open_square => { + if (state.intersects(SelectorParsingState.AFTER_PSEUDO)) { + return .{ .err = input.newCustomError(SelectorParseErrorKind.intoDefaultParserError(.invalid_state)) }; + } + const Closure = struct { + parser: *SelectorParser, + pub fn parsefn(this: *@This(), input2: *css.Parser) Result(GenericComponent(Impl)) { + return parse_attribute_selector(Impl, this.parser, input2); + } + }; + var closure = Closure{ + .parser = parser, + }; + const attr = switch (input.parseNestedBlock(GenericComponent(Impl), &closure, Closure.parsefn)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + return .{ .result = .{ .simple_selector = attr } }; + }, + .colon => { + const location = input.currentSourceLocation(); + const is_single_colon: bool, const next_token: css.Token = switch ((switch (input.nextIncludingWhitespace()) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }).*) { + .colon => .{ false, (switch (input.nextIncludingWhitespace()) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }).* }, + else => |t| .{ true, t }, + }; + const name: []const u8, const is_functional = switch (next_token) { + .ident => |name| .{ name, false }, + .function => |name| .{ name, true }, + else => |t| { + const e = SelectorParseErrorKind{ .pseudo_element_expected_ident = t }; + return .{ .err = input.newCustomError(e.intoDefaultParserError()) }; + }, + }; + const is_pseudo_element = !is_single_colon or is_css2_pseudo_element(name); + if (is_pseudo_element) { + if (!state.allowsPseudos()) { + return .{ .err = input.newCustomError(SelectorParseErrorKind.intoDefaultParserError(.invalid_state)) }; + } + const pseudo_element: Impl.SelectorImpl.PseudoElement = if (is_functional) pseudo_element: { + if (parser.parsePart() and bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "part")) { + if (!state.allowsPart()) { + return .{ .err = input.newCustomError(SelectorParseErrorKind.intoDefaultParserError(.invalid_state)) }; + } + + const Closure = struct { + parser: *SelectorParser, + + pub fn parsefn(self: *const @This(), input2: *css.Parser) Result([]Impl.SelectorImpl.Identifier) { + // todo_stuff.think_about_mem_mgmt + var result = ArrayList(Impl.SelectorImpl.Identifier).initCapacity( + self.parser.allocator, + // TODO: source does this, should see if initializing to 1 is actually better + // when appending empty std.ArrayList(T), it will usually initially reserve 8 elements, + // maybe that's unnecessary, or maybe smallvec is gud here + 1, + ) catch unreachable; + + result.append( + self.parser.allocator, + switch (input2.expectIdent()) { + .err => |e| return .{ .err = e }, + .result => |v| .{ .v = v }, + }, + ) catch unreachable; + + while 
(!input2.isExhausted()) { + result.append( + self.parser.allocator, + switch (input2.expectIdent()) { + .err => |e| return .{ .err = e }, + .result => |v| .{ .v = v }, + }, + ) catch unreachable; + } + + return .{ .result = result.items }; + } + }; + + const names = switch (input.parseNestedBlock([]Impl.SelectorImpl.Identifier, &Closure{ .parser = parser }, Closure.parsefn)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + + return .{ .result = .{ .part_pseudo = names } }; + } + + if (parser.parseSlotted() and bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "slotted")) { + if (!state.allowsSlotted()) { + return .{ .err = input.newCustomError(SelectorParseErrorKind.intoDefaultParserError(.invalid_state)) }; + } + const Closure = struct { + parser: *SelectorParser, + state: *SelectorParsingState, + pub fn parsefn(this: *@This(), input2: *css.Parser) Result(GenericSelector(Impl)) { + return parse_inner_compound_selector(Impl, this.parser, input2, this.state); + } + }; + var closure = Closure{ + .parser = parser, + .state = state, + }; + const selector = switch (input.parseNestedBlock(GenericSelector(Impl), &closure, Closure.parsefn)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + return .{ .result = .{ .slotted_pseudo = selector } }; + } + + const Closure = struct { + parser: *SelectorParser, + state: *SelectorParsingState, + name: []const u8, + }; + break :pseudo_element switch (input.parseNestedBlock(Impl.SelectorImpl.PseudoElement, &Closure{ .parser = parser, .state = state, .name = name }, struct { + pub fn parseFn(closure: *const Closure, i: *css.Parser) Result(Impl.SelectorImpl.PseudoElement) { + return closure.parser.parseFunctionalPseudoElement(closure.name, i); + } + }.parseFn)) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + } else pseudo_element: { + break :pseudo_element switch (parser.parsePseudoElement(location, name)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + }; + + if (state.intersects(.{ .after_slotted = true }) and pseudo_element.validAfterSlotted()) { + return .{ .result = .{ .pseudo_element = pseudo_element } }; + } + + return .{ .result = .{ .pseudo_element = pseudo_element } }; + } else { + const pseudo_class: GenericComponent(Impl) = if (is_functional) pseudo_class: { + const Closure = struct { + parser: *SelectorParser, + name: []const u8, + state: *SelectorParsingState, + pub fn parsefn(this: *@This(), input2: *css.Parser) Result(GenericComponent(Impl)) { + return parse_functional_pseudo_class(Impl, this.parser, input2, this.name, this.state); + } + }; + var closure = Closure{ + .parser = parser, + .name = name, + .state = state, + }; + + break :pseudo_class switch (input.parseNestedBlock(GenericComponent(Impl), &closure, Closure.parsefn)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + } else switch (parse_simple_pseudo_class(Impl, parser, location, name, state.*)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + return .{ .result = .{ .simple_selector = pseudo_class } }; + } + }, + .delim => |d| { + switch (d) { + '.' 
=> { + if (state.intersects(SelectorParsingState.AFTER_PSEUDO)) { + return .{ .err = input.newCustomError(SelectorParseErrorKind.intoDefaultParserError(.invalid_state)) }; + } + const location = input.currentSourceLocation(); + const class = switch ((switch (input.nextIncludingWhitespace()) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }).*) { + .ident => |class| class, + else => |t| { + const e = SelectorParseErrorKind{ .class_needs_ident = t }; + return .{ .err = location.newCustomError(e.intoDefaultParserError()) }; + }, + }; + return .{ .result = .{ .simple_selector = .{ .class = .{ .v = class } } } }; + }, + '&' => { + if (parser.isNestingAllowed()) { + state.insert(SelectorParsingState{ .after_nesting = true }); + return .{ .result = S{ + .simple_selector = .nesting, + } }; + } + }, + else => {}, + } + }, + else => {}, + } + + input.reset(&start); + return .{ .result = null }; +} + +pub fn parse_attribute_selector(comptime Impl: type, parser: *SelectorParser, input: *css.Parser) Result(GenericComponent(Impl)) { + const N = attrs.NamespaceConstraint(attrs.NamespaceUrl(Impl)); + + const namespace: ?N, const local_name: []const u8 = brk: { + input.skipWhitespace(); + + const _qname = switch (parse_qualified_name(Impl, parser, input, true)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + switch (_qname) { + .none => |t| return .{ .err = input.newCustomError(SelectorParseErrorKind.intoDefaultParserError(.{ .no_qualified_name_in_attribute_selector = t })) }, + .some => |qname| { + if (qname[1] == null) { + bun.unreachablePanic("", .{}); + } + const ns: QNamePrefix(Impl) = qname[0]; + const ln = qname[1].?; + break :brk .{ + switch (ns) { + .implicit_no_namespace, .explicit_no_namespace => null, + .explicit_namespace => |x| .{ .specific = .{ .prefix = x[0], .url = x[1] } }, + .explicit_any_namespace => .any, + .implicit_any_namespace, .implicit_default_namespace => { + bun.unreachablePanic("Not returned with in_attr_selector = true", .{}); + }, + }, + ln, + }; + }, + } + }; + + const location = input.currentSourceLocation(); + const operator: attrs.AttrSelectorOperator = operator: { + const tok = switch (input.next()) { + .result => |v| v, + .err => { + // [foo] + const local_name_lower = local_name_lower: { + const lower = parser.allocator.alloc(u8, local_name.len) catch unreachable; + _ = bun.strings.copyLowercase(local_name, lower); + break :local_name_lower lower; + }; + if (namespace) |ns| { + const x = attrs.AttrSelectorWithOptionalNamespace(Impl){ + .namespace = ns, + .local_name = .{ .v = local_name }, + .local_name_lower = .{ .v = local_name_lower }, + .never_matches = false, + .operation = .exists, + }; + return .{ + .result = .{ .attribute_other = bun.create(parser.allocator, attrs.AttrSelectorWithOptionalNamespace(Impl), x) }, + }; + } else { + return .{ .result = .{ + .attribute_in_no_namespace_exists = .{ + .local_name = .{ .v = local_name }, + .local_name_lower = .{ .v = local_name_lower }, + }, + } }; + } + }, + }; + + switch (tok.*) { + // [foo=bar] + .delim => |d| { + if (d == '=') break :operator .equal; + }, + // [foo~=bar] + .include_match => break :operator .includes, + // [foo|=bar] + .dash_match => break :operator .dash_match, + // [foo^=bar] + .prefix_match => break :operator .prefix, + // [foo*=bar] + .substring_match => break :operator .substring, + // [foo$=bar] + .suffix_match => break :operator .suffix, + else => {}, + } + return .{ .err = location.newCustomError(SelectorParseErrorKind.intoDefaultParserError(.{ 
.unexpected_token_in_attribute_selector = tok.* })) }; + }; + + const value_str: []const u8 = switch (input.expectIdentOrString()) { + .result => |v| v, + .err => |e| { + if (e.kind == .basic and e.kind.basic == .unexpected_token) { + return .{ .err = e.location.newCustomError(SelectorParseErrorKind.intoDefaultParserError(.{ .bad_value_in_attr = e.kind.basic.unexpected_token })) }; + } + return .{ + .err = .{ + .kind = e.kind, + .location = e.location, + }, + }; + }, + }; + const never_matches = switch (operator) { + .equal, .dash_match => false, + .includes => value_str.len == 0 or std.mem.indexOfAny(u8, value_str, SELECTOR_WHITESPACE) != null, + .prefix, .substring, .suffix => value_str.len == 0, + }; + + const attribute_flags = switch (parse_attribute_flags(input)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + + const value: Impl.SelectorImpl.AttrValue = value_str; + const local_name_lower: Impl.SelectorImpl.LocalName, const local_name_is_ascii_lowercase: bool = brk: { + if (a: { + for (local_name, 0..) |b, i| { + if (b >= 'A' and b <= 'Z') break :a i; + } + break :a null; + }) |first_uppercase| { + const str = local_name[first_uppercase..]; + const lower = parser.allocator.alloc(u8, str.len) catch unreachable; + break :brk .{ .{ .v = bun.strings.copyLowercase(str, lower) }, false }; + } else { + break :brk .{ .{ .v = local_name }, true }; + } + }; + const case_sensitivity: attrs.ParsedCaseSensitivity = attribute_flags.toCaseSensitivity(local_name_lower.v, namespace != null); + if (namespace != null and !local_name_is_ascii_lowercase) { + return .{ .result = .{ + .attribute_other = brk: { + const x = attrs.AttrSelectorWithOptionalNamespace(Impl){ + .namespace = namespace, + .local_name = .{ .v = local_name }, + .local_name_lower = local_name_lower, + .never_matches = never_matches, + .operation = .{ + .with_value = .{ + .operator = operator, + .case_sensitivity = case_sensitivity, + .expected_value = value, + }, + }, + }; + break :brk bun.create(parser.allocator, @TypeOf(x), x); + }, + } }; + } else { + return .{ .result = .{ + .attribute_in_no_namespace = .{ + .local_name = .{ .v = local_name }, + .operator = operator, + .value = value, + .case_sensitivity = case_sensitivity, + .never_matches = never_matches, + }, + } }; + } +} + +/// Returns whether the name corresponds to a CSS2 pseudo-element that +/// can be specified with the single colon syntax (in addition to the +/// double-colon syntax, which can be used for all pseudo-elements). +pub fn is_css2_pseudo_element(name: []const u8) bool { + // ** Do not add to this list! ** + // TODO: todo_stuff.match_ignore_ascii_case + return bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "before") or + bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "after") or + bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "first-line") or + bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "first-letter"); +} + +/// Parses one compound selector suitable for nested stuff like :-moz-any, etc. 
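+///
+/// Illustrative examples: the `span.icon` argument of `::slotted(span.icon)` or the `.card`
+/// argument of `:host(.card)`. Combinators and pseudo-elements are disallowed inside these
+/// arguments via `disallow_combinators`/`disallow_pseudos` below.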
+pub fn parse_inner_compound_selector( + comptime Impl: type, + parser: *SelectorParser, + input: *css.Parser, + state: *SelectorParsingState, +) Result(GenericSelector(Impl)) { + var child_state = brk: { + var child_state = state.*; + child_state.disallow_pseudos = true; + child_state.disallow_combinators = true; + break :brk child_state; + }; + const result = switch (parse_selector(Impl, parser, input, &child_state, NestingRequirement.none)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + if (child_state.after_nesting) { + state.after_nesting = true; + } + return .{ .result = result }; +} + +pub fn parse_functional_pseudo_class( + comptime Impl: type, + parser: *SelectorParser, + input: *css.Parser, + name: []const u8, + state: *SelectorParsingState, +) Result(GenericComponent(Impl)) { + const FunctionalPseudoClass = enum { + @"nth-child", + @"nth-of-type", + @"nth-last-child", + @"nth-last-of-type", + @"nth-col", + @"nth-last-col", + is, + where, + has, + host, + not, + }; + const Map = bun.ComptimeEnumMap(FunctionalPseudoClass); + + if (Map.getASCIIICaseInsensitive(name)) |functional_pseudo_class| { + switch (functional_pseudo_class) { + .@"nth-child" => return parse_nth_pseudo_class(Impl, parser, input, state.*, .child), + .@"nth-of-type" => return parse_nth_pseudo_class(Impl, parser, input, state.*, .of_type), + .@"nth-last-child" => return parse_nth_pseudo_class(Impl, parser, input, state.*, .last_child), + .@"nth-last-of-type" => return parse_nth_pseudo_class(Impl, parser, input, state.*, .last_of_type), + .@"nth-col" => return parse_nth_pseudo_class(Impl, parser, input, state.*, .col), + .@"nth-last-col" => return parse_nth_pseudo_class(Impl, parser, input, state.*, .last_col), + .is => if (parser.parseIsAndWhere()) return parse_is_or_where(Impl, parser, input, state, GenericComponent(Impl).convertHelper_is, .{}), + .where => if (parser.parseIsAndWhere()) return parse_is_or_where(Impl, parser, input, state, GenericComponent(Impl).convertHelper_where, .{}), + .has => return parse_has(Impl, parser, input, state), + .host => if (!state.allowsTreeStructuralPseudoClasses()) + return .{ .err = input.newCustomError(SelectorParseErrorKind.intoDefaultParserError(.invalid_state)) } + else + return .{ .result = .{ + .host = switch (parse_inner_compound_selector(Impl, parser, input, state)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }, + } }, + .not => return parse_negation(Impl, parser, input, state), + } + } + + if (parser.parseAnyPrefix(name)) |prefix| { + return parse_is_or_where(Impl, parser, input, state, GenericComponent(Impl).convertHelper_any, .{prefix}); + } + + if (!state.allowsCustomFunctionalPseudoClasses()) { + return .{ .err = input.newCustomError(SelectorParseErrorKind.intoDefaultParserError(.invalid_state)) }; + } + + const result = switch (parser.parseNonTsFunctionalPseudoClass(name, input)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + + return .{ .result = .{ .non_ts_pseudo_class = result } }; +} + +pub fn parse_simple_pseudo_class( + comptime Impl: type, + parser: *SelectorParser, + location: css.SourceLocation, + name: []const u8, + state: SelectorParsingState, +) Result(GenericComponent(Impl)) { + if (!state.allowsNonFunctionalPseudoClasses()) { + return .{ .err = location.newCustomError(SelectorParseErrorKind.intoDefaultParserError(.invalid_state)) }; + } + + if (state.allowsTreeStructuralPseudoClasses()) { + // css.todo_stuff.match_ignore_ascii_case + if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, 
"first-child")) { + return .{ .result = .{ .nth = NthSelectorData.first(false) } }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "last-child")) { + return .{ .result = .{ .nth = NthSelectorData.last(false) } }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "only-child")) { + return .{ .result = .{ .nth = NthSelectorData.only(false) } }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "root")) { + return .{ .result = .root }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "empty")) { + return .{ .result = .empty }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "scope")) { + return .{ .result = .scope }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "host")) { + return .{ .result = .{ .host = null } }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "first-of-type")) { + return .{ .result = .{ .nth = NthSelectorData.first(true) } }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "last-of-type")) { + return .{ .result = .{ .nth = NthSelectorData.last(true) } }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "only-of-type")) { + return .{ .result = .{ .nth = NthSelectorData.only(true) } }; + } else {} + } + + // The view-transition pseudo elements accept the :only-child pseudo class. + // https://w3c.github.io/csswg-drafts/css-view-transitions-1/#pseudo-root + if (state.intersects(SelectorParsingState{ .after_view_transition = true })) { + if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "only-child")) { + return .{ .result = .{ .nth = NthSelectorData.only(false) } }; + } + } + + const pseudo_class = switch (parser.parseNonTsPseudoClass(location, name)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + if (state.intersects(SelectorParsingState{ .after_webkit_scrollbar = true })) { + if (!pseudo_class.isValidAfterWebkitScrollbar()) { + return .{ .err = location.newCustomError(SelectorParseErrorKind.intoDefaultParserError(.invalid_pseudo_class_after_webkit_scrollbar)) }; + } + } else if (state.intersects(SelectorParsingState{ .after_pseudo_element = true })) { + if (!pseudo_class.isUserActionState()) { + return .{ .err = location.newCustomError(SelectorParseErrorKind.intoDefaultParserError(.invalid_pseudo_class_after_pseudo_element)) }; + } + } else if (!pseudo_class.isValidBeforeWebkitScrollbar()) { + return .{ .err = location.newCustomError(SelectorParseErrorKind.intoDefaultParserError(.invalid_pseudo_class_before_webkit_scrollbar)) }; + } + + return .{ .result = .{ .non_ts_pseudo_class = pseudo_class } }; +} + +pub fn parse_nth_pseudo_class( + comptime Impl: type, + parser: *SelectorParser, + input: *css.Parser, + state: SelectorParsingState, + ty: NthType, +) Result(GenericComponent(Impl)) { + if (!state.allowsTreeStructuralPseudoClasses()) { + return .{ .err = input.newCustomError(SelectorParseErrorKind.intoDefaultParserError(.invalid_state)) }; + } + + const a, const b = switch (css.nth.parse_nth(input)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + const nth_data = NthSelectorData{ + .ty = ty, + .is_function = true, + .a = a, + .b = b, + }; + + if (!ty.allowsOfSelector()) { + return .{ .result = .{ .nth = nth_data } }; + } + + // Try to parse "of ". 
+ if (input.tryParse(css.Parser.expectIdentMatching, .{"of"}).isErr()) { + return .{ .result = .{ .nth = nth_data } }; + } + + // Whitespace between "of" and the selector list is optional + // https://github.com/w3c/csswg-drafts/issues/8285 + var child_state = child_state: { + var s = state; + s.skip_default_namespace = true; + s.disallow_pseudos = true; + break :child_state s; + }; + + const selectors = switch (SelectorList.parseWithState( + parser, + input, + &child_state, + .ignore_invalid_selector, + .none, + )) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + + return .{ .result = .{ + .nth_of = NthOfSelectorData(Impl){ + .data = nth_data, + .selectors = selectors.v.toOwnedSlice(input.allocator()), + }, + } }; +} + +/// `func` must be of the type: fn([]GenericSelector(Impl), ...@TypeOf(args_)) GenericComponent(Impl) +pub fn parse_is_or_where( + comptime Impl: type, + parser: *SelectorParser, + input: *css.Parser, + state: *SelectorParsingState, + comptime func: anytype, + args_: anytype, +) Result(GenericComponent(Impl)) { + bun.debugAssert(parser.parseIsAndWhere()); + // https://drafts.csswg.org/selectors/#matches-pseudo: + // + // Pseudo-elements cannot be represented by the matches-any + // pseudo-class; they are not valid within :is(). + // + var child_state = brk: { + var child_state = state.*; + child_state.skip_default_namespace = true; + child_state.disallow_pseudos = true; + break :brk child_state; + }; + + const inner = switch (SelectorList.parseWithState(parser, input, &child_state, parser.isAndWhereErrorRecovery(), NestingRequirement.none)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + if (child_state.after_nesting) { + state.after_nesting = true; + } + + const selector_slice = inner.v.toOwnedSlice(input.allocator()); + + const result = result: { + const args = brk: { + var args: std.meta.ArgsTuple(@TypeOf(func)) = undefined; + args[0] = selector_slice; + + inline for (args_, 1..) |a, i| { + args[i] = a; + } + + break :brk args; + }; + + break :result @call(.auto, func, args); + }; + + return .{ .result = result }; +} + +pub fn parse_has( + comptime Impl: type, + parser: *SelectorParser, + input: *css.Parser, + state: *SelectorParsingState, +) Result(GenericComponent(Impl)) { + var child_state = state.*; + const inner = switch (SelectorList.parseRelativeWithState( + parser, + input, + &child_state, + parser.isAndWhereErrorRecovery(), + .none, + )) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + + if (child_state.after_nesting) { + state.after_nesting = true; + } + return .{ .result = .{ .has = inner.v.toOwnedSlice(input.allocator()) } }; +} + +/// Level 3: Parse **one** simple_selector. (Though we might insert a second +/// implied "|*" type selector.) 
+pub fn parse_negation( + comptime Impl: type, + parser: *SelectorParser, + input: *css.Parser, + state: *SelectorParsingState, +) Result(GenericComponent(Impl)) { + var child_state = state.*; + child_state.skip_default_namespace = true; + child_state.disallow_pseudos = true; + + const list = switch (SelectorList.parseWithState(parser, input, &child_state, .discard_list, .none)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + + if (child_state.after_nesting) { + state.after_nesting = true; + } + + return .{ .result = .{ .negation = list.v.toOwnedSlice(input.allocator()) } }; +} + +pub fn OptionalQName(comptime Impl: type) type { + return union(enum) { + some: struct { QNamePrefix(Impl), ?[]const u8 }, + none: css.Token, + }; +} + +pub fn QNamePrefix(comptime Impl: type) type { + return union(enum) { + implicit_no_namespace, // `foo` in attr selectors + implicit_any_namespace, // `foo` in type selectors, without a default ns + implicit_default_namespace: Impl.SelectorImpl.NamespaceUrl, // `foo` in type selectors, with a default ns + explicit_no_namespace, // `|foo` + explicit_any_namespace, // `*|foo` + explicit_namespace: struct { Impl.SelectorImpl.NamespacePrefix, Impl.SelectorImpl.NamespaceUrl }, // `prefix|foo` + }; +} + +/// * `Err(())`: Invalid selector, abort +/// * `Ok(None(token))`: Not a simple selector, could be something else. `input` was not consumed, +/// but the token is still returned. +/// * `Ok(Some(namespace, local_name))`: `None` for the local name means a `*` universal selector +pub fn parse_qualified_name( + comptime Impl: type, + parser: *SelectorParser, + input: *css.Parser, + in_attr_selector: bool, +) Result(OptionalQName(Impl)) { + const start = input.state(); + + const tok = switch (input.nextIncludingWhitespace()) { + .result => |v| v, + .err => |e| { + input.reset(&start); + return .{ .err = e }; + }, + }; + switch (tok.*) { + .ident => |value| { + const after_ident = input.state(); + const n = if (input.nextIncludingWhitespace().asValue()) |t| t.* == .delim and t.delim == '|' else false; + if (n) { + const prefix: Impl.SelectorImpl.NamespacePrefix = .{ .v = value }; + const result: ?Impl.SelectorImpl.NamespaceUrl = parser.namespaceForPrefix(prefix); + const url: Impl.SelectorImpl.NamespaceUrl = brk: { + if (result) |url| break :brk url; + return .{ .err = input.newCustomError(SelectorParseErrorKind.intoDefaultParserError(.{ .unsupported_pseudo_class_or_element = value })) }; + }; + return parse_qualified_name_eplicit_namespace_helper( + Impl, + input, + .{ .explicit_namespace = .{ prefix, url } }, + in_attr_selector, + ); + } else { + input.reset(&after_ident); + if (in_attr_selector) return .{ .result = .{ .some = .{ .implicit_no_namespace, value } } }; + return .{ .result = parse_qualified_name_default_namespace_helper(Impl, parser, value) }; + } + }, + .delim => |c| { + switch (c) { + '*' => { + const after_star = input.state(); + const result = input.nextIncludingWhitespace(); + if (result.asValue()) |t| if (t.* == .delim and t.delim == '|') + return parse_qualified_name_eplicit_namespace_helper( + Impl, + input, + .explicit_any_namespace, + in_attr_selector, + ); + input.reset(&after_star); + if (in_attr_selector) { + switch (result) { + .result => |t| { + return .{ .err = after_star.sourceLocation().newCustomError(SelectorParseErrorKind{ + .expected_bar_in_attr = t.*, + }) }; + }, + .err => |e| { + return .{ .err = e }; + }, + } + } else { + return .{ .result = parse_qualified_name_default_namespace_helper(Impl, parser, null) }; + } + }, 
+ '|' => return parse_qualified_name_eplicit_namespace_helper(Impl, input, .explicit_no_namespace, in_attr_selector), + else => {}, + } + }, + else => {}, + } + input.reset(&start); + return .{ .result = .{ .none = tok.* } }; +} + +fn parse_qualified_name_default_namespace_helper( + comptime Impl: type, + parser: *SelectorParser, + local_name: ?[]const u8, +) OptionalQName(Impl) { + const namespace: QNamePrefix(Impl) = if (parser.defaultNamespace()) |url| .{ .implicit_default_namespace = url } else .implicit_any_namespace; + return .{ + .some = .{ + namespace, + local_name, + }, + }; +} + +fn parse_qualified_name_eplicit_namespace_helper( + comptime Impl: type, + input: *css.Parser, + namespace: QNamePrefix(Impl), + in_attr_selector: bool, +) Result(OptionalQName(Impl)) { + const location = input.currentSourceLocation(); + const t = switch (input.nextIncludingWhitespace()) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + switch (t.*) { + .ident => |local_name| return .{ .result = .{ .some = .{ namespace, local_name } } }, + .delim => |c| { + if (c == '*') { + return .{ .result = .{ .some = .{ namespace, null } } }; + } + }, + else => {}, + } + if (in_attr_selector) { + const e = SelectorParseErrorKind{ .invalid_qual_name_in_attr = t.* }; + return .{ .err = location.newCustomError(e) }; + } + return .{ .err = location.newCustomError(SelectorParseErrorKind{ .explicit_namespace_unexpected_token = t.* }) }; +} + +pub fn LocalName(comptime Impl: type) type { + return struct { + name: Impl.SelectorImpl.LocalName, + lower_name: Impl.SelectorImpl.LocalName, + + pub fn toCss(this: *const @This(), comptime W: type, dest: *Printer(W)) PrintErr!void { + return css.IdentFns.toCss(&this.name, W, dest); + } + + pub fn __generateEql() void {} + pub fn __generateDeepClone() void {} + pub fn __generateHash() void {} + }; +} + +/// An attribute selector can have 's' or 'i' as flags, or no flags at all. +pub const AttributeFlags = enum { + // Matching should be case-sensitive ('s' flag). + case_sensitive, + // Matching should be case-insensitive ('i' flag). + ascii_case_insensitive, + // No flags. Matching behavior depends on the name of the attribute. + case_sensitivity_depends_on_name, + + pub fn toCaseSensitivity(this: AttributeFlags, local_name: []const u8, have_namespace: bool) attrs.ParsedCaseSensitivity { + return switch (this) { + .case_sensitive => .explicit_case_sensitive, + .ascii_case_insensitive => .ascii_case_insensitive, + .case_sensitivity_depends_on_name => { + // + const AsciiCaseInsensitiveHtmlAttributes = enum { + dir, + http_equiv, + rel, + enctype, + @"align", + accept, + nohref, + lang, + bgcolor, + direction, + valign, + checked, + frame, + link, + accept_charset, + hreflang, + text, + valuetype, + language, + nowrap, + vlink, + disabled, + noshade, + codetype, + @"defer", + noresize, + target, + scrolling, + rules, + scope, + rev, + media, + method, + charset, + alink, + selected, + multiple, + color, + shape, + type, + clear, + compact, + face, + declare, + axis, + readonly, + }; + const Map = comptime bun.ComptimeEnumMap(AsciiCaseInsensitiveHtmlAttributes); + if (!have_namespace and Map.has(local_name)) return .ascii_case_insensitive_if_in_html_element_in_html_document; + return .case_sensitive; + }, + }; + } +}; + +/// A [view transition part name](https://w3c.github.io/csswg-drafts/css-view-transitions-1/#typedef-pt-name-selector). 
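+///
+/// Either `*` or a custom identifier, e.g. the `card` in `::view-transition-group(card)`.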
+pub const ViewTransitionPartName = union(enum) { + /// * + all, + /// + name: css.css_values.ident.CustomIdent, + + pub fn toCss(this: *const @This(), comptime W: type, dest: *css.Printer(W)) css.PrintErr!void { + return switch (this.*) { + .all => try dest.writeStr("*"), + .name => |name| try css.CustomIdentFns.toCss(&name, W, dest), + }; + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn hash(this: *const @This(), hasher: *std.hash.Wyhash) void { + return css.implementHash(@This(), this, hasher); + } + + pub fn deepClone(this: *const @This(), allocator: Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } +}; + +pub fn parse_attribute_flags(input: *css.Parser) Result(AttributeFlags) { + const location = input.currentSourceLocation(); + const token = switch (input.next()) { + .result => |v| v, + .err => { + // Selectors spec says language-defined; HTML says it depends on the + // exact attribute name. + return .{ .result = AttributeFlags.case_sensitivity_depends_on_name }; + }, + }; + + const ident = if (token.* == .ident) token.ident else return .{ .err = location.newBasicUnexpectedTokenError(token.*) }; + + if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(ident, "i")) { + return .{ .result = AttributeFlags.ascii_case_insensitive }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(ident, "s")) { + return .{ .result = AttributeFlags.case_sensitive }; + } else { + return .{ .err = location.newBasicUnexpectedTokenError(token.*) }; + } +} diff --git a/src/css/selectors/selector.zig b/src/css/selectors/selector.zig new file mode 100644 index 0000000000..84364080ae --- /dev/null +++ b/src/css/selectors/selector.zig @@ -0,0 +1,1608 @@ +const std = @import("std"); +const Allocator = std.mem.Allocator; +const bun = @import("root").bun; +const logger = bun.logger; +const Log = logger.Log; + +pub const css = @import("../css_parser.zig"); +const CSSString = css.CSSString; +const CSSStringFns = css.CSSStringFns; + +pub const Printer = css.Printer; +pub const PrintErr = css.PrintErr; + +const Result = css.Result; +const PrintResult = css.PrintResult; + +const ArrayList = std.ArrayListUnmanaged; + +pub const Selector = parser.Selector; +pub const SelectorList = parser.SelectorList; +pub const Component = parser.Component; +pub const PseudoClass = parser.PseudoClass; +pub const PseudoElement = parser.PseudoElement; + +const debug = bun.Output.scoped(.CSS_SELECTORS, false); + +/// Our implementation of the `SelectorImpl` interface +/// +pub const impl = struct { + pub const Selectors = struct { + pub const SelectorImpl = struct { + pub const AttrValue = css.css_values.string.CSSString; + pub const Identifier = css.css_values.ident.Ident; + pub const LocalName = css.css_values.ident.Ident; + pub const NamespacePrefix = css.css_values.ident.Ident; + pub const NamespaceUrl = []const u8; + pub const BorrowedNamespaceUrl = []const u8; + pub const BorrowedLocalName = css.css_values.ident.Ident; + + pub const NonTSPseudoClass = parser.PseudoClass; + pub const PseudoElement = parser.PseudoElement; + pub const VendorPrefix = css.VendorPrefix; + pub const ExtraMatchingData = void; + }; + }; +}; + +pub const parser = @import("./parser.zig"); + +/// Returns whether two selector lists are equivalent, i.e. the same minus any vendor prefix differences. 
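+///
+/// For example, `::placeholder` and `::-webkit-input-placeholder` compare as equivalent here,
+/// as do `:is(.a, .b)` and `:-webkit-any(.a, .b)` (see the `is`/`any` handling below).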
+pub fn isEquivalent(selectors: []const Selector, other: []const Selector) bool { + if (selectors.len != other.len) return false; + + for (selectors, 0..) |*a, i| { + const b = &other[i]; + if (a.len() != b.len()) return false; + + for (a.components.items, b.components.items) |*a_comp, *b_comp| { + const is_equivalent = blk: { + if (a_comp.* == .non_ts_pseudo_class and b_comp.* == .non_ts_pseudo_class) { + break :blk a_comp.non_ts_pseudo_class.isEquivalent(&b_comp.non_ts_pseudo_class); + } else if (a_comp.* == .pseudo_element and b_comp.* == .pseudo_element) { + break :blk a_comp.pseudo_element.isEquivalent(&b_comp.pseudo_element); + } else if ((a_comp.* == .any and b_comp.* == .is) or + (a_comp.* == .is and b_comp.* == .any) or + (a_comp.* == .any and b_comp.* == .any) or + (a_comp.* == .is and b_comp.* == .is)) + { + const a_selectors = switch (a_comp.*) { + .any => |v| v.selectors, + .is => |v| v, + else => unreachable, + }; + const b_selectors = switch (b_comp.*) { + .any => |v| v.selectors, + .is => |v| v, + else => unreachable, + }; + break :blk isEquivalent(a_selectors, b_selectors); + } else { + break :blk Component.eql(a_comp, b_comp); + } + }; + + if (!is_equivalent) { + return false; + } + } + } + + return true; +} + +/// Downlevels the given selectors to be compatible with the given browser targets. +/// Returns the necessary vendor prefixes. +pub fn downlevelSelectors(allocator: Allocator, selectors: []Selector, targets: css.targets.Targets) css.VendorPrefix { + var necessary_prefixes = css.VendorPrefix.empty(); + for (selectors) |*selector| { + for (selector.components.items) |*component| { + necessary_prefixes.insert(downlevelComponent(allocator, component, targets)); + } + } + return necessary_prefixes; +} + +pub fn downlevelComponent(allocator: Allocator, component: *Component, targets: css.targets.Targets) css.VendorPrefix { + return switch (component.*) { + .non_ts_pseudo_class => |*pc| { + return switch (pc.*) { + .dir => |*d| { + if (targets.shouldCompileSame(.dir_selector)) { + component.* = downlevelDir(allocator, d.direction, targets); + return downlevelComponent(allocator, component, targets); + } + return css.VendorPrefix.empty(); + }, + .lang => |l| { + // :lang() with multiple languages is not supported everywhere. + // compile this to :is(:lang(a), :lang(b)) etc. + if (l.languages.items.len > 1 and targets.shouldCompileSame(.lang_selector_list)) { + component.* = .{ .is = langListToSelectors(allocator, l.languages.items) }; + return downlevelComponent(allocator, component, targets); + } + return css.VendorPrefix.empty(); + }, + else => pc.getNecessaryPrefixes(targets), + }; + }, + .pseudo_element => |*pe| pe.getNecessaryPrefixes(targets), + .is => |selectors| { + var necessary_prefixes = downlevelSelectors(allocator, selectors, targets); + + // Convert :is to :-webkit-any/:-moz-any if needed. + // All selectors must be simple, no combinators are supported. + if (targets.shouldCompileSame(.is_selector) and + !shouldUnwrapIs(selectors) and brk: { + for (selectors) |*selector| { + if (selector.hasCombinator()) break :brk false; + } + break :brk true; + }) { + necessary_prefixes.insert(targets.prefixes(css.VendorPrefix{ .none = true }, .any_pseudo)); + } else { + necessary_prefixes.insert(css.VendorPrefix{ .none = true }); + } + + return necessary_prefixes; + }, + .negation => |selectors| { + var necessary_prefixes = downlevelSelectors(allocator, selectors, targets); + + // Downlevel :not(.a, .b) -> :not(:is(.a, .b)) if not list is unsupported. 
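+ // (With single classes, `:not(:is(.a, .b))` keeps the specificity at (0,1,0), whereas
+ // `:not(.a):not(.b)` would contribute (0,2,0).)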
+ // We need to use :is() / :-webkit-any() rather than :not(.a):not(.b) to ensure the specificity is equivalent. + // https://drafts.csswg.org/selectors/#specificity-rules + if (selectors.len > 1 and css.targets.Targets.shouldCompileSame(&targets, .not_selector_list)) { + const is: Selector = Selector.fromComponent(allocator, Component{ .is = selectors: { + const new_selectors = allocator.alloc(Selector, selectors.len) catch bun.outOfMemory(); + for (new_selectors, selectors) |*new, *sel| { + new.* = sel.deepClone(allocator); + } + break :selectors new_selectors; + } }); + var list = ArrayList(Selector).initCapacity(allocator, 1) catch bun.outOfMemory(); + list.appendAssumeCapacity(is); + component.* = .{ .negation = list.items }; + + if (targets.shouldCompileSame(.is_selector)) { + necessary_prefixes.insert(targets.prefixes(css.VendorPrefix{ .none = true }, .any_pseudo)); + } else { + necessary_prefixes.insert(css.VendorPrefix{ .none = true }); + } + } + + return necessary_prefixes; + }, + .where, .has => |s| downlevelSelectors(allocator, s, targets), + .any => |*a| downlevelSelectors(allocator, a.selectors, targets), + else => css.VendorPrefix.empty(), + }; +} + +const RTL_LANGS: []const []const u8 = &.{ + "ae", "ar", "arc", "bcc", "bqi", "ckb", "dv", "fa", "glk", "he", "ku", "mzn", "nqo", "pnb", "ps", "sd", "ug", + "ur", "yi", +}; + +fn downlevelDir(allocator: Allocator, dir: parser.Direction, targets: css.targets.Targets) Component { + // Convert :dir to :lang. If supported, use a list of languages in a single :lang, + // otherwise, use :is/:not, which may be further downleveled to e.g. :-webkit-any. + if (!targets.shouldCompileSame(.lang_selector_list)) { + const c = Component{ + .non_ts_pseudo_class = PseudoClass{ + .lang = .{ .languages = lang: { + var list = ArrayList([]const u8).initCapacity(allocator, RTL_LANGS.len) catch bun.outOfMemory(); + list.appendSliceAssumeCapacity(RTL_LANGS); + break :lang list; + } }, + }, + }; + if (dir == .ltr) return Component{ + .negation = negation: { + var list = allocator.alloc(Selector, 1) catch bun.outOfMemory(); + list[0] = Selector.fromComponent(allocator, c); + break :negation list; + }, + }; + return c; + } else { + if (dir == .ltr) return Component{ .negation = langListToSelectors(allocator, RTL_LANGS) }; + return Component{ .is = langListToSelectors(allocator, RTL_LANGS) }; + } +} + +fn langListToSelectors(allocator: Allocator, langs: []const []const u8) []Selector { + var selectors = allocator.alloc(Selector, langs.len) catch bun.outOfMemory(); + for (langs, selectors[0..]) |lang, *sel| { + sel.* = Selector.fromComponent(allocator, Component{ + .non_ts_pseudo_class = PseudoClass{ + .lang = .{ .languages = langs: { + var list = ArrayList([]const u8).initCapacity(allocator, 1) catch bun.outOfMemory(); + list.appendAssumeCapacity(lang); + break :langs list; + } }, + }, + }); + } + return selectors; +} + +/// Returns the vendor prefix (if any) used in the given selector list. +/// If multiple vendor prefixes are seen, this is invalid, and an empty result is returned. +pub fn getPrefix(selectors: *const SelectorList) css.VendorPrefix { + var prefix = css.VendorPrefix.empty(); + for (selectors.v.slice()) |*selector| { + for (selector.components.items) |*component_| { + const component: *const Component = component_; + const p = switch (component.*) { + // Return none rather than empty for these so that we call downlevel_selectors. 
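+ // For example, `:dir(rtl)` and multi-language `:lang()` carry no vendor prefix themselves,
+ // but may still need rewriting (see `downlevelComponent` above).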
+ .non_ts_pseudo_class => |*pc| switch (pc.*) { + .lang => css.VendorPrefix{ .none = true }, + .dir => css.VendorPrefix{ .none = true }, + else => pc.getPrefix(), + }, + .is => css.VendorPrefix{ .none = true }, + .where => css.VendorPrefix{ .none = true }, + .has => css.VendorPrefix{ .none = true }, + .negation => css.VendorPrefix{ .none = true }, + .any => |*any| any.vendor_prefix, + .pseudo_element => |*pe| pe.getPrefix(), + else => css.VendorPrefix.empty(), + }; + + if (!p.isEmpty()) { + // Allow none to be mixed with a prefix. + const prefix_without_none = prefix.maskOut(css.VendorPrefix{ .none = true }); + if (prefix_without_none.isEmpty() or prefix_without_none.eql(p)) { + prefix.insert(p); + } else { + return css.VendorPrefix.empty(); + } + } + } + } + + return prefix; +} + +pub fn isCompatible(selectors: []const parser.Selector, targets: css.targets.Targets) bool { + const F = css.compat.Feature; + for (selectors) |*selector| { + for (selector.components.items) |*component| { + const feature = switch (component.*) { + .id, .class, .local_name => continue, + + .explicit_any_namespace, + .explicit_no_namespace, + .default_namespace, + .namespace, + => F.namespaces, + + .explicit_universal_type => F.selectors2, + + .attribute_in_no_namespace_exists => F.selectors2, + + .attribute_in_no_namespace => |x| brk: { + if (x.case_sensitivity != parser.attrs.ParsedCaseSensitivity.case_sensitive) break :brk F.case_insensitive; + break :brk switch (x.operator) { + .equal, .includes, .dash_match => F.selectors2, + .prefix, .substring, .suffix => F.selectors3, + }; + }, + + .attribute_other => |attr| switch (attr.operation) { + .exists => F.selectors2, + .with_value => |*x| brk: { + if (x.case_sensitivity != parser.attrs.ParsedCaseSensitivity.case_sensitive) break :brk F.case_insensitive; + + break :brk switch (x.operator) { + .equal, .includes, .dash_match => F.selectors2, + .prefix, .substring, .suffix => F.selectors3, + }; + }, + }, + + .empty, .root => F.selectors3, + .negation => |sels| { + // :not() selector list is not forgiving. + if (!targets.isCompatible(F.selectors3) or !isCompatible(sels, targets)) return false; + continue; + }, + + .nth => |*data| brk: { + if (data.ty == .child and data.a == 0 and data.b == 1) break :brk F.selectors2; + if (data.ty == .col or data.ty == .last_col) return false; + break :brk F.selectors3; + }, + .nth_of => |*n| { + if (!targets.isCompatible(F.nth_child_of) or !isCompatible(n.selectors, targets)) return false; + continue; + }, + + // These support forgiving selector lists, so no need to check nested selectors. + .is => |sels| brk: { + // ... except if we are going to unwrap them. 
+ if (shouldUnwrapIs(sels) and isCompatible(sels, targets)) continue; + break :brk F.is_selector; + }, + .where, .nesting => F.is_selector, + .any => return false, + .has => |sels| { + if (!targets.isCompatible(F.has_selector) or !isCompatible(sels, targets)) return false; + continue; + }, + + .scope, .host, .slotted => F.shadowdomv1, + + .part => F.part_pseudo, + + .non_ts_pseudo_class => |*pseudo| brk: { + switch (pseudo.*) { + .link, .visited, .active, .hover, .focus, .lang => break :brk F.selectors2, + + .checked, .disabled, .enabled, .target => break :brk F.selectors3, + + .any_link => |prefix| { + if (prefix.eql(css.VendorPrefix{ .none = true })) break :brk F.any_link; + }, + .indeterminate => break :brk F.indeterminate_pseudo, + + .fullscreen => |prefix| { + if (prefix.eql(css.VendorPrefix{ .none = true })) break :brk F.fullscreen; + }, + + .focus_visible => break :brk F.focus_visible, + .focus_within => break :brk F.focus_within, + .default => break :brk F.default_pseudo, + .dir => break :brk F.dir_selector, + .optional => break :brk F.optional_pseudo, + .placeholder_shown => |prefix| { + if (prefix.eql(css.VendorPrefix{ .none = true })) break :brk F.placeholder_shown; + }, + + inline .read_only, .read_write => |prefix| { + if (prefix.eql(css.VendorPrefix{ .none = true })) break :brk F.read_only_write; + }, + + .valid, .invalid, .required => break :brk F.form_validation, + .in_range, .out_of_range => break :brk F.in_out_of_range, + + .autofill => |prefix| { + if (prefix.eql(css.VendorPrefix{ .none = true })) break :brk F.autofill; + }, + + // Experimental, no browser support. + .current, + .past, + .future, + .playing, + .paused, + .seeking, + .stalled, + .buffering, + .muted, + .volume_locked, + .target_within, + .local_link, + .blank, + .user_invalid, + .user_valid, + .defined, + => return false, + + .custom => {}, + + else => {}, + } + return false; + }, + + .pseudo_element => |*pseudo| brk: { + switch (pseudo.*) { + .after, .before => break :brk F.gencontent, + .first_line => break :brk F.first_line, + .first_letter => break :brk F.first_letter, + .selection => |prefix| { + if (prefix.eql(css.VendorPrefix{ .none = true })) break :brk F.selection; + }, + .placeholder => |prefix| { + if (prefix.eql(css.VendorPrefix{ .none = true })) break :brk F.placeholder; + }, + .marker => break :brk F.marker_pseudo, + .backdrop => |prefix| { + if (prefix.eql(css.VendorPrefix{ .none = true })) break :brk F.dialog; + }, + .cue => break :brk F.cue, + .cue_function => break :brk F.cue_function, + .custom => return false, + else => {}, + } + return false; + }, + + .combinator => |*combinator| brk: { + break :brk switch (combinator.*) { + .child, .next_sibling => F.selectors2, + .later_sibling => F.selectors3, + else => continue, + }; + }, + }; + + if (!targets.isCompatible(feature)) return false; + } + } + + return true; +} + +/// Determines whether a selector list contains only unused selectors. +/// A selector is considered unused if it contains a class or id component that exists in the set of unused symbols. 
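+///
+/// A rough sketch of the intent (hypothetical symbol names; the exact key format
+/// depends on how the caller populates `unused_symbols`):
+///
+/// ```zig
+/// var unused: std.StringArrayHashMapUnmanaged(void) = .{};
+/// unused.put(allocator, "foo", {}) catch bun.outOfMemory();
+/// // ".foo, .foo .bar" -> every selector references `foo` -> isUnused returns true
+/// // ".foo, .baz"      -> ".baz" does not reference `foo` -> isUnused returns false
+/// ```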
+pub fn isUnused( + selectors: []const parser.Selector, + unused_symbols: *const std.StringArrayHashMapUnmanaged(void), + parent_is_unused: bool, +) bool { + if (unused_symbols.count() == 0) return false; + + for (selectors) |*selector| { + if (!isSelectorUnused(selector, unused_symbols, parent_is_unused)) return false; + } + + return true; +} + +fn isSelectorUnused( + selector: *const parser.Selector, + unused_symbols: *const std.StringArrayHashMapUnmanaged(void), + parent_is_unused: bool, +) bool { + for (selector.components.items) |*component| { + switch (component.*) { + .class, .id => |ident| { + if (unused_symbols.contains(ident.v)) return true; + }, + .is, .where => |is| { + if (isUnused(is, unused_symbols, parent_is_unused)) return true; + }, + .any => |any| { + if (isUnused(any.selectors, unused_symbols, parent_is_unused)) return true; + }, + .nesting => { + if (parent_is_unused) return true; + }, + else => {}, + } + } + return false; +} + +/// The serialization module ported from lightningcss. +/// +/// Note that we have two serialization modules, one from lightningcss and one from servo. +/// +/// This is because it actually uses both implementations. This is confusing. +pub const serialize = struct { + pub fn serializeSelectorList( + list: []const parser.Selector, + comptime W: type, + dest: *Printer(W), + context: ?*const css.StyleContext, + is_relative: bool, + ) PrintErr!void { + var first = true; + for (list) |*selector| { + if (!first) { + try dest.delim(',', false); + } + first = false; + try serializeSelector(selector, W, dest, context, is_relative); + } + } + + pub fn serializeSelector( + selector: *const parser.Selector, + comptime W: type, + dest: *css.Printer(W), + context: ?*const css.StyleContext, + __is_relative: bool, + ) PrintErr!void { + var is_relative = __is_relative; + + if (comptime bun.Environment.isDebug) { + debug("Selector components:\n", .{}); + for (selector.components.items) |*comp| { + debug(" {}\n", .{comp}); + } + + debug("Compound selector iter\n", .{}); + var compound_selectors = CompoundSelectorIter{ .sel = selector }; + while (compound_selectors.next()) |comp| { + for (comp) |c| { + debug(" {}, ", .{c}); + } + } + debug("\n", .{}); + } + + // Compound selectors invert the order of their contents, so we need to + // undo that during serialization. + // + // This two-iterator strategy involves walking over the selector twice. + // We could do something more clever, but selector serialization probably + // isn't hot enough to justify it, and the stringification likely + // dominates anyway. + // + // NB: A parse-order iterator is a Rev<>, which doesn't expose as_slice(), + // which we need for |split|. So we split by combinators on a match-order + // sequence and then reverse. + var combinators = CombinatorIter{ .sel = selector }; + var compound_selectors = CompoundSelectorIter{ .sel = selector }; + const should_compile_nesting = dest.targets.shouldCompileSame(.nesting); + + var first = true; + var combinators_exhausted = false; + while (compound_selectors.next()) |_compound_| { + bun.debugAssert(!combinators_exhausted); + var compound = _compound_; + + // Skip implicit :scope in relative selectors (e.g. 
:has(:scope > foo) -> :has(> foo)) + if (is_relative and compound.len >= 1 and compound[0] == .scope) { + if (combinators.next()) |*combinator| { + try serializeCombinator(combinator, W, dest); + } + compound = compound[1..]; + is_relative = false; + } + + // https://drafts.csswg.org/cssom/#serializing-selectors + if (compound.len == 0) continue; + + const has_leading_nesting = first and compound[0] == .nesting; + const first_index: usize = if (has_leading_nesting) 1 else 0; + first = false; + + // 1. If there is only one simple selector in the compound selectors + // which is a universal selector, append the result of + // serializing the universal selector to s. + // + // Check if `!compound.empty()` first--this can happen if we have + // something like `... > ::before`, because we store `>` and `::` + // both as combinators internally. + // + // If we are in this case, after we have serialized the universal + // selector, we skip Step 2 and continue with the algorithm. + const can_elide_namespace, const first_non_namespace = if (first_index >= compound.len) + .{ true, first_index } + else switch (compound[0]) { + .explicit_any_namespace, .explicit_no_namespace, .namespace => .{ false, first_index + 1 }, + .default_namespace => .{ true, first_index + 1 }, + else => .{ true, first_index }, + }; + var perform_step_2 = true; + const next_combinator = combinators.next(); + if (first_non_namespace == compound.len - 1) { + // We have to be careful here, because if there is a + // pseudo element "combinator" there isn't really just + // the one simple selector. Technically this compound + // selector contains the pseudo element selector as well + // -- Combinator::PseudoElement, just like + // Combinator::SlotAssignment, don't exist in the + // spec. + if (next_combinator == .pseudo_element and compound[first_non_namespace].asCombinator() == .slot_assignment) { + // do nothing + } else if (compound[first_non_namespace] == .explicit_universal_type) { + // Iterate over everything so we serialize the namespace + // too. + const swap_nesting = has_leading_nesting and should_compile_nesting; + const slice = if (swap_nesting) brk: { + // Swap nesting and type selector (e.g. &div -> div&). + break :brk compound[@min(1, compound.len)..]; + } else compound; + + for (slice) |*simple| { + try serializeComponent(simple, W, dest, context); + } + + if (swap_nesting) { + try serializeNesting(W, dest, context, false); + } + + // Skip step 2, which is an "otherwise". + perform_step_2 = false; + } else { + // do nothing + } + } + + // 2. Otherwise, for each simple selector in the compound selectors + // that is not a universal selector of which the namespace prefix + // maps to a namespace that is not the default namespace + // serialize the simple selector and append the result to s. + // + // See https://github.com/w3c/csswg-drafts/issues/1606, which is + // proposing to change this to match up with the behavior asserted + // in cssom/serialize-namespaced-type-selectors.html, which the + // following code tries to match. + if (perform_step_2) { + const iter = compound; + var i: usize = 0; + if (has_leading_nesting and + should_compile_nesting and + isTypeSelector(if (first_non_namespace < compound.len) &compound[first_non_namespace] else null)) + { + // Swap nesting and type selector (e.g. &div -> div&). + // This ensures that the compiled selector is valid. e.g. (div.foo is valid, .foodiv is not). 
+ const nesting = &iter[i]; + i += 1; + const local = &iter[i]; + i += 1; + try serializeComponent(local, W, dest, context); + + // Also check the next item in case of namespaces. + if (first_non_namespace > first_index) { + const local2 = &iter[i]; + i += 1; + try serializeComponent(local2, W, dest, context); + } + + try serializeComponent(nesting, W, dest, context); + } else if (has_leading_nesting and should_compile_nesting) { + // Nesting selector may serialize differently if it is leading, due to type selectors. + i += 1; + try serializeNesting(W, dest, context, true); + } + + if (i < compound.len) { + for (iter[i..]) |*simple| { + if (simple.* == .explicit_universal_type) { + // Can't have a namespace followed by a pseudo-element + // selector followed by a universal selector in the same + // compound selector, so we don't have to worry about the + // real namespace being in a different `compound`. + if (can_elide_namespace) { + continue; + } + } + try serializeComponent(simple, W, dest, context); + } + } + } + + // 3. If this is not the last part of the chain of the selector + // append a single SPACE (U+0020), followed by the combinator + // ">", "+", "~", ">>", "||", as appropriate, followed by another + // single SPACE (U+0020) if the combinator was not whitespace, to + // s. + if (next_combinator) |*c| { + try serializeCombinator(c, W, dest); + } else { + combinators_exhausted = true; + } + + // 4. If this is the last part of the chain of the selector and + // there is a pseudo-element, append "::" followed by the name of + // the pseudo-element, to s. + // + // (we handle this above) + } + } + + pub fn serializeComponent( + component: *const parser.Component, + comptime W: type, + dest: *css.Printer(W), + context: ?*const css.StyleContext, + ) PrintErr!void { + switch (component.*) { + .combinator => |c| return serializeCombinator(&c, W, dest), + .attribute_in_no_namespace => |*v| { + try dest.writeChar('['); + try css.css_values.ident.IdentFns.toCss(&v.local_name, W, dest); + try v.operator.toCss(W, dest); + + if (dest.minify) { + // PERF: should we put a scratch buffer in the printer + // Serialize as both an identifier and a string and choose the shorter one. + var id = std.ArrayList(u8).init(dest.allocator); + const writer = id.writer(); + css.serializer.serializeIdentifier(v.value, writer) catch return dest.addFmtError(); + + const s = try css.to_css.string(dest.allocator, CSSString, &v.value, css.PrinterOptions{}, dest.import_records); + + if (id.items.len > 0 and id.items.len < s.len) { + try dest.writeStr(id.items); + } else { + try dest.writeStr(s); + } + } else { + try css.CSSStringFns.toCss(&v.value, W, dest); + } + + switch (v.case_sensitivity) { + .case_sensitive, .ascii_case_insensitive_if_in_html_element_in_html_document => {}, + .ascii_case_insensitive => try dest.writeStr(" i"), + .explicit_case_sensitive => try dest.writeStr(" s"), + } + return dest.writeChar(']'); + }, + .is, .where, .negation, .any => { + switch (component.*) { + .where => try dest.writeStr(":where("), + .is => |selectors| { + // If there's only one simple selector, serialize it directly. 
+ if (shouldUnwrapIs(selectors)) { + return serializeSelector(&selectors[0], W, dest, context, false); + } + + const vp = dest.vendor_prefix; + if (vp.intersects(css.VendorPrefix{ .webkit = true, .moz = true })) { + try dest.writeChar(':'); + try vp.toCss(W, dest); + try dest.writeStr("any("); + } else { + try dest.writeStr(":is("); + } + }, + .negation => { + try dest.writeStr(":not("); + }, + .any => |v| { + const vp = dest.vendor_prefix.bitwiseOr(v.vendor_prefix); + if (vp.intersects(css.VendorPrefix{ .webkit = true, .moz = true })) { + try dest.writeChar(':'); + try vp.toCss(W, dest); + try dest.writeStr("any("); + } else { + try dest.writeStr(":is("); + } + }, + else => unreachable, + } + try serializeSelectorList(switch (component.*) { + .where, .is, .negation => |list| list, + .any => |v| v.selectors, + else => unreachable, + }, W, dest, context, false); + return dest.writeStr(")"); + }, + .has => |list| { + try dest.writeStr(":has("); + try serializeSelectorList(list, W, dest, context, true); + return dest.writeStr(")"); + }, + .non_ts_pseudo_class => |*pseudo| { + return serializePseudoClass(pseudo, W, dest, context); + }, + .pseudo_element => |*pseudo| { + return serializePseudoElement(pseudo, W, dest, context); + }, + .nesting => { + return serializeNesting(W, dest, context, false); + }, + .class => |class| { + try dest.writeChar('.'); + return dest.writeIdent(class.v, true); + }, + .id => |id| { + try dest.writeChar('#'); + return dest.writeIdent(id.v, true); + }, + .host => |selector| { + try dest.writeStr(":host"); + if (selector) |*sel| { + try dest.writeChar('('); + try serializeSelector(sel, W, dest, dest.context(), false); + try dest.writeChar(')'); + } + return; + }, + .slotted => |*selector| { + try dest.writeStr("::slotted("); + try serializeSelector(selector, W, dest, dest.context(), false); + try dest.writeChar(')'); + }, + // .nth => |nth_data| { + // try nth_data.writeStart(W, dest, nth_data.isFunction()); + // if (nth_data.isFunction()) { + // try nth_data.writeAffine(W, dest); + // try dest.writeChar(')'); + // } + // }, + + else => { + try tocss_servo.toCss_Component(component, W, dest); + }, + } + } + + pub fn serializeCombinator( + combinator: *const parser.Combinator, + comptime W: type, + dest: *Printer(W), + ) PrintErr!void { + switch (combinator.*) { + .child => try dest.delim('>', true), + .descendant => try dest.writeStr(" "), + .next_sibling => try dest.delim('+', true), + .later_sibling => try dest.delim('~', true), + .deep => try dest.writeStr(" /deep/ "), + .deep_descendant => { + try dest.whitespace(); + try dest.writeStr(">>>"); + try dest.whitespace(); + }, + .pseudo_element, .part, .slot_assignment => return, + } + } + + pub fn serializePseudoClass( + pseudo_class: *const parser.PseudoClass, + comptime W: type, + dest: *Printer(W), + context: ?*const css.StyleContext, + ) PrintErr!void { + switch (pseudo_class.*) { + .lang => { + try dest.writeStr(":lang("); + var first = true; + for (pseudo_class.lang.languages.items) |lang| { + if (first) { + first = false; + } else { + try dest.delim(',', false); + } + css.serializer.serializeIdentifier(lang, dest) catch return dest.addFmtError(); + } + return dest.writeStr(")"); + }, + .dir => { + const dir = pseudo_class.dir.direction; + try dest.writeStr(":dir("); + try dir.toCss(W, dest); + return try dest.writeStr(")"); + }, + else => {}, + } + + const Helpers = struct { + pub inline fn writePrefixed( + d: *Printer(W), + prefix: css.VendorPrefix, + comptime val: []const u8, + ) PrintErr!void { + try 
d.writeChar(':'); + // If the printer has a vendor prefix override, use that. + const vp = if (!d.vendor_prefix.isEmpty()) + d.vendor_prefix.bitwiseOr(prefix).orNone() + else + prefix; + + try vp.toCss(W, d); + try d.writeStr(val); + } + pub inline fn pseudo( + d: *Printer(W), + comptime key: []const u8, + comptime s: []const u8, + ) PrintErr!void { + const key_snake_case = comptime key_snake_case: { + var buf: [key.len]u8 = undefined; + for (key, 0..) |c, i| { + buf[i] = if (c >= 'A' and c <= 'Z') c + 32 else if (c == '-') '_' else c; + } + const buf2 = buf; + break :key_snake_case buf2; + }; + const _class = if (d.pseudo_classes) |*pseudo_classes| @field(pseudo_classes, &key_snake_case) else null; + + if (_class) |class| { + try d.writeChar('.'); + try d.writeIdent(class, true); + } else { + try d.writeStr(s); + } + } + }; + + switch (pseudo_class.*) { + // https://drafts.csswg.org/selectors-4/#useraction-pseudos + .hover => try Helpers.pseudo(dest, "hover", ":hover"), + .active => try Helpers.pseudo(dest, "active", ":active"), + .focus => try Helpers.pseudo(dest, "focus", ":focus"), + .focus_visible => try Helpers.pseudo(dest, "focus-visible", ":focus-visible"), + .focus_within => try Helpers.pseudo(dest, "focus-within", ":focus-within"), + + // https://drafts.csswg.org/selectors-4/#time-pseudos + .current => try dest.writeStr(":current"), + .past => try dest.writeStr(":past"), + .future => try dest.writeStr(":future"), + + // https://drafts.csswg.org/selectors-4/#resource-pseudos + .playing => try dest.writeStr(":playing"), + .paused => try dest.writeStr(":paused"), + .seeking => try dest.writeStr(":seeking"), + .buffering => try dest.writeStr(":buffering"), + .stalled => try dest.writeStr(":stalled"), + .muted => try dest.writeStr(":muted"), + .volume_locked => try dest.writeStr(":volume-locked"), + + // https://fullscreen.spec.whatwg.org/#:fullscreen-pseudo-class + .fullscreen => |prefix| { + try dest.writeChar(':'); + const vp = if (!dest.vendor_prefix.isEmpty()) + dest.vendor_prefix.bitwiseAnd(prefix).orNone() + else + prefix; + try vp.toCss(W, dest); + if (vp.webkit or vp.moz) { + try dest.writeStr("full-screen"); + } else { + try dest.writeStr("fullscreen"); + } + }, + + // https://drafts.csswg.org/selectors/#display-state-pseudos + .open => try dest.writeStr(":open"), + .closed => try dest.writeStr(":closed"), + .modal => try dest.writeStr(":modal"), + .picture_in_picture => try dest.writeStr(":picture-in-picture"), + + // https://html.spec.whatwg.org/multipage/semantics-other.html#selector-popover-open + .popover_open => try dest.writeStr(":popover-open"), + + // https://drafts.csswg.org/selectors-4/#the-defined-pseudo + .defined => try dest.writeStr(":defined"), + + // https://drafts.csswg.org/selectors-4/#location + .any_link => |prefix| try Helpers.writePrefixed(dest, prefix, "any-link"), + .link => try dest.writeStr(":link"), + .local_link => try dest.writeStr(":local-link"), + .target => try dest.writeStr(":target"), + .target_within => try dest.writeStr(":target-within"), + .visited => try dest.writeStr(":visited"), + + // https://drafts.csswg.org/selectors-4/#input-pseudos + .enabled => try dest.writeStr(":enabled"), + .disabled => try dest.writeStr(":disabled"), + .read_only => |prefix| try Helpers.writePrefixed(dest, prefix, "read-only"), + .read_write => |prefix| try Helpers.writePrefixed(dest, prefix, "read-write"), + .placeholder_shown => |prefix| try Helpers.writePrefixed(dest, prefix, "placeholder-shown"), + .default => try dest.writeStr(":default"), + .checked => 
try dest.writeStr(":checked"), + .indeterminate => try dest.writeStr(":indeterminate"), + .blank => try dest.writeStr(":blank"), + .valid => try dest.writeStr(":valid"), + .invalid => try dest.writeStr(":invalid"), + .in_range => try dest.writeStr(":in-range"), + .out_of_range => try dest.writeStr(":out-of-range"), + .required => try dest.writeStr(":required"), + .optional => try dest.writeStr(":optional"), + .user_valid => try dest.writeStr(":user-valid"), + .user_invalid => try dest.writeStr(":user-invalid"), + + // https://html.spec.whatwg.org/multipage/semantics-other.html#selector-autofill + .autofill => |prefix| try Helpers.writePrefixed(dest, prefix, "autofill"), + + .local => |selector| try serializeSelector(selector.selector, W, dest, context, false), + .global => |selector| { + const css_module = if (dest.css_module) |module| css_module: { + dest.css_module = null; + break :css_module module; + } else null; + try serializeSelector(selector.selector, W, dest, context, false); + dest.css_module = css_module; + }, + + // https://webkit.org/blog/363/styling-scrollbars/ + .webkit_scrollbar => |s| { + try dest.writeStr(switch (s) { + .horizontal => ":horizontal", + .vertical => ":vertical", + .decrement => ":decrement", + .increment => ":increment", + .start => ":start", + .end => ":end", + .double_button => ":double-button", + .single_button => ":single-button", + .no_button => ":no-button", + .corner_present => ":corner-present", + .window_inactive => ":window-inactive", + }); + }, + + .lang => unreachable, + .dir => unreachable, + .custom => |name| { + try dest.writeChar(':'); + return dest.writeStr(name.name); + }, + .custom_function => |v| { + try dest.writeChar(':'); + try dest.writeStr(v.name); + try dest.writeChar('('); + try v.arguments.toCssRaw(W, dest); + try dest.writeChar(')'); + }, + } + } + + pub fn serializePseudoElement( + pseudo_element: *const parser.PseudoElement, + comptime W: type, + dest: *Printer(W), + context: ?*const css.StyleContext, + ) PrintErr!void { + const Helpers = struct { + pub fn writePrefix(d: *Printer(W), prefix: css.VendorPrefix) PrintErr!css.VendorPrefix { + try d.writeStr("::"); + // If the printer has a vendor prefix override, use that. + const vp = if (!d.vendor_prefix.isEmpty()) d.vendor_prefix.bitwiseAnd(prefix).orNone() else prefix; + try vp.toCss(W, d); + return vp; + } + + pub fn writePrefixed(d: *Printer(W), prefix: css.VendorPrefix, comptime val: []const u8) PrintErr!void { + _ = try writePrefix(d, prefix); + try d.writeStr(val); + } + }; + // switch (pseudo_element.*) { + // // CSS2 pseudo elements support a single colon syntax in addition + // // to the more correct double colon for other pseudo elements. + // // We use that here because it's supported everywhere and is shorter. + // .after => try dest.writeStr(":after"), + // .before => try dest.writeStr(":before"), + // .marker => try dest.writeStr(":first-letter"), + // .selection => |prefix| Helpers.writePrefixed(dest, prefix, "selection"), + // .cue => dest.writeStr("::cue"), + // .cue_region => dest.writeStr("::cue-region"), + // .cue_function => |v| { + // dest.writeStr("::cue("); + // try serializeSelector(v.selector, W, dest, context, false); + // try dest.writeChar(')'); + // }, + // } + switch (pseudo_element.*) { + // CSS2 pseudo elements support a single colon syntax in addition + // to the more correct double colon for other pseudo elements. + // We use that here because it's supported everywhere and is shorter. 
+ .after => try dest.writeStr(":after"), + .before => try dest.writeStr(":before"), + .first_line => try dest.writeStr(":first-line"), + .first_letter => try dest.writeStr(":first-letter"), + .marker => try dest.writeStr("::marker"), + .selection => |prefix| try Helpers.writePrefixed(dest, prefix, "selection"), + .cue => try dest.writeStr("::cue"), + .cue_region => try dest.writeStr("::cue-region"), + .cue_function => |v| { + try dest.writeStr("::cue("); + try serializeSelector(v.selector, W, dest, context, false); + try dest.writeChar(')'); + }, + .cue_region_function => |v| { + try dest.writeStr("::cue-region("); + try serializeSelector(v.selector, W, dest, context, false); + try dest.writeChar(')'); + }, + .placeholder => |prefix| { + const vp = try Helpers.writePrefix(dest, prefix); + if (vp.webkit or vp.ms) { + try dest.writeStr("input-placeholder"); + } else { + try dest.writeStr("placeholder"); + } + }, + .backdrop => |prefix| try Helpers.writePrefixed(dest, prefix, "backdrop"), + .file_selector_button => |prefix| { + const vp = try Helpers.writePrefix(dest, prefix); + if (vp.webkit) { + try dest.writeStr("file-upload-button"); + } else if (vp.ms) { + try dest.writeStr("browse"); + } else { + try dest.writeStr("file-selector-button"); + } + }, + .webkit_scrollbar => |s| { + try dest.writeStr(switch (s) { + .scrollbar => "::-webkit-scrollbar", + .button => "::-webkit-scrollbar-button", + .track => "::-webkit-scrollbar-track", + .track_piece => "::-webkit-scrollbar-track-piece", + .thumb => "::-webkit-scrollbar-thumb", + .corner => "::-webkit-scrollbar-corner", + .resizer => "::-webkit-resizer", + }); + }, + .view_transition => try dest.writeStr("::view-transition"), + .view_transition_group => |v| { + try dest.writeStr("::view-transition-group("); + try v.part_name.toCss(W, dest); + try dest.writeChar(')'); + }, + .view_transition_image_pair => |v| { + try dest.writeStr("::view-transition-image-pair("); + try v.part_name.toCss(W, dest); + try dest.writeChar(')'); + }, + .view_transition_old => |v| { + try dest.writeStr("::view-transition-old("); + try v.part_name.toCss(W, dest); + try dest.writeChar(')'); + }, + .view_transition_new => |v| { + try dest.writeStr("::view-transition-new("); + try v.part_name.toCss(W, dest); + try dest.writeChar(')'); + }, + .custom => |val| { + try dest.writeStr("::"); + return dest.writeStr(val.name); + }, + .custom_function => |v| { + const name = v.name; + try dest.writeStr("::"); + try dest.writeStr(name); + try dest.writeChar('('); + try v.arguments.toCssRaw(W, dest); + try dest.writeChar(')'); + }, + } + } + + pub fn serializeNesting( + comptime W: type, + dest: *Printer(W), + context: ?*const css.StyleContext, + first: bool, + ) PrintErr!void { + if (context) |ctx| { + // If there's only one simple selector, just serialize it directly. + // Otherwise, use an :is() pseudo class. + // Type selectors are only allowed at the start of a compound selector, + // so use :is() if that is not the case. + if (ctx.selectors.v.len() == 1 and + (first or (!hasTypeSelector(ctx.selectors.v.at(0)) and + isSimple(ctx.selectors.v.at(0))))) + { + try serializeSelector(ctx.selectors.v.at(0), W, dest, ctx.parent, false); + } else { + try dest.writeStr(":is("); + try serializeSelectorList(ctx.selectors.v.slice(), W, dest, ctx.parent, false); + try dest.writeChar(')'); + } + } else { + // If there is no context, we are at the root if nesting is supported. This is equivalent to :scope. + // Otherwise, if nesting is supported, serialize the nesting selector directly. 
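+ // (shouldCompileSame(.nesting) is true when nesting has to be compiled away for the
+ // configured targets, so we emit `:scope`; otherwise the raw `&` is preserved.)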
+ if (dest.targets.shouldCompileSame(.nesting)) { + try dest.writeStr(":scope"); + } else { + try dest.writeChar('&'); + } + } + } +}; + +pub const tocss_servo = struct { + pub fn toCss_SelectorList( + selectors: []const parser.Selector, + comptime W: type, + dest: *css.Printer(W), + ) PrintErr!void { + if (selectors.len == 0) { + return; + } + + try tocss_servo.toCss_Selector(&selectors[0], W, dest); + + if (selectors.len > 1) { + for (selectors[1..]) |*selector| { + try dest.writeStr(", "); + try tocss_servo.toCss_Selector(selector, W, dest); + } + } + } + + pub fn toCss_Selector( + selector: *const parser.Selector, + comptime W: type, + dest: *css.Printer(W), + ) PrintErr!void { + // Compound selectors invert the order of their contents, so we need to + // undo that during serialization. + // + // This two-iterator strategy involves walking over the selector twice. + // We could do something more clever, but selector serialization probably + // isn't hot enough to justify it, and the stringification likely + // dominates anyway. + // + // NB: A parse-order iterator is a Rev<>, which doesn't expose as_slice(), + // which we need for |split|. So we split by combinators on a match-order + // sequence and then reverse. + var combinators = CombinatorIter{ .sel = selector }; + var compound_selectors = CompoundSelectorIter{ .sel = selector }; + + var combinators_exhausted = false; + while (compound_selectors.next()) |compound| { + bun.debugAssert(!combinators_exhausted); + + // https://drafts.csswg.org/cssom/#serializing-selectors + if (compound.len == 0) continue; + + // 1. If there is only one simple selector in the compound selectors + // which is a universal selector, append the result of + // serializing the universal selector to s. + // + // Check if `!compound.empty()` first--this can happen if we have + // something like `... > ::before`, because we store `>` and `::` + // both as combinators internally. + // + // If we are in this case, after we have serialized the universal + // selector, we skip Step 2 and continue with the algorithm. + const can_elide_namespace, const first_non_namespace: usize = if (0 >= compound.len) + .{ true, 0 } + else switch (compound[0]) { + .explicit_any_namespace, .explicit_no_namespace, .namespace => .{ false, 1 }, + .default_namespace => .{ true, 1 }, + else => .{ true, 0 }, + }; + var perform_step_2 = true; + const next_combinator = combinators.next(); + if (first_non_namespace == compound.len - 1) { + // We have to be careful here, because if there is a + // pseudo element "combinator" there isn't really just + // the one simple selector. Technically this compound + // selector contains the pseudo element selector as well + // -- Combinator::PseudoElement, just like + // Combinator::SlotAssignment, don't exist in the + // spec. + if (next_combinator == .pseudo_element and compound[first_non_namespace].asCombinator() == .slot_assignment) { + // do nothing + } else if (compound[first_non_namespace] == .explicit_universal_type) { + // Iterate over everything so we serialize the namespace + // too. + for (compound) |*simple| { + try tocss_servo.toCss_Component(simple, W, dest); + } + // Skip step 2, which is an "otherwise". + perform_step_2 = false; + } else { + // do nothing + } + } + + // 2. Otherwise, for each simple selector in the compound selectors + // that is not a universal selector of which the namespace prefix + // maps to a namespace that is not the default namespace + // serialize the simple selector and append the result to s. 
+ // + // See https://github.com/w3c/csswg-drafts/issues/1606, which is + // proposing to change this to match up with the behavior asserted + // in cssom/serialize-namespaced-type-selectors.html, which the + // following code tries to match. + if (perform_step_2) { + for (compound) |*simple| { + if (simple.* == .explicit_universal_type) { + // Can't have a namespace followed by a pseudo-element + // selector followed by a universal selector in the same + // compound selector, so we don't have to worry about the + // real namespace being in a different `compound`. + if (can_elide_namespace) { + continue; + } + } + try tocss_servo.toCss_Component(simple, W, dest); + } + } + + // 3. If this is not the last part of the chain of the selector + // append a single SPACE (U+0020), followed by the combinator + // ">", "+", "~", ">>", "||", as appropriate, followed by another + // single SPACE (U+0020) if the combinator was not whitespace, to + // s. + if (next_combinator) |c| { + try toCss_Combinator(&c, W, dest); + } else { + combinators_exhausted = true; + } + + // 4. If this is the last part of the chain of the selector and + // there is a pseudo-element, append "::" followed by the name of + // the pseudo-element, to s. + // + // (we handle this above) + } + } + + pub fn toCss_Component( + component: *const parser.Component, + comptime W: type, + dest: *Printer(W), + ) PrintErr!void { + switch (component.*) { + .combinator => |*c| try toCss_Combinator(c, W, dest), + .slotted => |*selector| { + try dest.writeStr("::slotted("); + try tocss_servo.toCss_Selector(selector, W, dest); + try dest.writeChar(')'); + }, + .part => |part_names| { + try dest.writeStr("::part("); + for (part_names, 0..) |name, i| { + if (i != 0) { + try dest.writeChar(' '); + } + try css.IdentFns.toCss(&name, W, dest); + } + try dest.writeChar(')'); + }, + .pseudo_element => |*p| { + try p.toCss(W, dest); + }, + .id => |s| { + try dest.writeChar('#'); + try css.IdentFns.toCss(&s, W, dest); + }, + .class => |s| { + try dest.writeChar('.'); + try css.IdentFns.toCss(&s, W, dest); + }, + .local_name => |local_name| { + try local_name.toCss(W, dest); + }, + .explicit_universal_type => { + try dest.writeChar('*'); + }, + .default_namespace => return, + + .explicit_no_namespace => { + try dest.writeChar('|'); + }, + .explicit_any_namespace => { + try dest.writeStr("*|"); + }, + .namespace => |ns| { + try css.IdentFns.toCss(&ns.prefix, W, dest); + try dest.writeChar('|'); + }, + .attribute_in_no_namespace_exists => |v| { + try dest.writeChar('['); + try css.IdentFns.toCss(&v.local_name, W, dest); + try dest.writeChar(']'); + }, + .attribute_in_no_namespace => |v| { + try dest.writeChar('['); + try css.IdentFns.toCss(&v.local_name, W, dest); + try v.operator.toCss(W, dest); + try css.CSSStringFns.toCss(&v.value, W, dest); + switch (v.case_sensitivity) { + .case_sensitive, .ascii_case_insensitive_if_in_html_element_in_html_document => {}, + .ascii_case_insensitive => try dest.writeStr(" i"), + .explicit_case_sensitive => try dest.writeStr(" s"), + } + try dest.writeChar(']'); + }, + .attribute_other => |attr_selector| { + try attr_selector.toCss(W, dest); + }, + // Pseudo-classes + .root => { + try dest.writeStr(":root"); + }, + .empty => { + try dest.writeStr(":empty"); + }, + .scope => { + try dest.writeStr(":scope"); + }, + .host => |selector| { + try dest.writeStr(":host"); + if (selector) |*sel| { + try dest.writeChar('('); + try tocss_servo.toCss_Selector(sel, W, dest); + try dest.writeChar(')'); + } + }, + .nth => |nth_data| { 
+ try nth_data.writeStart(W, dest, nth_data.isFunction()); + if (nth_data.isFunction()) { + try nth_data.writeAffine(W, dest); + try dest.writeChar(')'); + } + }, + .nth_of => |nth_of_data| { + const nth_data = nth_of_data.nthData(); + try nth_data.writeStart(W, dest, true); + // A selector must be a function to hold An+B notation + bun.debugAssert(nth_data.is_function); + try nth_data.writeAffine(W, dest); + // Only :nth-child or :nth-last-child can be of a selector list + bun.debugAssert(nth_data.ty == .child or nth_data.ty == .last_child); + // The selector list should not be empty + bun.debugAssert(nth_of_data.selectors.len != 0); + try dest.writeStr(" of "); + try tocss_servo.toCss_SelectorList(nth_of_data.selectors, W, dest); + try dest.writeChar(')'); + }, + .is, .where, .negation, .has, .any => { + switch (component.*) { + .where => try dest.writeStr(":where("), + .is => try dest.writeStr(":is("), + .negation => try dest.writeStr(":not("), + .has => try dest.writeStr(":has("), + .any => |v| { + try dest.writeChar(':'); + try v.vendor_prefix.toCss(W, dest); + try dest.writeStr("any("); + }, + else => unreachable, + } + try tocss_servo.toCss_SelectorList( + switch (component.*) { + .where, .is, .negation, .has => |list| list, + .any => |v| v.selectors, + else => unreachable, + }, + W, + dest, + ); + try dest.writeStr(")"); + }, + .non_ts_pseudo_class => |*pseudo| { + try pseudo.toCss(W, dest); + }, + .nesting => try dest.writeChar('&'), + } + } + + pub fn toCss_Combinator( + combinator: *const parser.Combinator, + comptime W: type, + dest: *Printer(W), + ) PrintErr!void { + switch (combinator.*) { + .child => try dest.writeStr(" > "), + .descendant => try dest.writeStr(" "), + .next_sibling => try dest.writeStr(" + "), + .later_sibling => try dest.writeStr(" ~ "), + .deep => try dest.writeStr(" /deep/ "), + .deep_descendant => { + try dest.writeStr(" >>> "); + }, + .pseudo_element, .part, .slot_assignment => return, + } + } + + pub fn toCss_PseudoElement( + pseudo_element: *const parser.PseudoElement, + comptime W: type, + dest: *Printer(W), + ) PrintErr!void { + switch (pseudo_element.*) { + .before => try dest.writeStr("::before"), + .after => try dest.writeStr("::after"), + } + } +}; + +pub fn shouldUnwrapIs(selectors: []const parser.Selector) bool { + if (selectors.len == 1) { + const first = selectors[0]; + if (!hasTypeSelector(&first) and isSimple(&first)) return true; + } + + return false; +} + +fn hasTypeSelector(selector: *const parser.Selector) bool { + var iter = selector.iterRawParseOrderFrom(0); + const first = iter.next(); + + if (isNamespace(if (first) |*f| f else null)) return isTypeSelector(if (iter.next()) |*n| n else null); + + return isTypeSelector(if (first) |*f| f else null); +} + +fn isNamespace(component: ?*const parser.Component) bool { + if (component) |c| return switch (c.*) { + .explicit_any_namespace, .explicit_no_namespace, .namespace, .default_namespace => true, + else => false, + }; + return false; +} + +fn isTypeSelector(component: ?*const parser.Component) bool { + if (component) |c| return switch (c.*) { + .local_name, .explicit_universal_type => true, + else => false, + }; + return false; +} + +fn isSimple(selector: *const parser.Selector) bool { + var iter = selector.iterRawParseOrderFrom(0); + while (iter.next()) |component| { + if (component.isCombinator()) return true; + } + return false; +} + +const CombinatorIter = struct { + sel: *const parser.Selector, + i: usize = 0, + + /// Original source has this iterator defined like so: + /// ```rs + 
/// selector + /// .iter_raw_match_order() // just returns an iterator + /// .rev() // reverses the iterator + /// .filter_map(|x| x.as_combinator()) // returns only entries which are combinators + /// ``` + pub fn next(this: *@This()) ?parser.Combinator { + while (this.i < this.sel.components.items.len) { + defer this.i += 1; + const combinator = this.sel.components.items[this.sel.components.items.len - 1 - this.i].asCombinator() orelse continue; + return combinator; + } + return null; + } +}; +const CompoundSelectorIter = struct { + sel: *const parser.Selector, + i: usize = 0, + + /// This iterator is basically like doing `selector.components.splitByCombinator()`. + /// + /// For example: + /// ```css + /// div > p.class + /// ``` + /// + /// The iterator would return: + /// ``` + /// First slice: + /// .{ + /// .{ .local_name = "div" } + /// } + /// + /// Second slice: + /// .{ + /// .{ .local_name = "p" }, + /// .{ .class = "class" } + /// } + /// ``` + /// + /// BUT, the selectors are stored in reverse order, so this code needs to split the components backwards. + /// + /// Original source has this iterator defined like so: + /// ```rs + /// selector + /// .iter_raw_match_order() + /// .as_slice() + /// .split(|x| x.is_combinator()) // splits the slice into subslices by elements that match over the predicate + /// .rev() // reverse + /// ``` + pub inline fn next(this: *@This()) ?[]const parser.Component { + // Since we iterating backwards, we convert all indices into "backwards form" by doing `this.sel.components.items.len - 1 - i` + while (this.i < this.sel.components.items.len) { + const next_index: ?usize = next_index: { + for (this.i..this.sel.components.items.len) |j| { + if (this.sel.components.items[this.sel.components.items.len - 1 - j].isCombinator()) break :next_index j; + } + break :next_index null; + }; + if (next_index) |combinator_index| { + const start = if (combinator_index == 0) 0 else combinator_index - 1; + const end = this.i; + const slice = this.sel.components.items[this.sel.components.items.len - 1 - start .. this.sel.components.items.len - end]; + this.i = combinator_index + 1; + return slice; + } + const slice = this.sel.components.items[0 .. this.sel.components.items.len - 1 - this.i + 1]; + this.i = this.sel.components.items.len; + return slice; + } + return null; + } +}; diff --git a/src/css/small_list.zig b/src/css/small_list.zig new file mode 100644 index 0000000000..fbc86efdd0 --- /dev/null +++ b/src/css/small_list.zig @@ -0,0 +1,560 @@ +const std = @import("std"); +const bun = @import("root").bun; +const css = @import("./css_parser.zig"); +const Printer = css.Printer; +const Parser = css.Parser; +const Result = css.Result; +const voidWrap = css.voidWrap; +const generic = css.generic; +const Delimiters = css.Delimiters; +const PrintErr = css.PrintErr; +const Allocator = std.mem.Allocator; +const implementEql = css.implementEql; +const TextShadow = css.css_properties.text.TextShadow; + +/// This is a type whose items can either be heap-allocated (essentially the +/// same as a BabyList(T)) or inlined in the struct itself. +/// +/// This is type is a performance optimizations for avoiding allocations, especially when you know the list +/// will commonly have N or fewer items. +/// +/// The `capacity` field is used to disambiguate between the two states: - When +/// `capacity <= N`, the items are stored inline, and `capacity` is the length +/// of the items. 
- When `capacity > N`, the items are stored on the heap, and +/// this type essentially becomes a BabyList(T), but with the fields reordered. +/// +/// This code is based on servo/rust-smallvec and the Zig std.ArrayList source. +pub fn SmallList(comptime T: type, comptime N: comptime_int) type { + return struct { + capacity: u32 = 0, + data: Data = .{ .inlined = undefined }, + + const Data = union { + inlined: [N]T, + heap: HeapData, + }; + + const HeapData = struct { + len: u32, + ptr: [*]T, + + pub fn initCapacity(allocator: Allocator, capacity: u32) HeapData { + return .{ + .len = 0, + .ptr = (allocator.alloc(T, capacity) catch bun.outOfMemory()).ptr, + }; + } + }; + + const This = @This(); + + pub fn parse(input: *Parser) Result(@This()) { + const parseFn = comptime voidWrap(T, generic.parseFor(T)); + var values: @This() = .{}; + while (true) { + input.skipWhitespace(); + switch (input.parseUntilBefore(Delimiters{ .comma = true }, T, {}, parseFn)) { + .result => |v| { + values.append(input.allocator(), v); + }, + .err => |e| return .{ .err = e }, + } + switch (input.next()) { + .err => return .{ .result = values }, + .result => |t| { + if (t.* == .comma) continue; + std.debug.panic("Expected a comma", .{}); + }, + } + } + unreachable; + } + + pub fn toCss(this: *const @This(), comptime W: type, dest: *Printer(W)) PrintErr!void { + const length = this.len(); + for (this.slice(), 0..) |*val, idx| { + try val.toCss(W, dest); + if (idx < length - 1) { + try dest.delim(',', false); + } + } + } + + pub fn withOne(val: T) @This() { + var ret = This{}; + ret.capacity = 1; + ret.data.inlined[0] = val; + return ret; + } + + pub inline fn at(this: *const @This(), idx: u32) *const T { + return &this.as_const_ptr()[idx]; + } + + pub inline fn mut(this: *@This(), idx: u32) *T { + return &this.as_ptr()[idx]; + } + + pub inline fn last(this: *const @This()) ?*const T { + const sl = this.slice(); + if (sl.len == 0) return null; + return &sl[sl.len - 1]; + } + + pub inline fn toOwnedSlice(this: *const @This(), allocator: Allocator) []T { + if (this.spilled()) return this.data.heap.ptr[0..this.data.heap.len]; + return allocator.dupe(T, this.data.inlined[0..this.capacity]) catch bun.outOfMemory(); + } + + /// NOTE: If this is inlined then this will refer to stack memory, if + /// need it to be stable then you should use `.toOwnedSlice()` + pub inline fn slice(this: *const @This()) []const T { + if (this.capacity > N) return this.data.heap.ptr[0..this.data.heap.len]; + return this.data.inlined[0..this.capacity]; + } + + /// NOTE: If this is inlined then this will refer to stack memory, if + /// need it to be stable then you should use `.toOwnedSlice()` + pub inline fn slice_mut(this: *@This()) []T { + if (this.capacity > N) return this.data.heap.ptr[0..this.data.heap.len]; + return this.data.inlined[0..this.capacity]; + } + + pub fn isCompatible(this: *const @This(), browsers: css.targets.Browsers) bool { + for (this.slice()) |*v| { + if (!v.isCompatible(browsers)) return false; + } + return true; + } + + /// For this function to be called the T here must implement the ImageFallback interface + pub fn getFallbacks(this: *@This(), allocator: Allocator, targets: css.targets.Targets) getFallbacksReturnType(T, N) { + // Implements ImageFallback interface + if (@hasDecl(T, "getImage") and N == 1) { + const ColorFallbackKind = css.css_values.color.ColorFallbackKind; + // Determine what vendor prefixes and color fallbacks are needed. 
+ var prefixes = css.VendorPrefix.empty(); + var fallbacks = ColorFallbackKind.empty(); + var res: bun.BabyList(@This()) = .{}; + for (this.slice()) |*item| { + prefixes.insert(item.getImage().getNecessaryPrefixes(targets)); + fallbacks.insert(item.getNecessaryFallbacks(targets)); + } + + // Get RGB fallbacks if needed. + const rgb: ?SmallList(T, 1) = if (fallbacks.contains(ColorFallbackKind{ .rgb = true })) brk: { + var shallow_clone = this.shallowClone(allocator); + for (shallow_clone.slice_mut(), this.slice_mut()) |*out, *in| { + out.* = in.getFallback(allocator, ColorFallbackKind{ .rgb = true }); + } + break :brk shallow_clone; + } else null; + + // Prefixed properties only support RGB. + const prefix_images: *const SmallList(T, 1) = if (rgb) |*r| r else this; + + // Legacy -webkit-gradient() + if (prefixes.contains(css.VendorPrefix{ .webkit = true }) and targets.browsers != null and css.prefixes.Feature.isWebkitGradient(targets.browsers.?)) { + const images = images: { + var images = SmallList(T, 1){}; + for (prefix_images.slice()) |*item| { + if (item.getImage().getLegacyWebkit(allocator)) |img| { + images.append(allocator, item.withImage(allocator, img)); + } + } + break :images images; + }; + if (!images.isEmpty()) { + res.push(allocator, images) catch bun.outOfMemory(); + } + } + + const prefix = struct { + pub inline fn helper(comptime prefix: []const u8, pfs: *css.VendorPrefix, pfi: *const SmallList(T, 1), r: *bun.BabyList(This), alloc: Allocator) void { + if (pfs.contains(css.VendorPrefix.fromName(prefix))) { + var images = SmallList(T, 1).initCapacity(alloc, pfi.len()); + for (images.slice_mut(), pfi.slice()) |*out, *in| { + const image = in.getImage().getPrefixed(alloc, css.VendorPrefix.fromName(prefix)); + out.* = in.withImage(alloc, image); + } + r.push(alloc, images) catch bun.outOfMemory(); + } + } + }.helper; + + prefix("webkit", &prefixes, prefix_images, &res, allocator); + prefix("moz", &prefixes, prefix_images, &res, allocator); + prefix("o", &prefixes, prefix_images, &res, allocator); + + if (prefixes.contains(css.VendorPrefix{ .none = true })) { + if (rgb) |r| { + res.push(allocator, r) catch bun.outOfMemory(); + } + + if (fallbacks.contains(ColorFallbackKind{ .p3 = true })) { + var p3_images = this.shallowClone(allocator); + for (p3_images.slice_mut(), this.slice_mut()) |*out, *in| { + out.* = in.getFallback(allocator, ColorFallbackKind{ .p3 = true }); + } + } + + // Convert to lab if needed (e.g. if oklab is not supported but lab is). + if (fallbacks.contains(ColorFallbackKind{ .lab = true })) { + for (this.slice_mut()) |*item| { + var old = item.*; + item.* = item.getFallback(allocator, ColorFallbackKind{ .lab = true }); + old.deinit(allocator); + } + } + } else if (res.popOrNull()) |the_last| { + var old = this.*; + // Prefixed property with no unprefixed version. + // Replace self with the last prefixed version so that it doesn't + // get duplicated when the caller pushes the original value. 
+ this.* = the_last; + old.deinit(allocator); + } + return res; + } + if (T == TextShadow and N == 1) { + var fallbacks = css.ColorFallbackKind.empty(); + for (this.slice()) |*shadow| { + fallbacks.insert(shadow.color.getNecessaryFallbacks(targets)); + } + + var res = SmallList(SmallList(TextShadow, 1), 2){}; + if (fallbacks.contains(css.ColorFallbackKind{ .rgb = true })) { + var rgb = SmallList(TextShadow, 1).initCapacity(allocator, this.len()); + for (this.slice()) |*shadow| { + var new_shadow = shadow.*; + // dummy non-alloced color to avoid deep cloning the real one since we will replace it + new_shadow.color = .current_color; + new_shadow = new_shadow.deepClone(allocator); + new_shadow.color = shadow.color.toRGB(allocator).?; + rgb.appendAssumeCapacity(new_shadow); + } + res.append(allocator, rgb); + } + + if (fallbacks.contains(css.ColorFallbackKind{ .p3 = true })) { + var p3 = SmallList(TextShadow, 1).initCapacity(allocator, this.len()); + for (this.slice()) |*shadow| { + var new_shadow = shadow.*; + // dummy non-alloced color to avoid deep cloning the real one since we will replace it + new_shadow.color = .current_color; + new_shadow = new_shadow.deepClone(allocator); + new_shadow.color = shadow.color.toP3(allocator).?; + p3.appendAssumeCapacity(new_shadow); + } + res.append(allocator, p3); + } + + if (fallbacks.contains(css.ColorFallbackKind{ .lab = true })) { + for (this.slice_mut()) |*shadow| { + const out = shadow.color.toLAB(allocator).?; + shadow.color.deinit(allocator); + shadow.color = out; + } + } + + return res; + } + @compileError("Dunno what to do here."); + } + + fn getFallbacksReturnType(comptime Type: type, comptime InlineSize: comptime_int) type { + // Implements ImageFallback interface + if (@hasDecl(Type, "getImage") and InlineSize == 1) { + return bun.BabyList(SmallList(Type, 1)); + } + if (Type == TextShadow and InlineSize == 1) { + return SmallList(SmallList(TextShadow, 1), 2); + } + @compileError("Unhandled for: " ++ @typeName(Type)); + } + + // TODO: remove this stupid function + pub fn map(this: *@This(), comptime func: anytype) void { + for (this.slice_mut()) |*item| { + func(item); + } + } + + /// `predicate` must be: `fn(*const T) bool` + pub fn any(this: *const @This(), comptime predicate: anytype) bool { + for (this.slice()) |*item| { + if (predicate(item)) return true; + } + return false; + } + + pub fn orderedRemove(this: *@This(), idx: u32) T { + var ptr, const len_ptr, const capp = this.tripleMut(); + _ = capp; // autofix + bun.assert(idx < len_ptr.*); + + const length = len_ptr.*; + + len_ptr.* = len_ptr.* - 1; + ptr += idx; + const item = ptr[0]; + std.mem.copyForwards(T, ptr[0 .. length - idx - 1], ptr[1..][0 .. 
length - idx - 1]); + + return item; + } + + pub fn swapRemove(this: *@This(), idx: u32) T { + var ptr, const len_ptr, const capp = this.tripleMut(); + _ = capp; // autofix + bun.assert(idx < len_ptr.*); + + const ret = ptr[idx]; + ptr[idx] = ptr[len_ptr.* -| 1]; + len_ptr.* = len_ptr.* - 1; + + return ret; + } + + pub fn clearRetainingCapacity(this: *@This()) void { + if (this.spilled()) { + this.data.heap.len = 0; + } else { + this.capacity = 0; + } + } + + pub fn shallowClone(this: *const @This(), allocator: Allocator) @This() { + if (!this.spilled()) return this.*; + var h = HeapData.initCapacity(allocator, this.capacity); + @memcpy(h.ptr[0..this.capacity], this.data.heap.ptr[0..this.capacity]); + return .{ + .capacity = this.capacity, + .data = .{ .heap = h }, + }; + } + + pub fn deepClone(this: *const @This(), allocator: Allocator) @This() { + var ret: @This() = .{}; + ret.appendSlice(allocator, this.slice()); + for (ret.slice_mut()) |*item| { + item.* = generic.deepClone(T, item, allocator); + } + return ret; + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + if (lhs.len() != rhs.len()) return false; + for (lhs.slice(), rhs.slice()) |*a, *b| { + if (!generic.eql(T, a, b)) return false; + } + return true; + } + + /// Shallow clone + pub fn clone(this: *const @This(), allocator: Allocator) @This() { + var ret = this.*; + if (!this.spilled()) return ret; + ret.data.heap.ptr = (allocator.dupe(T, ret.data.heap.ptr[0..ret.data.heap.len]) catch bun.outOfMemory()).ptr; + return ret; + } + + pub fn deinit(this: *@This(), allocator: Allocator) void { + if (this.spilled()) { + allocator.free(this.data.heap.ptr[0..this.data.heap.len]); + } + } + + pub fn hash(this: *const @This(), hasher: anytype) void { + for (this.slice()) |*item| { + css.generic.hash(T, item, hasher); + } + } + + pub inline fn len(this: *const @This()) u32 { + if (this.spilled()) return this.data.heap.len; + return this.capacity; + } + + pub inline fn isEmpty(this: *const @This()) bool { + return this.len() == 0; + } + + pub fn initCapacity(allocator: Allocator, capacity: u32) @This() { + if (capacity > N) { + var list: This = .{}; + list.capacity = capacity; + list.data = .{ .heap = HeapData.initCapacity(allocator, capacity) }; + return list; + } + + return .{ + .capacity = 0, + }; + } + + pub fn ensureTotalCapacity(this: *@This(), allocator: Allocator, new_capacity: u32) void { + if (this.capacity >= new_capacity) return; + this.tryGrow(allocator, new_capacity); + } + + pub fn insert( + this: *@This(), + allocator: Allocator, + index: u32, + item: T, + ) void { + var ptr, var len_ptr, const capp = this.tripleMut(); + if (len_ptr.* == capp) { + this.reserveOneUnchecked(allocator); + const heap_ptr, const heap_len_ptr = this.heap(); + ptr = heap_ptr; + len_ptr = heap_len_ptr; + } + const length = len_ptr.*; + ptr += index; + if (index < length) { + const count = length - index; + std.mem.copyBackwards(T, ptr[1..][0..count], ptr[0..count]); + } else if (index == length) { + // No elements need shifting. 
+ } else { + @panic("index exceeds length"); + } + len_ptr.* = length + 1; + ptr[0] = item; + } + + pub fn appendAssumeCapacity(this: *@This(), item: T) void { + var ptr, const len_ptr, const capp = this.tripleMut(); + bun.debugAssert(len_ptr.* < capp); + ptr[len_ptr.*] = item; + len_ptr.* += 1; + } + + pub fn append(this: *@This(), allocator: Allocator, item: T) void { + var ptr, var len_ptr, const capp = this.tripleMut(); + if (len_ptr.* == capp) { + this.reserveOneUnchecked(allocator); + const heap_ptr, const heap_len = this.heap(); + ptr = heap_ptr; + len_ptr = heap_len; + } + ptr[len_ptr.*] = item; + len_ptr.* += 1; + } + + pub fn appendSlice(this: *@This(), allocator: Allocator, items: []const T) void { + this.insertSlice(allocator, this.len(), items); + } + + pub fn insertSlice(this: *@This(), allocator: Allocator, index: u32, items: []const T) void { + this.reserve(allocator, @intCast(items.len)); + + const length = this.len(); + bun.assert(index <= length); + const ptr: [*]T = this.as_ptr()[index..]; + const count = length - index; + std.mem.copyBackwards(T, ptr[items.len..][0..count], ptr[0..count]); + @memcpy(ptr[0..items.len], items); + this.setLen(length + @as(u32, @intCast(items.len))); + } + + pub fn setLen(this: *@This(), new_len: u32) void { + const len_ptr = this.lenMut(); + len_ptr.* = new_len; + } + + inline fn heap(this: *@This()) struct { [*]T, *u32 } { + return .{ this.data.heap.ptr, &this.data.heap.len }; + } + + fn as_const_ptr(this: *const @This()) [*]const T { + if (this.spilled()) return this.data.heap.ptr; + return &this.data.inlined; + } + + fn as_ptr(this: *@This()) [*]T { + if (this.spilled()) return this.data.heap.ptr; + return &this.data.inlined; + } + + fn reserve(this: *@This(), allocator: Allocator, additional: u32) void { + const ptr, const __len, const capp = this.tripleMut(); + _ = ptr; // autofix + const len_ = __len.*; + + if (capp - len_ >= additional) return; + const new_cap = growCapacity(capp, len_ + additional); + this.tryGrow(allocator, new_cap); + } + + fn reserveOneUnchecked(this: *@This(), allocator: Allocator) void { + @setCold(true); + bun.assert(this.len() == this.capacity); + const new_cap = growCapacity(this.capacity, this.len() + 1); + this.tryGrow(allocator, new_cap); + } + + fn tryGrow(this: *@This(), allocator: Allocator, new_cap: u32) void { + const unspilled = !this.spilled(); + const ptr, const __len, const cap = this.tripleMut(); + const length = __len.*; + bun.assert(new_cap >= length); + if (new_cap <= N) { + if (unspilled) return; + this.data = .{ .inlined = undefined }; + @memcpy(ptr[0..length], this.data.inlined[0..length]); + this.capacity = length; + allocator.free(ptr[0..length]); + } else if (new_cap != cap) { + const new_alloc: [*]T = if (unspilled) new_alloc: { + const new_alloc = allocator.alloc(T, new_cap) catch bun.outOfMemory(); + @memcpy(new_alloc[0..length], ptr[0..length]); + break :new_alloc new_alloc.ptr; + } else new_alloc: { + break :new_alloc (allocator.realloc(ptr[0..length], new_cap * @sizeOf(T)) catch bun.outOfMemory()).ptr; + }; + this.data = .{ .heap = .{ .ptr = new_alloc, .len = length } }; + this.capacity = new_cap; + } + } + + /// Returns a tuple with (data ptr, len, capacity) + /// Useful to get all SmallVec properties with a single check of the current storage variant. 
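+ ///
+ /// A sketch of what each tuple element points at (assuming a `list: SmallList(T, N)`):
+ ///
+ /// ```zig
+ /// const ptr, const len_ptr, const cap = list.tripleMut();
+ /// // spilled: heap ptr, &data.heap.len, heap capacity
+ /// // inline:  &data.inlined, &capacity (doubles as the length), N
+ /// ```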
+ inline fn tripleMut(this: *@This()) struct { [*]T, *u32, u32 } { + if (this.spilled()) return .{ this.data.heap.ptr, &this.data.heap.len, this.capacity }; + return .{ &this.data.inlined, &this.capacity, N }; + } + + inline fn lenMut(this: *@This()) *u32 { + if (this.spilled()) return &this.data.heap.len; + return &this.capacity; + } + + fn growToHeap(this: *@This(), allocator: Allocator, additional: usize) void { + bun.assert(!this.spilled()); + const new_size = growCapacity(this.capacity, this.capacity + additional); + var slc = allocator.alloc(T, new_size) catch bun.outOfMemory(); + @memcpy(slc[0..this.capacity], this.data.inlined[0..this.capacity]); + this.data = .{ .heap = HeapData{ .len = this.capacity, .ptr = slc.ptr } }; + this.capacity = new_size; + } + + inline fn spilled(this: *const @This()) bool { + return this.capacity > N; + } + + /// Copy pasted from Zig std in array list: + /// + /// Called when memory growth is necessary. Returns a capacity larger than + /// minimum that grows super-linearly. + fn growCapacity(current: u32, minimum: u32) u32 { + var new = current; + while (true) { + new +|= new / 2 + 8; + if (new >= minimum) + return new; + } + } + }; +} diff --git a/src/css/sourcemap.zig b/src/css/sourcemap.zig new file mode 100644 index 0000000000..57f5cab30f --- /dev/null +++ b/src/css/sourcemap.zig @@ -0,0 +1,36 @@ +const std = @import("std"); +const Allocator = std.mem.Allocator; +const bun = @import("root").bun; +const logger = bun.logger; +const Log = logger.Log; + +pub const css = @import("./css_parser.zig"); +pub const css_values = @import("./values/values.zig"); +const DashedIdent = css_values.ident.DashedIdent; +const Ident = css_values.ident.Ident; +pub const Error = css.Error; +const Location = css.Location; +const ArrayList = std.ArrayListUnmanaged; + +pub const SourceMap = struct { + project_root: []const u8, + inner: SourceMapInner, +}; + +pub const SourceMapInner = struct { + sources: ArrayList([]const u8), + sources_content: ArrayList([]const u8), + names: ArrayList([]const u8), + mapping_lines: ArrayList(MappingLine), +}; + +pub const MappingLine = struct { mappings: ArrayList(LineMapping), last_column: u32, is_sorted: bool }; + +pub const LineMapping = struct { generated_column: u32, original: ?OriginalLocation }; + +pub const OriginalLocation = struct { + original_line: u32, + original_column: u32, + source: u32, + name: ?u32, +}; diff --git a/src/css/targets.zig b/src/css/targets.zig new file mode 100644 index 0000000000..ab720f8304 --- /dev/null +++ b/src/css/targets.zig @@ -0,0 +1,135 @@ +const std = @import("std"); +const bun = @import("root").bun; +const Allocator = std.mem.Allocator; + +pub const css = @import("./css_parser.zig"); + +const Printer = css.Printer; +const PrintErr = css.PrintErr; +const VendorPrefix = css.VendorPrefix; + +/// Target browsers and features to compile. +pub const Targets = struct { + /// Browser targets to compile the CSS for. + browsers: ?Browsers = null, + /// Features that should always be compiled, even when supported by targets. + include: Features = .{}, + /// Features that should never be compiled, even when unsupported by targets. 
+ exclude: Features = .{}, + + pub fn prefixes(this: *const Targets, prefix: css.VendorPrefix, feature: css.prefixes.Feature) css.VendorPrefix { + if (prefix.contains(css.VendorPrefix{ .none = true }) and !this.exclude.contains(css.targets.Features{ .vendor_prefixes = true })) { + if (this.include.contains(css.targets.Features{ .vendor_prefixes = true })) { + return css.VendorPrefix.all(); + } else { + return if (this.browsers) |b| feature.prefixesFor(b) else prefix; + } + } else { + return prefix; + } + } + + pub fn shouldCompileLogical(this: *const Targets, feature: css.compat.Feature) bool { + return this.shouldCompile(feature, css.Features{ .logical_properties = true }); + } + + pub fn shouldCompile(this: *const Targets, feature: css.compat.Feature, flag: Features) bool { + return this.include.contains(flag) or (!this.exclude.contains(flag) and !this.isCompatible(feature)); + } + + pub fn shouldCompileSame(this: *const Targets, comptime prop: @Type(.EnumLiteral)) bool { + const compat_feature: css.compat.Feature = prop; + const target_feature: css.targets.Features = target_feature: { + var feature: css.targets.Features = .{}; + @field(feature, @tagName(prop)) = true; + break :target_feature feature; + }; + + return shouldCompile(this, compat_feature, target_feature); + } + + pub fn shouldCompileSelectors(this: *const Targets) bool { + return this.include.intersects(Features.selectors) or + (!this.exclude.intersects(Features.selectors) and this.browsers != null); + } + + pub fn isCompatible(this: *const Targets, feature: css.compat.Feature) bool { + if (this.browsers) |*targets| { + return feature.isCompatible(targets.*); + } + return true; + } +}; + +/// Autogenerated by build-prefixes.js +/// +/// Browser versions to compile CSS for. +/// +/// Versions are represented as a single 24-bit integer, with one byte +/// per `major.minor.patch` component. +/// +/// # Example +/// +/// This example represents a target of Safari 13.2.0. +/// +/// ``` +/// const Browsers = struct { +/// safari: ?u32 = (13 << 16) | (2 << 8), +/// ..Browsers{} +/// }; +/// ``` +pub const Browsers = struct { + android: ?u32 = null, + chrome: ?u32 = null, + edge: ?u32 = null, + firefox: ?u32 = null, + ie: ?u32 = null, + ios_saf: ?u32 = null, + opera: ?u32 = null, + safari: ?u32 = null, + samsung: ?u32 = null, + pub usingnamespace BrowsersImpl(@This()); +}; + +/// Autogenerated by build-prefixes.js +/// Features to explicitly enable or disable. 
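+/// Each flag is a single bit in the packed struct, so features can be set
+/// directly or grouped, e.g. (illustrative):
+///
+/// ```
+/// const f = Features{ .nesting = true, .clamp_function = true };
+/// ```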
+pub const Features = packed struct(u32) { + nesting: bool = false, + not_selector_list: bool = false, + dir_selector: bool = false, + lang_selector_list: bool = false, + is_selector: bool = false, + text_decoration_thickness_percent: bool = false, + media_interval_syntax: bool = false, + media_range_syntax: bool = false, + custom_media_queries: bool = false, + clamp_function: bool = false, + color_function: bool = false, + oklab_colors: bool = false, + lab_colors: bool = false, + p3_colors: bool = false, + hex_alpha_colors: bool = false, + space_separated_color_notation: bool = false, + font_family_system_ui: bool = false, + double_position_gradients: bool = false, + vendor_prefixes: bool = false, + logical_properties: bool = false, + __unused: u12 = 0, + + pub const selectors = Features.fromNames(&.{ "nesting", "not_selector_list", "dir_selector", "lang_selector_list", "is_selector" }); + pub const media_queries = Features.fromNames(&.{ "media_interval_syntax", "media_range_syntax", "custom_media_queries" }); + pub const colors = Features.fromNames(&.{ "color_function", "oklab_colors", "lab_colors", "p3_colors", "hex_alpha_colors", "space_separated_color_notation" }); + + pub usingnamespace css.Bitflags(@This()); + pub usingnamespace FeaturesImpl(@This()); +}; + +pub fn BrowsersImpl(comptime T: type) type { + _ = T; // autofix + return struct {}; +} + +pub fn FeaturesImpl(comptime T: type) type { + _ = T; // autofix + return struct {}; +} diff --git a/src/css/values/alpha.zig b/src/css/values/alpha.zig new file mode 100644 index 0000000000..531e718b52 --- /dev/null +++ b/src/css/values/alpha.zig @@ -0,0 +1,63 @@ +const std = @import("std"); +const bun = @import("root").bun; +pub const css = @import("../css_parser.zig"); +const Result = css.Result; +const ArrayList = std.ArrayListUnmanaged; +const Printer = css.Printer; +const PrintErr = css.PrintErr; +const CSSNumber = css.css_values.number.CSSNumber; +const CSSNumberFns = css.css_values.number.CSSNumberFns; +const Calc = css.css_values.calc.Calc; +const DimensionPercentage = css.css_values.percentage.DimensionPercentage; +const LengthPercentage = css.css_values.length.LengthPercentage; +const Length = css.css_values.length.Length; +const Percentage = css.css_values.percentage.Percentage; +const CssColor = css.css_values.color.CssColor; +const Image = css.css_values.image.Image; +const Url = css.css_values.url.Url; +const CSSInteger = css.css_values.number.CSSInteger; +const CSSIntegerFns = css.css_values.number.CSSIntegerFns; +const Angle = css.css_values.angle.Angle; +const Time = css.css_values.time.Time; +const Resolution = css.css_values.resolution.Resolution; +const CustomIdent = css.css_values.ident.CustomIdent; +const CustomIdentFns = css.css_values.ident.CustomIdentFns; +const Ident = css.css_values.ident.Ident; +const NumberOrPercentage = css.css_values.percentage.NumberOrPercentage; + +/// A CSS [``](https://www.w3.org/TR/css-color-4/#typedef-alpha-value), +/// used to represent opacity. +/// +/// Parses either a `` or ``, but is always stored and serialized as a number. +pub const AlphaValue = struct { + v: f32, + + pub fn parse(input: *css.Parser) Result(AlphaValue) { + // For some reason NumberOrPercentage.parse makes zls crash, using this instead. 
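+        // This is equivalent to calling `NumberOrPercentage.parse(input)` directly;
+        // `@call` is only a workaround for the tooling issue mentioned above.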
+ const val: NumberOrPercentage = switch (@call(.auto, @field(NumberOrPercentage, "parse"), .{input})) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + const final = switch (val) { + .percentage => |percent| AlphaValue{ .v = percent.v }, + .number => |num| AlphaValue{ .v = num }, + }; + return .{ .result = final }; + } + + pub fn toCss(this: *const AlphaValue, comptime W: type, dest: *css.Printer(W)) css.PrintErr!void { + return CSSNumberFns.toCss(&this.v, W, dest); + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn hash(this: *const @This(), hasher: *std.hash.Wyhash) void { + return css.implementHash(@This(), this, hasher); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } +}; diff --git a/src/css/values/angle.zig b/src/css/values/angle.zig new file mode 100644 index 0000000000..ee207c553a --- /dev/null +++ b/src/css/values/angle.zig @@ -0,0 +1,302 @@ +const std = @import("std"); +const bun = @import("root").bun; +pub const css = @import("../css_parser.zig"); +const Result = css.Result; +const ArrayList = std.ArrayListUnmanaged; +const Printer = css.Printer; +const PrintErr = css.PrintErr; +const CSSNumber = css.css_values.number.CSSNumber; +const CSSNumberFns = css.css_values.number.CSSNumberFns; +const Calc = css.css_values.calc.Calc; + +const Tag = enum(u8) { + deg = 1, + rad = 2, + grad = 4, + turn = 8, +}; + +/// A CSS [``](https://www.w3.org/TR/css-values-4/#angles) value. +/// +/// Angles may be explicit or computed by `calc()`, but are always stored and serialized +/// as their computed value. +pub const Angle = union(Tag) { + /// An angle in degrees. There are 360 degrees in a full circle. + deg: CSSNumber, + /// An angle in radians. There are 2π radians in a full circle. + rad: CSSNumber, + /// An angle in gradians. There are 400 gradians in a full circle. + grad: CSSNumber, + /// An angle in turns. There is 1 turn in a full circle. + turn: CSSNumber, + + // ~toCssImpl + const This = @This(); + + pub fn parse(input: *css.Parser) Result(Angle) { + return Angle.parseInternal(input, false); + } + + fn parseInternal(input: *css.Parser, allow_unitless_zero: bool) Result(Angle) { + if (input.tryParse(Calc(Angle).parse, .{}).asValue()) |calc_value| { + if (calc_value == .value) return .{ .result = calc_value.value.* }; + // Angles are always compatible, so they will always compute to a value. 
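+            // If the calc() did not already reduce to a concrete angle above, there is
+            // nothing left to represent, so treat the expression as invalid.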
+ return .{ .err = input.newCustomError(css.ParserError.invalid_value) }; + } + + const location = input.currentSourceLocation(); + const token = switch (input.next()) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + switch (token.*) { + .dimension => |*dim| { + const value = dim.num.value; + const unit = dim.unit; + // todo_stuff.match_ignore_ascii_case + if (bun.strings.eqlCaseInsensitiveASCIIICheckLength("deg", unit)) { + return .{ .result = Angle{ .deg = value } }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength("grad", unit)) { + return .{ .result = Angle{ .grad = value } }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength("turn", unit)) { + return .{ .result = Angle{ .turn = value } }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength("rad", unit)) { + return .{ .result = Angle{ .rad = value } }; + } else { + return .{ .err = location.newUnexpectedTokenError(token.*) }; + } + }, + .number => |num| { + if (num.value == 0.0 and allow_unitless_zero) return .{ .result = Angle.zero() }; + }, + else => {}, + } + return .{ .err = location.newUnexpectedTokenError(token.*) }; + } + + pub fn parseWithUnitlessZero(input: *css.Parser) Result(Angle) { + return Angle.parseInternal(input, true); + } + + pub fn toCss(this: *const This, comptime W: type, dest: *Printer(W)) PrintErr!void { + const value, const unit = switch (this.*) { + .deg => |val| .{ val, "deg" }, + .grad => |val| .{ val, "grad" }, + .rad => |val| brk: { + const deg = this.toDegrees(); + + // We print 5 digits of precision by default. + // Switch to degrees if there are an even number of them. + if (css.fract(std.math.round(deg * 100000.0)) == 0) { + break :brk .{ val, "deg" }; + } else { + break :brk .{ val, "rad" }; + } + }, + .turn => |val| .{ val, "turn" }, + }; + css.serializer.serializeDimension(value, unit, W, dest) catch return dest.addFmtError(); + } + + pub fn toCssWithUnitlessZero(this: *const This, comptime W: type, dest: *Printer(W)) PrintErr!void { + if (this.isZero()) { + const v: f32 = 0.0; + try CSSNumberFns.toCss(&v, W, dest); + } else { + return this.toCss(W, dest); + } + } + + pub fn tryFromAngle(angle: Angle) ?This { + return angle; + } + + pub fn tryFromToken(token: *const css.Token) css.Maybe(Angle, void) { + if (token.* == .dimension) { + const value = token.dimension.num.value; + const unit = token.dimension.unit; + if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(unit, "deg")) { + return .{ .result = .{ .deg = value } }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(unit, "grad")) { + return .{ .result = .{ .grad = value } }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(unit, "turn")) { + return .{ .result = .{ .turn = value } }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(unit, "rad")) { + return .{ .result = .{ .rad = value } }; + } + } + return .{ .err = {} }; + } + + /// Returns the angle in radians. + pub fn toRadians(this: *const Angle) CSSNumber { + const RAD_PER_DEG: f32 = std.math.pi / 180.0; + return switch (this.*) { + .deg => |deg| return deg * RAD_PER_DEG, + .rad => |rad| return rad, + .grad => |grad| return grad * 180.0 / 200.0 * RAD_PER_DEG, + .turn => |turn| return turn * 360.0 * RAD_PER_DEG, + }; + } + + /// Returns the angle in degrees. 
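+    /// e.g. `Angle{ .turn = 0.5 }` and `Angle{ .grad = 200 }` both convert to `180.0`.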
+ pub fn toDegrees(this: *const Angle) CSSNumber { + const DEG_PER_RAD: f32 = 180.0 / std.math.pi; + switch (this.*) { + .deg => |deg| return deg, + .rad => |rad| return rad * DEG_PER_RAD, + .grad => |grad| return grad * 180.0 / 200.0, + .turn => |turn| return turn * 360.0, + } + } + + pub fn zero() Angle { + return .{ .deg = 0.0 }; + } + + pub fn isZero(this: *const Angle) bool { + const v = switch (this.*) { + .deg => |deg| deg, + .rad => |rad| rad, + .grad => |grad| grad, + .turn => |turn| turn, + }; + return v == 0.0; + } + + pub fn intoCalc(this: *const Angle, allocator: std.mem.Allocator) Calc(Angle) { + return Calc(Angle){ + .value = bun.create(allocator, Angle, this.*), + }; + } + + pub fn map(this: *const Angle, comptime opfn: *const fn (f32) f32) Angle { + return switch (this.*) { + .deg => |deg| .{ .deg = opfn(deg) }, + .rad => |rad| .{ .rad = opfn(rad) }, + .grad => |grad| .{ .grad = opfn(grad) }, + .turn => |turn| .{ .turn = opfn(turn) }, + }; + } + + pub fn tryMap(this: *const Angle, comptime opfn: *const fn (f32) f32) ?Angle { + return map(this, opfn); + } + + pub fn add(this: Angle, rhs: Angle) Angle { + const addfn = struct { + pub fn add(_: void, a: f32, b: f32) f32 { + return a + b; + } + }; + return Angle.op(&this, &rhs, {}, addfn.add); + } + + pub fn tryAdd(this: *const Angle, _: std.mem.Allocator, rhs: *const Angle) ?Angle { + return .{ .deg = this.toDegrees() + rhs.toDegrees() }; + } + + pub fn eql(lhs: *const Angle, rhs: *const Angle) bool { + return lhs.toDegrees() == rhs.toDegrees(); + } + + pub fn mulF32(this: Angle, _: std.mem.Allocator, other: f32) Angle { + // return Angle.op(&this, &other, Angle.mulF32); + return switch (this) { + .deg => |v| .{ .deg = v * other }, + .rad => |v| .{ .rad = v * other }, + .grad => |v| .{ .grad = v * other }, + .turn => |v| .{ .turn = v * other }, + }; + } + + pub fn partialCmp(this: *const Angle, other: *const Angle) ?std.math.Order { + return css.generic.partialCmpF32(&this.toDegrees(), &other.toDegrees()); + } + + pub fn tryOp( + this: *const Angle, + other: *const Angle, + ctx: anytype, + comptime op_fn: *const fn (@TypeOf(ctx), a: f32, b: f32) f32, + ) ?Angle { + return Angle.op(this, other, ctx, op_fn); + } + + pub fn tryOpTo( + this: *const Angle, + other: *const Angle, + comptime R: type, + ctx: anytype, + comptime op_fn: *const fn (@TypeOf(ctx), a: f32, b: f32) R, + ) ?R { + return Angle.opTo(this, other, R, ctx, op_fn); + } + + pub fn op( + this: *const Angle, + other: *const Angle, + ctx: anytype, + comptime op_fn: *const fn (@TypeOf(ctx), a: f32, b: f32) f32, + ) Angle { + // PERF: not sure if this is faster + const self_tag: u8 = @intFromEnum(this.*); + const other_tag: u8 = @intFromEnum(this.*); + const DEG: u8 = @intFromEnum(Tag.deg); + const GRAD: u8 = @intFromEnum(Tag.grad); + const RAD: u8 = @intFromEnum(Tag.rad); + const TURN: u8 = @intFromEnum(Tag.turn); + + const switch_val: u8 = self_tag | other_tag; + return switch (switch_val) { + DEG | DEG => Angle{ .deg = op_fn(ctx, this.deg, other.deg) }, + RAD | RAD => Angle{ .rad = op_fn(ctx, this.rad, other.rad) }, + GRAD | GRAD => Angle{ .grad = op_fn(ctx, this.grad, other.grad) }, + TURN | TURN => Angle{ .turn = op_fn(ctx, this.turn, other.turn) }, + else => Angle{ .deg = op_fn(ctx, this.toDegrees(), other.toDegrees()) }, + }; + } + + pub fn opTo( + this: *const Angle, + other: *const Angle, + comptime T: type, + ctx: anytype, + comptime op_fn: *const fn (@TypeOf(ctx), a: f32, b: f32) T, + ) T { + // PERF: not sure if this is faster + const self_tag: u8 = 
@intFromEnum(this.*); + const other_tag: u8 = @intFromEnum(this.*); + const DEG: u8 = @intFromEnum(Tag.deg); + const GRAD: u8 = @intFromEnum(Tag.grad); + const RAD: u8 = @intFromEnum(Tag.rad); + const TURN: u8 = @intFromEnum(Tag.turn); + + const switch_val: u8 = self_tag | other_tag; + return switch (switch_val) { + DEG | DEG => op_fn(ctx, this.deg, other.deg), + RAD | RAD => op_fn(ctx, this.rad, other.rad), + GRAD | GRAD => op_fn(ctx, this.grad, other.grad), + TURN | TURN => op_fn(ctx, this.turn, other.turn), + else => op_fn(ctx, this.toDegrees(), other.toDegrees()), + }; + } + + pub fn sign(this: *const Angle) f32 { + return switch (this.*) { + .deg, .rad, .grad, .turn => |v| CSSNumberFns.sign(&v), + }; + } + + pub fn hash(this: *const @This(), hasher: *std.hash.Wyhash) void { + return css.implementHash(@This(), this, hasher); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } +}; + +/// A CSS [``](https://www.w3.org/TR/css-values-4/#typedef-angle-percentage) value. +/// May be specified as either an angle or a percentage that resolves to an angle. +pub const AnglePercentage = css.css_values.percentage.DimensionPercentage(Angle); diff --git a/src/css/values/calc.zig b/src/css/values/calc.zig new file mode 100644 index 0000000000..cf4d213738 --- /dev/null +++ b/src/css/values/calc.zig @@ -0,0 +1,1850 @@ +const std = @import("std"); +const bun = @import("root").bun; +const Allocator = std.mem.Allocator; +pub const css = @import("../css_parser.zig"); +const Result = css.Result; +const ArrayList = std.ArrayListUnmanaged; +const Printer = css.Printer; +const PrintErr = css.PrintErr; +const Angle = css.css_values.angle.Angle; +const Length = css.css_values.length.Length; +const LengthValue = css.css_values.length.LengthValue; +const Percentage = css.css_values.percentage.Percentage; +const DimensionPercentage = css.css_values.percentage.DimensionPercentage; +const Time = css.css_values.time.Time; + +const CSSNumber = css.css_values.number.CSSNumber; +const CSSNumberFns = css.css_values.number.CSSNumberFns; + +const eql = css.generic.eql; +const deepClone = css.deepClone; + +pub fn needsDeinit(comptime V: type) bool { + return switch (V) { + Length => true, + DimensionPercentage(Angle) => true, + DimensionPercentage(LengthValue) => true, + Percentage => false, + Angle => false, + Time => false, + f32 => false, + else => @compileError("Can't tell if " ++ @typeName(V) ++ " needs deinit, please add it to the switch statement."), + }; +} + +pub fn needsDeepclone(comptime V: type) bool { + return switch (V) { + Length => true, + DimensionPercentage(Angle) => true, + DimensionPercentage(LengthValue) => true, + Percentage => false, + Angle => false, + Time => false, + f32 => false, + else => @compileError("Can't tell if " ++ @typeName(V) ++ " needs deepclone, please add it to the switch statement."), + }; +} + +/// A mathematical expression used within the `calc()` function. +/// +/// This type supports generic value types. Values such as `Length`, `Percentage`, +/// `Time`, and `Angle` support `calc()` expressions. +pub fn Calc(comptime V: type) type { + const needs_deinit = needsDeinit(V); + const needs_deepclone = needsDeepclone(V); + + return union(Tag) { + /// A literal value. + /// PERF: this pointer feels unnecessary if V is small + value: *V, + /// A literal number. + number: CSSNumber, + /// A sum of two calc expressions. 
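+        /// e.g. a `calc(1px + 2em)` expression that cannot be folded into a single
+        /// value is stored as a sum of its two sub-expressions.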
+ sum: struct { + left: *Calc(V), + right: *Calc(V), + }, + /// A product of a number and another calc expression. + product: struct { + number: CSSNumber, + expression: *Calc(V), + }, + /// A math function, such as `calc()`, `min()`, or `max()`. + function: *MathFunction(V), + + const Tag = enum(u8) { + /// A literal value. + value = 1, + /// A literal number. + number = 2, + /// A sum of two calc expressions. + sum = 4, + /// A product of a number and another calc expression. + product = 8, + /// A math function, such as `calc()`, `min()`, or `max()`. + function = 16, + }; + + const This = @This(); + + pub fn deepClone(this: *const This, allocator: Allocator) This { + return switch (this.*) { + .value => |v| { + return .{ + .value = bun.create( + allocator, + V, + if (needs_deepclone) v.deepClone(allocator) else v.*, + ), + }; + }, + .number => this.*, + .sum => |sum| { + return .{ .sum = .{ + .left = bun.create(allocator, This, sum.left.deepClone(allocator)), + .right = bun.create(allocator, This, sum.right.deepClone(allocator)), + } }; + }, + .product => |product| { + return .{ + .product = .{ + .number = product.number, + .expression = bun.create(allocator, This, product.expression.deepClone(allocator)), + }, + }; + }, + .function => |function| { + return .{ + .function = bun.create( + allocator, + MathFunction(V), + function.deepClone(allocator), + ), + }; + }, + }; + } + + pub fn deinit(this: *This, allocator: Allocator) void { + return switch (this.*) { + .value => |v| { + if (comptime needs_deinit) { + v.deinit(allocator); + } + allocator.destroy(this.value); + }, + .number => {}, + .sum => |sum| { + sum.left.deinit(allocator); + sum.right.deinit(allocator); + allocator.destroy(sum.left); + allocator.destroy(sum.right); + }, + .product => |product| { + product.expression.deinit(allocator); + allocator.destroy(product.expression); + }, + .function => |function| { + function.deinit(allocator); + allocator.destroy(function); + }, + }; + } + + pub fn eql(this: *const @This(), other: *const @This()) bool { + return switch (this.*) { + .value => |a| return other.* == .value and css.generic.eql(V, a, other.value), + .number => |*a| return other.* == .number and css.generic.eql(f32, a, &other.number), + .sum => |s| return other.* == .sum and s.left.eql(other.sum.left) and s.right.eql(other.sum.right), + .product => |p| return other.* == .product and p.number == other.product.number and p.expression.eql(other.product.expression), + .function => |f| return other.* == .function and f.eql(other.function), + }; + } + + fn mulValueF32(lhs: V, allocator: Allocator, rhs: f32) V { + return switch (V) { + f32 => lhs * rhs, + else => lhs.mulF32(allocator, rhs), + }; + } + + // TODO: addValueOwned + fn addValue(allocator: Allocator, lhs: V, rhs: V) V { + return switch (V) { + f32 => return lhs + rhs, + Angle => return lhs.add(rhs), + // CSSNumber => return lhs.add(rhs), + Length => return lhs.add(allocator, rhs), + Percentage => return lhs.add(allocator, rhs), + Time => return lhs.add(allocator, rhs), + else => lhs.add(allocator, rhs), + }; + } + + // TODO: intoValueOwned + fn intoValue(this: @This(), allocator: std.mem.Allocator) V { + switch (V) { + Angle => return switch (this) { + .value => |v| v.*, + // TODO: give a better error message + else => bun.unreachablePanic("", .{}), + }, + CSSNumber => return switch (this) { + .value => |v| v.*, + .number => |n| n, + // TODO: give a better error message + else => bun.unreachablePanic("", .{}), + }, + Length => return Length{ + .calc = 
bun.create(allocator, Calc(Length), this), + }, + Percentage => return switch (this) { + .value => |v| v.*, + // TODO: give a better error message + else => bun.unreachablePanic("", .{}), + }, + Time => return switch (this) { + .value => |v| v.*, + // TODO: give a better error message + else => bun.unreachablePanic("", .{}), + }, + DimensionPercentage(LengthValue) => return DimensionPercentage(LengthValue){ .calc = bun.create( + allocator, + Calc(DimensionPercentage(LengthValue)), + this, + ) }, + DimensionPercentage(Angle) => return DimensionPercentage(Angle){ .calc = bun.create( + allocator, + Calc(DimensionPercentage(Angle)), + this, + ) }, + else => @compileError("Unimplemented, intoValue() for V = " ++ @typeName(V)), + } + } + + // TODO: change to addOwned() + pub fn add(this: @This(), allocator: std.mem.Allocator, rhs: @This()) @This() { + if (this == .value and rhs == .value) { + // PERF: we can reuse the allocation here + return .{ .value = bun.create(allocator, V, addValue(allocator, this.value.*, rhs.value.*)) }; + } else if (this == .number and rhs == .number) { + return .{ .number = this.number + rhs.number }; + } else if (this == .value) { + // PERF: we can reuse the allocation here + return .{ .value = bun.create(allocator, V, addValue(allocator, this.value.*, intoValue(rhs, allocator))) }; + } else if (rhs == .value) { + // PERF: we can reuse the allocation here + return .{ .value = bun.create(allocator, V, addValue(allocator, intoValue(this, allocator), rhs.value.*)) }; + } else if (this == .function) { + return This{ + .sum = .{ + .left = bun.create(allocator, This, this), + .right = bun.create(allocator, This, rhs), + }, + }; + } else if (rhs == .function) { + return This{ + .sum = .{ + .left = bun.create(allocator, This, this), + .right = bun.create(allocator, This, rhs), + }, + }; + } else { + return .{ .value = bun.create( + allocator, + V, + addValue(allocator, intoValue(this, allocator), intoValue(rhs, allocator)), + ) }; + } + } + + // TODO: users of this and `parseWith` don't need the pointer and often throwaway heap allocated values immediately + // use temp allocator or something? 
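+        // `parse` simply delegates to `parseWith` with an identifier parser that
+        // always returns null, so no context-specific identifiers are accepted.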
+ pub fn parse(input: *css.Parser) Result(This) { + const Fn = struct { + pub fn parseWithFn(_: void, _: []const u8) ?This { + return null; + } + }; + return parseWith(input, {}, Fn.parseWithFn); + } + + const CalcUnit = enum { + abs, + acos, + asin, + atan, + atan2, + calc, + clamp, + cos, + exp, + hypot, + log, + max, + min, + mod, + pow, + rem, + round, + sign, + sin, + sqrt, + tan, + + pub const Map = bun.ComptimeEnumMap(CalcUnit); + }; + + pub fn parseWith( + input: *css.Parser, + ctx: anytype, + comptime parseIdent: *const fn (@TypeOf(ctx), []const u8) ?This, + ) Result(This) { + const location = input.currentSourceLocation(); + const f = switch (input.expectFunction()) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + + switch (CalcUnit.Map.getAnyCase(f) orelse return .{ .err = location.newUnexpectedTokenError(.{ .ident = f }) }) { + .calc => { + const Closure = struct { + ctx: @TypeOf(ctx), + pub fn parseNestedBlockFn(self: *@This(), i: *css.Parser) Result(This) { + return This.parseSum(i, self.ctx, parseIdent); + } + }; + var closure = Closure{ .ctx = ctx }; + const calc = switch (input.parseNestedBlock(This, &closure, Closure.parseNestedBlockFn)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + if (calc == .value or calc == .number) return .{ .result = calc }; + return .{ .result = Calc(V){ + .function = bun.create( + input.allocator(), + MathFunction(V), + MathFunction(V){ .calc = calc }, + ), + } }; + }, + .min => { + const Closure = struct { + ctx: @TypeOf(ctx), + pub fn parseNestedBlockFn(self: *@This(), i: *css.Parser) Result(ArrayList(This)) { + return i.parseCommaSeparatedWithCtx(This, self, @This().parseOne); + } + pub fn parseOne(self: *@This(), i: *css.Parser) Result(This) { + return This.parseSum(i, self.ctx, parseIdent); + } + }; + var closure = Closure{ .ctx = ctx }; + var reduced = switch (input.parseNestedBlock(ArrayList(This), &closure, Closure.parseNestedBlockFn)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + // PERF(alloc): i don't like this additional allocation + // can we use stack fallback here if the common case is that there will be 1 argument? 
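+                    // e.g. the arguments of `min(1px, 1em, 2px)` reduce to `min(1px, 1em)`:
+                    // 1px and 2px are comparable, and only the smaller of the two can win.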
+ This.reduceArgs(input.allocator(), &reduced, std.math.Order.lt); + // var reduced: ArrayList(This) = This.reduceArgs(&args, std.math.Order.lt); + if (reduced.items.len == 1) { + defer reduced.deinit(input.allocator()); + return .{ .result = reduced.swapRemove(0) }; + } + return .{ .result = This{ + .function = bun.create( + input.allocator(), + MathFunction(V), + MathFunction(V){ .min = reduced }, + ), + } }; + }, + .max => { + const Closure = struct { + ctx: @TypeOf(ctx), + pub fn parseNestedBlockFn(self: *@This(), i: *css.Parser) Result(ArrayList(This)) { + return i.parseCommaSeparatedWithCtx(This, self, @This().parseOne); + } + pub fn parseOne(self: *@This(), i: *css.Parser) Result(This) { + return This.parseSum(i, self.ctx, parseIdent); + } + }; + var closure = Closure{ .ctx = ctx }; + var reduced = switch (input.parseNestedBlock(ArrayList(This), &closure, Closure.parseNestedBlockFn)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + // PERF: i don't like this additional allocation + This.reduceArgs(input.allocator(), &reduced, std.math.Order.gt); + // var reduced: ArrayList(This) = This.reduceArgs(&args, std.math.Order.gt); + if (reduced.items.len == 1) { + return .{ .result = reduced.orderedRemove(0) }; + } + return .{ .result = This{ + .function = bun.create( + input.allocator(), + MathFunction(V), + MathFunction(V){ .max = reduced }, + ), + } }; + }, + .clamp => { + const ClosureResult = struct { ?This, This, ?This }; + const Closure = struct { + ctx: @TypeOf(ctx), + + pub fn parseNestedBlock(self: *@This(), i: *css.Parser) Result(ClosureResult) { + const min = switch (This.parseSum(i, self, parseIdentWrapper)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + if (i.expectComma().asErr()) |e| return .{ .err = e }; + const center = switch (This.parseSum(i, self, parseIdentWrapper)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + if (i.expectComma().asErr()) |e| return .{ .err = e }; + const max = switch (This.parseSum(i, self, parseIdentWrapper)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + return .{ .result = .{ min, center, max } }; + } + + pub fn parseIdentWrapper(self: *@This(), ident: []const u8) ?This { + return parseIdent(self.ctx, ident); + } + }; + var closure = Closure{ + .ctx = ctx, + }; + var min, var center, var max = switch (input.parseNestedBlock( + ClosureResult, + &closure, + Closure.parseNestedBlock, + )) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + + // According to the spec, the minimum should "win" over the maximum if they are in the wrong order. + const cmp = if (max != null and max.? == .value and center == .value) + css.generic.partialCmp(V, center.value, max.?.value) + else + null; + + // If center is known to be greater than the maximum, replace it with maximum and remove the max argument. + // Otherwise, if center is known to be less than the maximum, remove the max argument. 
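+                    // e.g. for `clamp(10px, 30px, 20px)` the center (30px) is known to exceed
+                    // the maximum (20px), so the center is replaced by 20px and the max arm is dropped.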
+ if (cmp) |cmp_val| { + if (cmp_val == std.math.Order.gt) { + const val = max.?; + center = val; + max = null; + } else { + min = null; + } + } + + const switch_val: u8 = (@as(u8, @intFromBool(min != null)) << 1) | (@as(u8, @intFromBool(min != null))); + // switch (min, max) + return .{ .result = switch (switch_val) { + 0b00 => center, + 0b10 => This{ + .function = bun.create( + input.allocator(), + MathFunction(V), + MathFunction(V){ + .max = arr2( + input.allocator(), + min.?, + center, + ), + }, + ), + }, + 0b01 => This{ + .function = bun.create( + input.allocator(), + MathFunction(V), + MathFunction(V){ + .min = arr2( + input.allocator(), + max.?, + center, + ), + }, + ), + }, + 0b11 => This{ + .function = bun.create( + input.allocator(), + MathFunction(V), + MathFunction(V){ + .clamp = .{ + .min = min.?, + .center = center, + .max = max.?, + }, + }, + ), + }, + else => unreachable, + } }; + }, + .round => { + const Closure = struct { + ctx: @TypeOf(ctx), + pub fn parseNestedBlockFn(self: *@This(), i: *css.Parser) Result(This) { + const strategy = if (i.tryParse(RoundingStrategy.parse, .{}).asValue()) |s| brk: { + if (i.expectComma().asErr()) |e| return .{ .err = e }; + break :brk s; + } else RoundingStrategy.default(); + + const OpAndFallbackCtx = struct { + strategy: RoundingStrategy, + + pub fn op(this: *const @This(), a: f32, b: f32) f32 { + return round({}, a, b, this.strategy); + } + + pub fn fallback(this: *const @This(), a: This, b: This) MathFunction(V) { + return MathFunction(V){ + .round = .{ + .strategy = this.strategy, + .value = a, + .interval = b, + }, + }; + } + }; + var ctx_for_op_and_fallback = OpAndFallbackCtx{ + .strategy = strategy, + }; + return This.parseMathFn( + i, + &ctx_for_op_and_fallback, + OpAndFallbackCtx.op, + OpAndFallbackCtx.fallback, + self.ctx, + parseIdent, + ); + } + }; + var closure = Closure{ + .ctx = ctx, + }; + return input.parseNestedBlock(This, &closure, Closure.parseNestedBlockFn); + }, + .rem => { + const Closure = struct { + ctx: @TypeOf(ctx), + + pub fn parseNestedBlockFn(self: *@This(), i: *css.Parser) Result(This) { + return This.parseMathFn( + i, + {}, + @This().rem, + mathFunctionRem, + self.ctx, + parseIdent, + ); + } + + pub fn rem(_: void, a: f32, b: f32) f32 { + return @mod(a, b); + } + pub fn mathFunctionRem(_: void, a: This, b: This) MathFunction(V) { + return MathFunction(V){ + .rem = .{ + .dividend = a, + .divisor = b, + }, + }; + } + }; + var closure = Closure{ + .ctx = ctx, + }; + return input.parseNestedBlock(This, &closure, Closure.parseNestedBlockFn); + }, + .mod => { + const Closure = struct { + ctx: @TypeOf(ctx), + + pub fn parseNestedBlockFn(self: *@This(), i: *css.Parser) Result(This) { + return This.parseMathFn( + i, + {}, + @This().modulo, + mathFunctionMod, + self.ctx, + parseIdent, + ); + } + + pub fn modulo(_: void, a: f32, b: f32) f32 { + // return ((a % b) + b) % b; + return @mod((@mod(a, b) + b), b); + } + pub fn mathFunctionMod(_: void, a: This, b: This) MathFunction(V) { + return MathFunction(V){ + .mod_ = .{ + .dividend = a, + .divisor = b, + }, + }; + } + }; + var closure = Closure{ + .ctx = ctx, + }; + return input.parseNestedBlock(This, &closure, Closure.parseNestedBlockFn); + }, + .sin => { + return This.parseTrig(input, .sin, false, ctx, parseIdent); + }, + .cos => { + return This.parseTrig(input, .cos, false, ctx, parseIdent); + }, + .tan => { + return This.parseTrig(input, .tan, false, ctx, parseIdent); + }, + .asin => { + return This.parseTrig(input, .asin, true, ctx, parseIdent); + }, + .acos => { 
+ return This.parseTrig(input, .acos, true, ctx, parseIdent); + }, + .atan => { + return This.parseTrig(input, .atan, true, ctx, parseIdent); + }, + .atan2 => { + const Closure = struct { + ctx: @TypeOf(ctx), + pub fn parseNestedBlockFn(self: *@This(), i: *css.Parser) Result(This) { + const res = switch (This.parseAtan2(i, self.ctx, parseIdent)) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + if (css.generic.tryFromAngle(V, res)) |v| { + return .{ .result = This{ + .value = bun.create( + i.allocator(), + V, + v, + ), + } }; + } + + return .{ .err = i.newCustomError(css.ParserError{ .invalid_value = {} }) }; + } + }; + var closure = Closure{ .ctx = ctx }; + return input.parseNestedBlock(This, &closure, Closure.parseNestedBlockFn); + }, + .pow => { + const Closure = struct { + ctx: @TypeOf(ctx), + pub fn parseNestedBlockFn(self: *@This(), i: *css.Parser) Result(This) { + const a = switch (This.parseNumeric(i, self.ctx, parseIdent)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + + if (i.expectComma().asErr()) |e| return .{ .err = e }; + + const b = switch (This.parseNumeric(i, self.ctx, parseIdent)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + + return .{ .result = This{ + .number = bun.powf(a, b), + } }; + } + }; + var closure = Closure{ .ctx = ctx }; + return input.parseNestedBlock(This, &closure, Closure.parseNestedBlockFn); + }, + .log => { + const Closure = struct { + ctx: @TypeOf(ctx), + pub fn parseNestedBlockFn(self: *@This(), i: *css.Parser) Result(This) { + const value = switch (This.parseNumeric(i, self.ctx, parseIdent)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + if (i.tryParse(css.Parser.expectComma, .{}).isOk()) { + const base = switch (This.parseNumeric(i, self.ctx, parseIdent)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + return .{ .result = This{ .number = std.math.log(f32, base, value) } }; + } + return .{ .result = This{ .number = std.math.log(f32, std.math.e, value) } }; + } + }; + var closure = Closure{ .ctx = ctx }; + return input.parseNestedBlock(This, &closure, Closure.parseNestedBlockFn); + }, + .sqrt => { + return This.parseNumericFn(input, .sqrt, ctx, parseIdent); + }, + .exp => { + return This.parseNumericFn(input, .exp, ctx, parseIdent); + }, + .hypot => { + const Closure = struct { + ctx: @TypeOf(ctx), + pub fn parseNestedBlockFn(self: *@This(), i: *css.Parser) Result(This) { + var args = switch (i.parseCommaSeparatedWithCtx(This, self, parseOne)) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + const val = switch (This.parseHypot(i.allocator(), &args)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + + if (val) |v| return .{ .result = v }; + + return .{ .result = This{ + .function = bun.create( + i.allocator(), + MathFunction(V), + MathFunction(V){ .hypot = args }, + ), + } }; + } + + pub fn parseOne(self: *@This(), i: *css.Parser) Result(This) { + return This.parseSum(i, self.ctx, parseIdent); + } + }; + var closure = Closure{ .ctx = ctx }; + return input.parseNestedBlock(This, &closure, Closure.parseNestedBlockFn); + }, + .abs => { + const Closure = struct { + ctx: @TypeOf(ctx), + pub fn parseNestedBlockFn(self: *@This(), i: *css.Parser) Result(This) { + const v = switch (This.parseSum(i, self.ctx, parseIdent)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + return .{ + .result = if (This.applyMap(&v, i.allocator(), absf)) |vv| vv else This{ + .function = bun.create( + i.allocator(), + MathFunction(V), + 
MathFunction(V){ .abs = v }, + ), + }, + }; + } + }; + var closure = Closure{ .ctx = ctx }; + return input.parseNestedBlock(This, &closure, Closure.parseNestedBlockFn); + }, + .sign => { + const Closure = struct { + ctx: @TypeOf(ctx), + pub fn parseNestedBlockFn(self: *@This(), i: *css.Parser) Result(This) { + const v = switch (This.parseSum(i, self.ctx, parseIdent)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + switch (v) { + .number => |*n| return .{ .result = This{ .number = std.math.sign(n.*) } }, + .value => |v2| { + const MapFn = struct { + pub fn sign(s: f32) f32 { + return std.math.sign(s); + } + }; + // First map so we ignore percentages, which must be resolved to their + // computed value in order to determine the sign. + if (css.generic.tryMap(V, v2, MapFn.sign)) |new_v| { + // sign() alwasy resolves to a number. + return .{ + .result = This{ + // .number = css.generic.trySign(V, &new_v) orelse bun.unreachablePanic("sign always resolved to a number.", .{}), + .number = css.generic.trySign(V, &new_v) orelse @panic("sign() always resolves to a number."), + }, + }; + } + }, + else => {}, + } + + return .{ .result = This{ + .function = bun.create( + i.allocator(), + MathFunction(V), + MathFunction(V){ .sign = v }, + ), + } }; + } + }; + var closure = Closure{ .ctx = ctx }; + return input.parseNestedBlock(This, &closure, Closure.parseNestedBlockFn); + }, + } + } + + pub fn parseNumericFn(input: *css.Parser, comptime op: enum { sqrt, exp }, ctx: anytype, comptime parse_ident: *const fn (@TypeOf(ctx), []const u8) ?This) Result(This) { + const Closure = struct { ctx: @TypeOf(ctx) }; + var closure = Closure{ .ctx = ctx }; + return input.parseNestedBlock(This, &closure, struct { + pub fn parseNestedBlockFn(self: *Closure, i: *css.Parser) Result(This) { + const v = switch (This.parseNumeric(i, self.ctx, parse_ident)) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + + return .{ + .result = Calc(V){ + .number = switch (op) { + .sqrt => std.math.sqrt(v), + .exp => std.math.exp(v), + }, + }, + }; + } + }.parseNestedBlockFn); + } + + pub fn parseMathFn( + input: *css.Parser, + ctx_for_op_and_fallback: anytype, + comptime op: *const fn (@TypeOf(ctx_for_op_and_fallback), f32, f32) f32, + comptime fallback: *const fn (@TypeOf(ctx_for_op_and_fallback), This, This) MathFunction(V), + ctx_for_parse_ident: anytype, + comptime parse_ident: *const fn (@TypeOf(ctx_for_parse_ident), []const u8) ?This, + ) Result(This) { + const a = switch (This.parseSum(input, ctx_for_parse_ident, parse_ident)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + if (input.expectComma().asErr()) |e| return .{ .err = e }; + const b = switch (This.parseSum(input, ctx_for_parse_ident, parse_ident)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + + const val = This.applyOp(&a, &b, input.allocator(), ctx_for_op_and_fallback, op) orelse This{ + .function = bun.create( + input.allocator(), + MathFunction(V), + fallback(ctx_for_op_and_fallback, a, b), + ), + }; + + return .{ .result = val }; + } + + pub fn parseSum( + input: *css.Parser, + ctx: anytype, + comptime parse_ident: *const fn (@TypeOf(ctx), []const u8) ?This, + ) Result(This) { + var cur = switch (This.parseProduct(input, ctx, parse_ident)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + while (true) { + const start = input.state(); + const tok = switch (input.nextIncludingWhitespace()) { + .result => |vv| vv, + .err => { + input.reset(&start); + break; + }, + }; + + if (tok.* == 
.whitespace) { + if (input.isExhausted()) { + break; // allow trailing whitespace + } + const next_tok = switch (input.next()) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + if (next_tok.* == .delim and next_tok.delim == '+') { + const next = switch (Calc(V).parseProduct(input, ctx, parse_ident)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + cur = cur.add(input.allocator(), next); + } else if (next_tok.* == .delim and next_tok.delim == '-') { + var rhs = switch (This.parseProduct(input, ctx, parse_ident)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + rhs = rhs.mulF32(input.allocator(), -1.0); + cur = cur.add(input.allocator(), rhs); + } else { + return .{ .err = input.newUnexpectedTokenError(next_tok.*) }; + } + continue; + } + input.reset(&start); + break; + } + + return .{ .result = cur }; + } + + pub fn parseProduct( + input: *css.Parser, + ctx: anytype, + comptime parse_ident: *const fn (@TypeOf(ctx), []const u8) ?This, + ) Result(This) { + var node = switch (This.parseValue(input, ctx, parse_ident)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + while (true) { + const start = input.state(); + const tok = switch (input.next()) { + .result => |vv| vv, + .err => { + input.reset(&start); + break; + }, + }; + + if (tok.* == .delim and tok.delim == '*') { + // At least one of the operands must be a number. + const rhs = switch (This.parseValue(input, ctx, parse_ident)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + if (rhs == .number) { + node = node.mulF32(input.allocator(), rhs.number); + } else if (node == .number) { + const val = node.number; + node = rhs; + node = node.mulF32(input.allocator(), val); + } else { + return .{ .err = input.newUnexpectedTokenError(.{ .delim = '*' }) }; + } + } else if (tok.* == .delim and tok.delim == '/') { + const rhs = switch (This.parseValue(input, ctx, parse_ident)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + if (rhs == .number) { + const val = rhs.number; + node = node.mulF32(input.allocator(), 1.0 / val); + continue; + } + return .{ .err = input.newCustomError(css.ParserError{ .invalid_value = {} }) }; + } else { + input.reset(&start); + break; + } + } + return .{ .result = node }; + } + + pub fn parseValue( + input: *css.Parser, + ctx: anytype, + comptime parse_ident: *const fn (@TypeOf(ctx), []const u8) ?This, + ) Result(This) { + // Parse nested calc() and other math functions. 
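+            // e.g. the inner call in `calc(1px + calc(2px * 2))` is parsed here and
+            // unwrapped so it does not produce a redundant nested function node.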
+ if (input.tryParse(This.parse, .{}).asValue()) |_calc| { + const calc: This = _calc; + switch (calc) { + .function => |f| return switch (f.*) { + .calc => |c| .{ .result = c }, + else => .{ .result = .{ .function = f } }, + }, + else => return .{ .result = calc }, + } + } + + if (input.tryParse(css.Parser.expectParenthesisBlock, .{}).isOk()) { + const Closure = struct { + ctx: @TypeOf(ctx), + pub fn parseNestedBlockFn(self: *@This(), i: *css.Parser) Result(This) { + return This.parseSum(i, self.ctx, parse_ident); + } + }; + var closure = Closure{ + .ctx = ctx, + }; + return input.parseNestedBlock(This, &closure, Closure.parseNestedBlockFn); + } + + if (input.tryParse(css.Parser.expectNumber, .{}).asValue()) |num| { + return .{ .result = .{ .number = num } }; + } + + if (input.tryParse(Constant.parse, .{}).asValue()) |constant| { + return .{ .result = .{ .number = constant.intoF32() } }; + } + + const location = input.currentSourceLocation(); + if (input.tryParse(css.Parser.expectIdent, .{}).asValue()) |ident| { + if (parse_ident(ctx, ident)) |c| { + return .{ .result = c }; + } + + return .{ .err = location.newUnexpectedTokenError(.{ .ident = ident }) }; + } + + const value = switch (input.tryParse(css.generic.parseFor(V), .{})) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + return .{ .result = .{ + .value = bun.create( + input.allocator(), + V, + value, + ), + } }; + } + + pub fn parseTrig( + input: *css.Parser, + comptime trig_fn_kind: enum { + sin, + cos, + tan, + asin, + acos, + atan, + }, + to_angle: bool, + ctx: anytype, + comptime parse_ident: *const fn (@TypeOf(ctx), []const u8) ?This, + ) Result(This) { + const trig_fn = struct { + pub fn run(x: f32) f32 { + const mathfn = comptime switch (trig_fn_kind) { + .sin => std.math.sin, + .cos => std.math.cos, + .tan => std.math.tan, + .asin => std.math.asin, + .acos => std.math.acos, + .atan => std.math.atan, + }; + return mathfn(x); + } + }; + const Closure = struct { + ctx: @TypeOf(ctx), + to_angle: bool, + + pub fn parseNestedBockFn(this: *@This(), i: *css.Parser) Result(This) { + const v = switch (Calc(Angle).parseSum( + i, + this, + @This().parseIdentFn, + )) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + + const rad = rad: { + switch (v) { + .value => |angle| { + if (!this.to_angle) break :rad trig_fn.run(angle.toRadians()); + }, + .number => break :rad trig_fn.run(v.number), + else => {}, + } + return .{ .err = i.newCustomError(css.ParserError{ .invalid_value = {} }) }; + }; + + if (this.to_angle and !std.math.isNan(rad)) { + if (css.generic.tryFromAngle(V, .{ .rad = rad })) |val| { + return .{ .result = .{ + .value = bun.create( + i.allocator(), + V, + val, + ), + } }; + } + return .{ .err = i.newCustomError(css.ParserError{ .invalid_value = {} }) }; + } else { + return .{ .result = .{ .number = rad } }; + } + } + + pub fn parseIdentFn(this: *@This(), ident: []const u8) ?Calc(Angle) { + const v = parse_ident(this.ctx, ident) orelse return null; + if (v == .number) return .{ .number = v.number }; + return null; + } + }; + var closure = Closure{ + .ctx = ctx, + .to_angle = to_angle, + }; + return input.parseNestedBlock(This, &closure, Closure.parseNestedBockFn); + } + + pub fn ParseIdentNone(comptime Ctx: type, comptime Value: type) type { + return struct { + pub fn func(_: Ctx, _: []const u8) ?Calc(Value) { + return null; + } + }; + } + + pub fn parseAtan2( + input: *css.Parser, + ctx: anytype, + comptime parse_ident: *const fn (@TypeOf(ctx), []const u8) ?This, + ) Result(Angle) { + const 
Ctx = @TypeOf(ctx); + + // atan2 supports arguments of any , , or , even ones that wouldn't + // normally be supported by V. The only requirement is that the arguments be of the same type. + // Try parsing with each type, and return the first one that parses successfully. + if (tryParseAtan2Args(Ctx, Length, input, ctx).asValue()) |v| { + return .{ .result = v }; + } + + if (tryParseAtan2Args(Ctx, Percentage, input, ctx).asValue()) |v| { + return .{ .result = v }; + } + + if (tryParseAtan2Args(Ctx, Angle, input, ctx).asValue()) |v| { + return .{ .result = v }; + } + + if (tryParseAtan2Args(Ctx, Time, input, ctx).asValue()) |v| { + return .{ .result = v }; + } + + const Closure = struct { + ctx: @TypeOf(ctx), + + pub fn parseIdentFn(self: *@This(), ident: []const u8) ?Calc(CSSNumber) { + const v = parse_ident(self.ctx, ident) orelse return null; + if (v == .number) return .{ .number = v.number }; + return null; + } + }; + var closure = Closure{ + .ctx = ctx, + }; + return Calc(CSSNumber).parseAtan2Args(input, &closure, Closure.parseIdentFn); + } + + inline fn tryParseAtan2Args( + comptime Ctx: type, + comptime Value: type, + input: *css.Parser, + ctx: Ctx, + ) Result(Angle) { + const func = ParseIdentNone(Ctx, Value).func; + return input.tryParseImpl(Result(Angle), Calc(Value).parseAtan2Args, .{ input, ctx, func }); + } + + pub fn parseAtan2Args( + input: *css.Parser, + ctx: anytype, + comptime parse_ident: *const fn (@TypeOf(ctx), []const u8) ?This, + ) Result(Angle) { + const a = switch (This.parseSum(input, ctx, parse_ident)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + if (input.expectComma().asErr()) |e| return .{ .err = e }; + const b = switch (This.parseSum(input, ctx, parse_ident)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + + if (a == .value and b == .value) { + const Fn = struct { + pub fn opToFn(_: void, x: f32, y: f32) Angle { + return .{ .rad = std.math.atan2(x, y) }; + } + }; + if (css.generic.tryOpTo(V, Angle, a.value, b.value, {}, Fn.opToFn)) |v| { + return .{ .result = v }; + } + } else if (a == .number and b == .number) { + return .{ .result = Angle{ .rad = std.math.atan2(a.number, b.number) } }; + } else { + // doo nothing + } + + // We don't have a way to represent arguments that aren't angles, so just error. + // This will fall back to an unparsed property, leaving the atan2() function intact. 
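+            // e.g. mixed arguments like `atan2(1px, 5em)` generally cannot be resolved
+            // to a static angle here, so the caller keeps the original expression instead.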
+ return .{ .err = input.newCustomError(css.ParserError{ .invalid_value = {} }) }; + } + + pub fn parseNumeric( + input: *css.Parser, + ctx: anytype, + comptime parse_ident: *const fn (@TypeOf(ctx), []const u8) ?This, + ) Result(f32) { + const Closure = struct { + ctx: @TypeOf(ctx), + + pub fn parseIdentFn(self: *@This(), ident: []const u8) ?Calc(CSSNumber) { + const v = parse_ident(self.ctx, ident) orelse return null; + if (v == .number) return .{ .number = v.number }; + return null; + } + }; + var closure = Closure{ + .ctx = ctx, + }; + const v: Calc(CSSNumber) = switch (Calc(CSSNumber).parseSum(input, &closure, Closure.parseIdentFn)) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + const val = switch (v) { + .number => v.number, + .value => v.value.*, + else => return .{ .err = input.newCustomError(css.ParserError.invalid_value) }, + }; + return .{ .result = val }; + } + + pub fn parseHypot(allocator: Allocator, args: *ArrayList(This)) Result(?This) { + if (args.items.len == 1) { + const v = args.items[0]; + args.items[0] = This{ .number = 0 }; + return .{ .result = v }; + } + + if (args.items.len == 2) { + return .{ .result = This.applyOp(&args.items[0], &args.items[1], allocator, {}, hypot) }; + } + + var i: usize = 0; + const first = if (This.applyMap( + &args.items[0], + allocator, + powi2, + )) |v| v else return .{ .result = null }; + i += 1; + var errored: bool = false; + var sum: This = first; + for (args.items[i..]) |*arg| { + const Fn = struct { + pub fn applyOpFn(_: void, a: f32, b: f32) f32 { + return a + bun.powf(b, 2); + } + }; + sum = This.applyOp(&sum, arg, allocator, {}, Fn.applyOpFn) orelse { + errored = true; + break; + }; + } + + if (errored) return .{ .result = null }; + + return .{ .result = This.applyMap(&sum, allocator, sqrtf32) }; + } + + pub fn applyOp( + a: *const This, + b: *const This, + allocator: std.mem.Allocator, + ctx: anytype, + comptime op: *const fn (@TypeOf(ctx), f32, f32) f32, + ) ?This { + if (a.* == .value and b.* == .value) { + if (css.generic.tryOp(V, a.value, b.value, ctx, op)) |v| { + return This{ + .value = bun.create( + allocator, + V, + v, + ), + }; + } + return null; + } + + if (a.* == .number and b.* == .number) { + return This{ + .number = op(ctx, a.number, b.number), + }; + } + + return null; + } + + pub fn applyMap(this: *const This, allocator: Allocator, comptime op: *const fn (f32) f32) ?This { + switch (this.*) { + .number => |n| return This{ .number = op(n) }, + .value => |v| { + if (css.generic.tryMap(V, v, op)) |new_v| { + return This{ + .value = bun.create( + allocator, + V, + new_v, + ), + }; + } + }, + else => {}, + } + + return null; + } + + pub fn toCss(this: *const @This(), comptime W: type, dest: *css.Printer(W)) css.PrintErr!void { + const was_in_calc = dest.in_calc; + dest.in_calc = true; + + const res = toCssImpl(this, W, dest); + + dest.in_calc = was_in_calc; + return res; + } + + pub fn toCssImpl(this: *const @This(), comptime W: type, dest: *css.Printer(W)) css.PrintErr!void { + return switch (this.*) { + .value => |v| v.toCss(W, dest), + .number => |n| CSSNumberFns.toCss(&n, W, dest), + .sum => |sum| { + const a = sum.left; + const b = sum.right; + try a.toCss(W, dest); + // White space is always required. 
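+                    // A negative right-hand side is printed as a subtraction, so a sum of
+                    // `1px` and `-2px` serializes as `1px - 2px` rather than `1px + -2px`.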
+ if (b.isSignNegative()) { + try dest.writeStr(" - "); + var b2 = b.deepClone(dest.allocator).mulF32(dest.allocator, -1.0); + defer b2.deinit(dest.allocator); + try b2.toCss(W, dest); + } else { + try dest.writeStr(" + "); + try b.toCss(W, dest); + } + return; + }, + .product => { + const num = this.product.number; + const calc = this.product.expression; + if (@abs(num) < 1.0) { + const div = 1.0 / num; + try calc.toCss(W, dest); + try dest.delim('/', true); + try CSSNumberFns.toCss(&div, W, dest); + } else { + try CSSNumberFns.toCss(&num, W, dest); + try dest.delim('*', true); + try calc.toCss(W, dest); + } + }, + .function => |f| return f.toCss(W, dest), + }; + } + + pub fn trySign(this: *const @This()) ?f32 { + return switch (this.*) { + .value => |v| return switch (V) { + f32 => css.signfns.signF32(v), + else => v.trySign(), + }, + .number => |n| css.signfns.signF32(n), + else => null, + }; + } + + pub fn isSignNegative(this: *const @This()) bool { + return css.signfns.isSignNegative(this.trySign() orelse return false); + } + + pub fn mulF32(this: @This(), allocator: Allocator, other: f32) This { + if (other == 1.0) { + return this; + } + + return switch (this) { + // PERF: why not reuse the allocation here? + .value => This{ .value = bun.create(allocator, V, mulValueF32(this.value.*, allocator, other)) }, + .number => This{ .number = this.number * other }, + // PERF: why not reuse the allocation here? + .sum => This{ .sum = .{ + .left = bun.create( + allocator, + This, + this.sum.left.mulF32(allocator, other), + ), + .right = bun.create( + allocator, + This, + this.sum.right.mulF32(allocator, other), + ), + } }, + .product => { + const num = this.product.number * other; + if (num == 1.0) { + return this.product.expression.*; + } + return This{ + .product = .{ + .number = num, + .expression = this.product.expression, + }, + }; + }, + .function => switch (this.function.*) { + // PERF: why not reuse the allocation here? + .calc => This{ + .function = bun.create( + allocator, + MathFunction(V), + MathFunction(V){ + .calc = this.function.calc.mulF32(allocator, other), + }, + ), + }, + else => This{ + .product = .{ + .number = other, + .expression = bun.create(allocator, This, this), + }, + }, + }, + }; + } + + /// PERF: + /// I don't like how this function requires allocating a second ArrayList + /// I am pretty sure we could do this reduction in place, or do it as the + /// arguments are being parsed. + fn reduceArgs(allocator: Allocator, args: *ArrayList(This), order: std.math.Order) void { + // Reduces the arguments of a min() or max() expression, combining compatible values. + // e.g. 
min(1px, 1em, 2px, 3in) => min(1px, 1em) + var reduced = ArrayList(This){}; + + for (args.items) |*arg| { + var found: ??*Calc(V) = null; + switch (arg.*) { + .value => |val| { + for (reduced.items) |*b| { + switch (b.*) { + .value => |v| { + const result = css.generic.partialCmp(V, val, v); + if (result != null) { + if (result == order) { + found = b; + break; + } else { + found = @as(?*Calc(V), null); + break; + } + } + }, + else => {}, + } + } + }, + else => {}, + } + + if (found) |__r| { + if (__r) |r| { + r.* = arg.*; + // set to dummy value since we moved it into `reduced` + arg.* = This{ .number = 420 }; + continue; + } + } else { + reduced.append(allocator, arg.*) catch bun.outOfMemory(); + // set to dummy value since we moved it into `reduced` + arg.* = This{ .number = 420 }; + continue; + } + arg.deinit(allocator); + arg.* = This{ .number = 420 }; + } + + css.deepDeinit(This, allocator, args); + args.* = reduced; + } + + pub fn isCompatible(this: *const @This(), browsers: css.targets.Browsers) bool { + return switch (this.*) { + .sum => |*args| return args.left.isCompatible(browsers) and args.right.isCompatible(browsers), + .product => |*args| return args.expression.isCompatible(browsers), + .function => |f| f.isCompatible(browsers), + .value => |v| v.isCompatible(browsers), + .number => true, + }; + } + }; +} + +/// A CSS math function. +/// +/// Math functions may be used in most properties and values that accept numeric +/// values, including lengths, percentages, angles, times, etc. +pub fn MathFunction(comptime V: type) type { + return union(enum) { + /// The `calc()` function. + calc: Calc(V), + /// The `min()` function. + min: ArrayList(Calc(V)), + /// The `max()` function. + max: ArrayList(Calc(V)), + /// The `clamp()` function. + clamp: struct { + min: Calc(V), + center: Calc(V), + max: Calc(V), + }, + /// The `round()` function. + round: struct { + strategy: RoundingStrategy, + value: Calc(V), + interval: Calc(V), + }, + /// The `rem()` function. + rem: struct { + dividend: Calc(V), + divisor: Calc(V), + }, + /// The `mod()` function. + mod_: struct { + dividend: Calc(V), + divisor: Calc(V), + }, + /// The `abs()` function. + abs: Calc(V), + /// The `sign()` function. + sign: Calc(V), + /// The `hypot()` function. 
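+        /// e.g. `hypot(3px, 4px)` may be reduced to `5px` at parse time when the
+        /// arguments can be combined statically; otherwise the call is preserved.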
+ hypot: ArrayList(Calc(V)), + + pub fn eql(this: *const @This(), other: *const @This()) bool { + return switch (this.*) { + .calc => |a| return other.* == .calc and a.eql(&other.calc), + .min => |*a| return other.* == .min and css.generic.eqlList(Calc(V), a, &other.min), + .max => |*a| return other.* == .max and css.generic.eqlList(Calc(V), a, &other.max), + .clamp => |*a| return other.* == .clamp and a.min.eql(&other.clamp.min) and a.center.eql(&other.clamp.center) and a.max.eql(&other.clamp.max), + .round => |*a| return other.* == .round and a.strategy == other.round.strategy and a.value.eql(&other.round.value) and a.interval.eql(&other.round.interval), + .rem => |*a| return other.* == .rem and a.dividend.eql(&other.rem.dividend) and a.divisor.eql(&other.rem.divisor), + .mod_ => |*a| return other.* == .mod_ and a.dividend.eql(&other.mod_.dividend) and a.divisor.eql(&other.mod_.divisor), + .abs => |*a| return other.* == .abs and a.eql(&other.abs), + .sign => |*a| return other.* == .sign and a.eql(&other.sign), + .hypot => |*a| return other.* == .hypot and css.generic.eqlList(Calc(V), a, &other.hypot), + }; + } + + pub fn deepClone(this: *const @This(), allocator: Allocator) @This() { + return switch (this.*) { + .calc => |*calc| .{ .calc = calc.deepClone(allocator) }, + .min => |*min| .{ .min = css.deepClone(Calc(V), allocator, min) }, + .max => |*max| .{ .max = css.deepClone(Calc(V), allocator, max) }, + .clamp => |*clamp| .{ + .clamp = .{ + .min = clamp.min.deepClone(allocator), + .center = clamp.center.deepClone(allocator), + .max = clamp.max.deepClone(allocator), + }, + }, + .round => |*rnd| .{ .round = .{ + .strategy = rnd.strategy, + .value = rnd.value.deepClone(allocator), + .interval = rnd.interval.deepClone(allocator), + } }, + .rem => |*rem| .{ .rem = .{ + .dividend = rem.dividend.deepClone(allocator), + .divisor = rem.divisor.deepClone(allocator), + } }, + .mod_ => |*mod_| .{ .mod_ = .{ + .dividend = mod_.dividend.deepClone(allocator), + .divisor = mod_.divisor.deepClone(allocator), + } }, + .abs => |*abs| .{ .abs = abs.deepClone(allocator) }, + .sign => |*sign| .{ .sign = sign.deepClone(allocator) }, + .hypot => |*hyp| .{ + .hypot = css.deepClone(Calc(V), allocator, hyp), + }, + }; + } + + pub fn deinit(this: *@This(), allocator: Allocator) void { + switch (this.*) { + .calc => |*calc| calc.deinit(allocator), + .min => |*min| css.deepDeinit(Calc(V), allocator, min), + .max => |*max| css.deepDeinit(Calc(V), allocator, max), + .clamp => |*clamp| { + clamp.min.deinit(allocator); + clamp.center.deinit(allocator); + clamp.max.deinit(allocator); + }, + .round => |*rnd| { + rnd.value.deinit(allocator); + rnd.interval.deinit(allocator); + }, + .rem => |*rem| { + rem.dividend.deinit(allocator); + rem.divisor.deinit(allocator); + }, + .mod_ => |*mod_| { + mod_.dividend.deinit(allocator); + mod_.divisor.deinit(allocator); + }, + .abs => |*abs| { + abs.deinit(allocator); + }, + .sign => |*sign| { + sign.deinit(allocator); + }, + .hypot => |*hyp| { + css.deepDeinit(Calc(V), allocator, hyp); + }, + } + } + + pub fn toCss(this: *const @This(), comptime W: type, dest: *css.Printer(W)) css.PrintErr!void { + return switch (this.*) { + .calc => |*calc| { + try dest.writeStr("calc("); + try calc.toCss(W, dest); + try dest.writeChar(')'); + }, + .min => |*args| { + try dest.writeStr("min("); + var first = true; + for (args.items) |*arg| { + if (first) { + first = false; + } else { + try dest.delim(',', false); + } + try arg.toCss(W, dest); + } + try dest.writeChar(')'); + }, + .max => |*args| { + 
try dest.writeStr("max("); + var first = true; + for (args.items) |*arg| { + if (first) { + first = false; + } else { + try dest.delim(',', false); + } + try arg.toCss(W, dest); + } + try dest.writeChar(')'); + }, + .clamp => |*clamp| { + try dest.writeStr("clamp("); + try clamp.min.toCss(W, dest); + try dest.delim(',', false); + try clamp.center.toCss(W, dest); + try dest.delim(',', false); + try clamp.max.toCss(W, dest); + try dest.writeChar(')'); + }, + .round => |*rnd| { + try dest.writeStr("round("); + if (rnd.strategy != RoundingStrategy.default()) { + try rnd.strategy.toCss(W, dest); + try dest.delim(',', false); + } + try rnd.value.toCss(W, dest); + try dest.delim(',', false); + try rnd.interval.toCss(W, dest); + try dest.writeChar(')'); + }, + .rem => |*rem| { + try dest.writeStr("rem("); + try rem.dividend.toCss(W, dest); + try dest.delim(',', false); + try rem.divisor.toCss(W, dest); + try dest.writeChar(')'); + }, + .mod_ => |*mod_| { + try dest.writeStr("mod("); + try mod_.dividend.toCss(W, dest); + try dest.delim(',', false); + try mod_.divisor.toCss(W, dest); + try dest.writeChar(')'); + }, + .abs => |*v| { + try dest.writeStr("abs("); + try v.toCss(W, dest); + try dest.writeChar(')'); + }, + .sign => |*v| { + try dest.writeStr("sign("); + try v.toCss(W, dest); + try dest.writeChar(')'); + }, + .hypot => |*args| { + try dest.writeStr("hypot("); + var first = true; + for (args.items) |*arg| { + if (first) { + first = false; + } else { + try dest.delim(',', false); + } + try arg.toCss(W, dest); + } + try dest.writeChar(')'); + }, + }; + } + + pub fn isCompatible(this: *const @This(), browsers: css.targets.Browsers) bool { + const F = css.compat.Feature; + return switch (this.*) { + .calc => |*c| F.isCompatible(F.calc_function, browsers) and c.isCompatible(browsers), + .min => |*m| F.isCompatible(F.min_function, browsers) and brk: { + for (m.items) |*arg| { + if (!arg.isCompatible(browsers)) { + break :brk false; + } + } + break :brk true; + }, + .max => |*m| F.isCompatible(F.max_function, browsers) and brk: { + for (m.items) |*arg| { + if (!arg.isCompatible(browsers)) { + break :brk false; + } + } + break :brk true; + }, + .clamp => |*c| F.isCompatible(F.clamp_function, browsers) and + c.min.isCompatible(browsers) and + c.center.isCompatible(browsers) and + c.max.isCompatible(browsers), + .round => |*r| F.isCompatible(F.round_function, browsers) and + r.value.isCompatible(browsers) and + r.interval.isCompatible(browsers), + .rem => |*r| F.isCompatible(F.rem_function, browsers) and + r.dividend.isCompatible(browsers) and + r.divisor.isCompatible(browsers), + .mod_ => |*m| F.isCompatible(F.mod_function, browsers) and + m.dividend.isCompatible(browsers) and + m.divisor.isCompatible(browsers), + .abs => |*a| F.isCompatible(F.abs_function, browsers) and + a.isCompatible(browsers), + .sign => |*s| F.isCompatible(F.sign_function, browsers) and + s.isCompatible(browsers), + .hypot => |*h| F.isCompatible(F.hypot_function, browsers) and brk: { + for (h.items) |*arg| { + if (!arg.isCompatible(browsers)) { + break :brk false; + } + } + break :brk true; + }, + }; + } + }; +} + +/// A [rounding strategy](https://www.w3.org/TR/css-values-4/#typedef-rounding-strategy), +/// as used in the `round()` function. +pub const RoundingStrategy = enum { + /// Round to the nearest integer. + nearest, + /// Round up (ceil). + up, + /// Round down (floor). + down, + /// Round toward zero (truncate). 
+ @"to-zero", + + pub fn asStr(this: *const @This()) []const u8 { + return css.enum_property_util.asStr(@This(), this); + } + + pub fn parse(input: *css.Parser) Result(@This()) { + return css.enum_property_util.parse(@This(), input); + } + + pub fn toCss(this: *const @This(), comptime W: type, dest: *Printer(W)) PrintErr!void { + return css.enum_property_util.toCss(@This(), this, W, dest); + } + + pub fn default() RoundingStrategy { + return .nearest; + } +}; + +fn arr2(allocator: std.mem.Allocator, a: anytype, b: anytype) ArrayList(@TypeOf(a)) { + const T = @TypeOf(a); + if (T != @TypeOf(b)) { + @compileError("arr2: types must match"); + } + var arr = ArrayList(T){}; + arr.appendSlice(allocator, &.{ a, b }) catch bun.outOfMemory(); + return arr; +} + +fn round(_: void, value: f32, to: f32, strategy: RoundingStrategy) f32 { + const v = value / to; + return switch (strategy) { + .down => @floor(v) * to, + .up => @ceil(v) * to, + .nearest => @round(v) * to, + .@"to-zero" => @trunc(v) * to, + }; +} + +fn hypot(_: void, a: f32, b: f32) f32 { + return std.math.hypot(a, b); +} + +fn powi2(v: f32) f32 { + return bun.powf(v, 2); +} + +fn sqrtf32(v: f32) f32 { + return std.math.sqrt(v); +} +/// A mathematical constant. +pub const Constant = enum { + /// The base of the natural logarithm + e, + /// The ratio of a circle's circumference to its diameter + pi, + /// infinity + infinity, + /// -infinity + @"-infinity", + /// Not a number. + nan, + + pub fn asStr(this: *const @This()) []const u8 { + return css.enum_property_util.asStr(@This(), this); + } + + pub fn parse(input: *css.Parser) Result(@This()) { + return css.enum_property_util.parse(@This(), input); + } + + pub fn toCss(this: *const @This(), comptime W: type, dest: *Printer(W)) PrintErr!void { + return css.enum_property_util.toCss(@This(), this, W, dest); + } + + pub fn intoF32(this: *const @This()) f32 { + return switch (this.*) { + .e => std.math.e, + .pi => std.math.pi, + .infinity => std.math.inf(f32), + .@"-infinity" => -std.math.inf(f32), + .nan => std.math.nan(f32), + }; + } +}; + +fn absf(a: f32) f32 { + return @abs(a); +} diff --git a/src/css/values/color.zig b/src/css/values/color.zig new file mode 100644 index 0000000000..e742af54b7 --- /dev/null +++ b/src/css/values/color.zig @@ -0,0 +1,4445 @@ +const std = @import("std"); +const Allocator = std.mem.Allocator; +const bun = @import("root").bun; +const logger = bun.logger; +const Log = logger.Log; + +pub const css = @import("../css_parser.zig"); +pub const Result = css.Result; + +const Percentage = css.css_values.percentage.Percentage; +const CSSNumberFns = css.css_values.number.CSSNumberFns; +const Calc = css.css_values.calc.Calc; +const Angle = css.css_values.angle.Angle; + +const Printer = css.Printer; +const PrintErr = css.PrintErr; + +pub fn UnboundedColorGamut(comptime T: type) type { + return struct { + pub fn inGamut(_: *const T) bool { + return true; + } + + pub fn clip(this: *const T) T { + return this.*; + } + }; +} + +pub fn HslHwbColorGamut(comptime T: type, comptime a: []const u8, comptime b: []const u8) type { + return struct { + pub fn inGamut(this: *const T) bool { + return @field(this, a) >= 0.0 and + @field(this, a) <= 1.0 and + @field(this, b) >= 0.0 and + @field(this, b) <= 1.0; + } + + pub fn clip(this: *const T) T { + var result: T = this.*; + // result.h = this.h % 360.0; + result.h = @mod(this.h, 360.0); + @field(result, a) = bun.clamp(@field(this, a), 0.0, 1.0); + @field(result, b) = bun.clamp(@field(this, b), 0.0, 1.0); + result.alpha = 
bun.clamp(this.alpha, 0.0, 1.0); + return result; + } + }; +} + +/// A CSS `` value. +/// +/// CSS supports many different color spaces to represent colors. The most common values +/// are stored as RGBA using a single byte per component. Less common values are stored +/// using a `Box` to reduce the amount of memory used per color. +/// +/// Each color space is represented as a struct that implements the `From` and `Into` traits +/// for all other color spaces, so it is possible to convert between color spaces easily. +/// In addition, colors support interpolation as in the `color-mix()` function. +pub const CssColor = union(enum) { + /// The `currentColor` keyword. + current_color, + /// A value in the RGB color space, including values parsed as hex colors, or the `rgb()`, `hsl()`, and `hwb()` functions. + rgba: RGBA, + /// A value in a LAB color space, including the `lab()`, `lch()`, `oklab()`, and `oklch()` functions. + lab: *LABColor, + /// A value in a predefined color space, e.g. `display-p3`. + predefined: *PredefinedColor, + /// A floating point representation of an RGB, HSL, or HWB color when it contains `none` components. + float: *FloatColor, + /// The `light-dark()` function. + light_dark: struct { + // TODO: why box the two fields separately? why not one allocation? + light: *CssColor, + dark: *CssColor, + + pub fn takeLightFreeDark(this: *const @This(), allocator: Allocator) *CssColor { + const ret = this.light; + this.dark.deinit(allocator); + allocator.destroy(this.dark); + return ret; + } + + pub fn takeDarkFreeLight(this: *const @This(), allocator: Allocator) *CssColor { + const ret = this.dark; + this.light.deinit(allocator); + allocator.destroy(this.light); + return ret; + } + + pub fn __generateHash() void {} + }, + /// A system color keyword. + system: SystemColor, + + const This = @This(); + + pub const jsFunctionColor = @import("./color_js.zig").jsFunctionColor; + + pub fn isCompatible(this: *const CssColor, browsers: css.targets.Browsers) bool { + return switch (this.*) { + .current_color, .rgba, .float => true, + .lab => |lab| switch (lab.*) { + .lab, .lch => css.Feature.isCompatible(.lab_colors, browsers), + .oklab, .oklch => css.Feature.isCompatible(.oklab_colors, browsers), + }, + .predefined => |predefined| switch (predefined.*) { + .display_p3 => css.Feature.isCompatible(.p3_colors, browsers), + else => css.Feature.isCompatible(.color_function, browsers), + }, + .light_dark => |light_dark| css.Feature.isCompatible(.light_dark, browsers) and + light_dark.light.isCompatible(browsers) and + light_dark.dark.isCompatible(browsers), + .system => |system| system.isCompatible(browsers), + }; + } + + pub fn toCss( + this: *const This, + comptime W: type, + dest: *Printer(W), + ) PrintErr!void { + switch (this.*) { + .current_color => try dest.writeStr("currentColor"), + .rgba => |*color| { + if (color.alpha == 255) { + const hex: u32 = (@as(u32, color.red) << 16) | (@as(u32, color.green) << 8) | @as(u32, color.blue); + if (shortColorName(hex)) |name| return dest.writeStr(name); + + const compact = compactHex(hex); + if (hex == expandHex(compact)) { + try dest.writeFmt("#{x:0>3}", .{compact}); + } else { + try dest.writeFmt("#{x:0>6}", .{hex}); + } + } else { + // If the #rrggbbaa syntax is not supported by the browser targets, output rgba() + if (dest.targets.shouldCompileSame(.hex_alpha_colors)) { + // If the browser doesn't support `#rrggbbaa` color syntax, it is converted to `transparent` when compressed(minify = true). 
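+ // Illustrative: with minify enabled, `#00000000` is printed as `transparent`;
+ // any other translucent color falls through to the rgba() path below.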
+ // https://www.w3.org/TR/css-color-4/#transparent-black + if (dest.minify and color.red == 0 and color.green == 0 and color.blue == 0 and color.alpha == 0) { + return dest.writeStr("transparent"); + } else { + try dest.writeFmt("rgba({d}", .{color.red}); + try dest.delim(',', false); + try dest.writeFmt("{d}", .{color.green}); + try dest.delim(',', false); + try dest.writeFmt("{d}", .{color.blue}); + try dest.delim(',', false); + + // Try first with two decimal places, then with three. + var rounded_alpha = @round(color.alphaF32() * 100.0) / 100.0; + const clamped: u8 = @intFromFloat(@min( + @max( + @round(rounded_alpha * 255.0), + 0.0, + ), + 255.0, + )); + if (clamped != color.alpha) { + rounded_alpha = @round(color.alphaF32() * 1000.0) / 1000.0; + } + + try CSSNumberFns.toCss(&rounded_alpha, W, dest); + try dest.writeChar(')'); + return; + } + } + + const hex: u32 = (@as(u32, color.red) << 24) | + (@as(u32, color.green) << 16) | + (@as(u32, color.blue) << 8) | + (@as(u32, color.alpha)); + const compact = compactHex(hex); + if (hex == expandHex(compact)) { + try dest.writeFmt("#{x:0>4}", .{compact}); + } else { + try dest.writeFmt("#{x:0>8}", .{hex}); + } + } + return; + }, + .lab => |_lab| { + return switch (_lab.*) { + .lab => |*lab| writeComponents( + "lab", + lab.l, + lab.a, + lab.b, + lab.alpha, + W, + dest, + ), + .lch => |*lch| writeComponents( + "lch", + lch.l, + lch.c, + lch.h, + lch.alpha, + W, + dest, + ), + .oklab => |*oklab| writeComponents( + "oklab", + oklab.l, + oklab.a, + oklab.b, + oklab.alpha, + W, + dest, + ), + .oklch => |*oklch| writeComponents( + "oklch", + oklch.l, + oklch.c, + oklch.h, + oklch.alpha, + W, + dest, + ), + }; + }, + .predefined => |predefined| return writePredefined(predefined, W, dest), + .float => |*float| { + // Serialize as hex. 
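+ // Float colors are only used when a component is `none` (represented as NaN),
+ // which the byte-per-channel RGBA form cannot store, so the value is first
+ // converted to sRGB and the converted color is re-serialized.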
+ const srgb = SRGB.fromFloatColor(float.*); + const as_css_color = srgb.intoCssColor(dest.allocator); + defer as_css_color.deinit(dest.allocator); + try as_css_color.toCss(W, dest); + }, + .light_dark => |*light_dark| { + if (!dest.targets.isCompatible(css.compat.Feature.light_dark)) { + // TODO(zack): lightningcss -> buncss + try dest.writeStr("var(--lightningcss-light"); + try dest.delim(',', false); + try light_dark.light.toCss(W, dest); + try dest.writeChar(')'); + try dest.whitespace(); + try dest.writeStr("var(--lightningcss-dark"); + try dest.delim(',', false); + try light_dark.dark.toCss(W, dest); + try light_dark.dark.toCss(W, dest); + return dest.writeChar(')'); + } + + try dest.writeStr("light-dark("); + try light_dark.light.toCss(W, dest); + try dest.delim(',', false); + try light_dark.dark.toCss(W, dest); + return dest.writeChar(')'); + }, + .system => |*system| return system.toCss(W, dest), + } + } + + pub const ParseResult = Result(CssColor); + pub fn parse(input: *css.Parser) ParseResult { + const location = input.currentSourceLocation(); + const token = switch (input.next()) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + + switch (token.*) { + .hash, .idhash => |v| { + const r, const g, const b, const a = css.color.parseHashColor(v) orelse return .{ .err = location.newUnexpectedTokenError(token.*) }; + return .{ .result = .{ + .rgba = RGBA.new(r, g, b, a), + } }; + }, + .ident => |value| { + if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(value, "currentcolor")) { + return .{ .result = .current_color }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(value, "transparent")) { + return .{ .result = .{ + .rgba = RGBA.transparent(), + } }; + } else { + if (css.color.parseNamedColor(value)) |named| { + const r, const g, const b = named; + return .{ .result = .{ .rgba = RGBA.new(r, g, b, 255.0) } }; + // } else if (SystemColor.parseString(value)) |system_color| { + } else if (css.parse_utility.parseString(input.allocator(), SystemColor, value, SystemColor.parse).asValue()) |system_color| { + return .{ .result = .{ .system = system_color } }; + } else return .{ .err = location.newUnexpectedTokenError(token.*) }; + } + }, + .function => |name| return parseColorFunction(location, name, input), + else => return .{ + .err = location.newUnexpectedTokenError(token.*), + }, + } + } + + pub fn deinit(this: CssColor, allocator: Allocator) void { + switch (this) { + .current_color => {}, + .rgba => {}, + .lab => { + allocator.destroy(this.lab); + }, + .predefined => { + allocator.destroy(this.predefined); + }, + .float => { + allocator.destroy(this.float); + }, + .light_dark => { + this.light_dark.light.deinit(allocator); + this.light_dark.dark.deinit(allocator); + allocator.destroy(this.light_dark.light); + allocator.destroy(this.light_dark.dark); + }, + .system => {}, + } + } + + pub fn deepClone(this: *const CssColor, allocator: Allocator) CssColor { + return switch (this.*) { + .current_color => .current_color, + .rgba => |rgba| CssColor{ .rgba = rgba }, + .lab => |lab| CssColor{ .lab = bun.create(allocator, LABColor, lab.*) }, + .predefined => |pre| CssColor{ .predefined = bun.create(allocator, PredefinedColor, pre.*) }, + .float => |float| CssColor{ .float = bun.create(allocator, FloatColor, float.*) }, + .light_dark => CssColor{ + .light_dark = .{ + .light = bun.create(allocator, CssColor, this.light_dark.light.deepClone(allocator)), + .dark = bun.create(allocator, CssColor, this.light_dark.dark.deepClone(allocator)), + }, + }, + .system => 
|sys| CssColor{ .system = sys },
+ };
+ }
+
+ pub fn toLightDark(this: *const CssColor, allocator: Allocator) CssColor {
+ return switch (this.*) {
+ .light_dark => this.deepClone(allocator),
+ else => .{
+ .light_dark = .{
+ .light = bun.create(allocator, CssColor, this.deepClone(allocator)),
+ .dark = bun.create(allocator, CssColor, this.deepClone(allocator)),
+ },
+ },
+ };
+ }
+
+ /// Mixes this color with another color, including the specified amount of each.
+ /// Implemented according to the [`color-mix()`](https://www.w3.org/TR/css-color-5/#color-mix) function.
+ // PERF: these little allocations feel bad
+ pub fn interpolate(
+ this: *const CssColor,
+ allocator: Allocator,
+ comptime T: type,
+ p1_: f32,
+ other: *const CssColor,
+ p2_: f32,
+ method: HueInterpolationMethod,
+ ) ?CssColor {
+ var p1 = p1_;
+ var p2 = p2_;
+
+ if (this.* == .current_color or other.* == .current_color) {
+ return null;
+ }
+
+ if (this.* == .light_dark or other.* == .light_dark) {
+ const this_light_dark = this.toLightDark(allocator);
+ const other_light_dark = other.toLightDark(allocator);
+
+ const al = this_light_dark.light_dark.light;
+ const ad = this_light_dark.light_dark.dark;
+
+ const bl = other_light_dark.light_dark.light;
+ const bd = other_light_dark.light_dark.dark;
+
+ return .{
+ .light_dark = .{
+ .light = bun.create(
+ allocator,
+ CssColor,
+ al.interpolate(allocator, T, p1, bl, p2, method) orelse return null,
+ ),
+ .dark = bun.create(
+ allocator,
+ CssColor,
+ ad.interpolate(allocator, T, p1, bd, p2, method) orelse return null,
+ ),
+ },
+ };
+ }
+
+ const check_converted = struct {
+ fn run(color: *const CssColor) bool {
+ bun.debugAssert(color.* != .light_dark and color.* != .current_color and color.* != .system);
+ return switch (color.*) {
+ .rgba => T == RGBA,
+ .lab => |lab| switch (lab.*) {
+ .lab => T == LAB,
+ .lch => T == LCH,
+ .oklab => T == OKLAB,
+ .oklch => T == OKLCH,
+ },
+ .predefined => |pre| switch (pre.*) {
+ .srgb => T == SRGB,
+ .srgb_linear => T == SRGBLinear,
+ .display_p3 => T == P3,
+ .a98 => T == A98,
+ .prophoto => T == ProPhoto,
+ .rec2020 => T == Rec2020,
+ .xyz_d50 => T == XYZd50,
+ .xyz_d65 => T == XYZd65,
+ },
+ .float => |f| switch (f.*) {
+ .rgb => T == SRGB,
+ .hsl => T == HSL,
+ .hwb => T == HWB,
+ },
+ .system => bun.Output.panic("Unreachable code: system colors cannot be converted to a color.\n\nThis is a bug in Bun's CSS color parser. 
Please file a bug report at https://github.com/oven-sh/bun/issues/new/choose", .{}), + // We checked these above + .light_dark, .current_color => unreachable, + }; + } + }; + + const converted_first = check_converted.run(this); + const converted_second = check_converted.run(other); + + // https://drafts.csswg.org/css-color-5/#color-mix-result + var first_color = T.tryFromCssColor(this) orelse return null; + var second_color = T.tryFromCssColor(other) orelse return null; + + if (converted_first and !first_color.inGamut()) { + first_color = mapGamut(T, first_color); + } + + if (converted_second and !second_color.inGamut()) { + second_color = mapGamut(T, second_color); + } + + // https://www.w3.org/TR/css-color-4/#powerless + if (converted_first) { + first_color.adjustPowerlessComponents(); + } + + if (converted_second) { + second_color.adjustPowerlessComponents(); + } + + // https://drafts.csswg.org/css-color-4/#interpolation-missing + first_color.fillMissingComponents(&second_color); + second_color.fillMissingComponents(&first_color); + + // https://www.w3.org/TR/css-color-4/#hue-interpolation + first_color.adjustHue(&second_color, method); + + // https://www.w3.org/TR/css-color-4/#interpolation-alpha + first_color.premultiply(); + second_color.premultiply(); + + // https://drafts.csswg.org/css-color-5/#color-mix-percent-norm + var alpha_multiplier = p1 + p2; + if (alpha_multiplier != 1.0) { + p1 = p1 / alpha_multiplier; + p2 = p2 / alpha_multiplier; + if (alpha_multiplier > 1.0) { + alpha_multiplier = 1.0; + } + } + + var result_color = first_color.interpolate(p1, &second_color, p2); + + result_color.unpremultiply(alpha_multiplier); + + return result_color.intoCssColor(allocator); + } + + pub fn lightDarkOwned(allocator: Allocator, light: CssColor, dark: CssColor) CssColor { + return CssColor{ + .light_dark = .{ + .light = bun.create(allocator, CssColor, light), + .dark = bun.create(allocator, CssColor, dark), + }, + }; + } + + pub fn getFallback(this: *const @This(), allocator: Allocator, kind: ColorFallbackKind) CssColor { + if (this.* == .rgba) return this.deepClone(allocator); + + return switch (kind.asBits()) { + ColorFallbackKind.RGB.asBits() => this.toRGB(allocator).?, + ColorFallbackKind.P3.asBits() => this.toP3(allocator).?, + ColorFallbackKind.LAB.asBits() => this.toLAB(allocator).?, + else => bun.unreachablePanic("Expected RGBA, P3, LAB fallback. This is a bug in Bun.", .{}), + }; + } + + pub fn getFallbacks(this: *@This(), allocator: Allocator, targets: css.targets.Targets) css.SmallList(CssColor, 2) { + const fallbacks = this.getNecessaryFallbacks(targets); + + var res = css.SmallList(CssColor, 2){}; + + if (fallbacks.contains(ColorFallbackKind{ .rgb = true })) { + res.appendAssumeCapacity(this.toRGB(allocator).?); + } + + if (fallbacks.contains(ColorFallbackKind{ .p3 = true })) { + res.appendAssumeCapacity(this.toP3(allocator).?); + } + + if (fallbacks.contains(ColorFallbackKind{ .lab = true })) { + this.* = this.toLAB(allocator).?; + } + + return res; + } + + /// Returns the color fallback types needed for the given browser targets. + pub fn getNecessaryFallbacks(this: *const @This(), targets: css.targets.Targets) ColorFallbackKind { + // Get the full set of possible fallbacks, and remove the highest one, which + // will replace the original declaration. The remaining fallbacks need to be added. 
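+ // Fallback levels are ordered RGB < P3 < LAB (see getPossibleFallbacks below).
+ // For example (illustrative), a color authored as `oklch(...)` may get an rgb()
+ // and a color(display-p3 ...) fallback, with a lab() value replacing the original
+ // declaration, depending on the configured browser targets.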
+ const fallbacks = this.getPossibleFallbacks(targets); + return fallbacks.difference(fallbacks.highest()); + } + + pub fn getPossibleFallbacks(this: *const @This(), targets: css.targets.Targets) ColorFallbackKind { + // Fallbacks occur in levels: Oklab -> Lab -> P3 -> RGB. We start with all levels + // below and including the authored color space, and remove the ones that aren't + // compatible with our browser targets. + var fallbacks = switch (this.*) { + .current_color, .rgba, .float, .system => return ColorFallbackKind.empty(), + .lab => |lab| brk: { + if (lab.* == .lab or lab.* == .lch and targets.shouldCompileSame(.lab_colors)) + break :brk ColorFallbackKind.andBelow(.{ .lab = true }); + if (lab.* == .oklab or lab.* == .oklch and targets.shouldCompileSame(.oklab_colors)) + break :brk ColorFallbackKind.andBelow(.{ .lab = true }); + return ColorFallbackKind.empty(); + }, + .predefined => |predefined| brk: { + if (predefined.* == .display_p3 and targets.shouldCompileSame(.p3_colors)) break :brk ColorFallbackKind.andBelow(.{ .p3 = true }); + if (targets.shouldCompileSame(.color_function)) break :brk ColorFallbackKind.andBelow(.{ .lab = true }); + return ColorFallbackKind.empty(); + }, + .light_dark => |*ld| { + return ld.light.getPossibleFallbacks(targets).bitwiseOr(ld.dark.getPossibleFallbacks(targets)); + }, + }; + + if (fallbacks.contains(.{ .oklab = true })) { + if (!targets.shouldCompileSame(.oklab_colors)) { + fallbacks.remove(ColorFallbackKind.andBelow(.{ .lab = true })); + } + } + + if (fallbacks.contains(.{ .lab = true })) { + if (!targets.shouldCompileSame(.lab_colors)) { + fallbacks = fallbacks.difference(ColorFallbackKind.andBelow(.{ .p3 = true })); + } else if (targets.browsers != null and css.compat.Feature.isPartiallyCompatible(&css.compat.Feature.lab_colors, targets.browsers.?)) { + // We don't need P3 if Lab is supported by some of our targets. + // No browser implements Lab but not P3. + fallbacks.remove(.{ .p3 = true }); + } + } + + if (fallbacks.contains(.{ .p3 = true })) { + if (!targets.shouldCompileSame(.p3_colors)) { + fallbacks.remove(.{ .rgb = true }); + } else if (fallbacks.highest().asBits() != ColorFallbackKind.asBits(.{ .p3 = true }) and + (targets.browsers == null or !css.compat.Feature.isPartiallyCompatible(&css.compat.Feature.p3_colors, targets.browsers.?))) + { + // Remove P3 if it isn't supported by any targets, and wasn't the + // original authored color. 
+ fallbacks.remove(.{ .p3 = true }); + } + } + + return fallbacks; + } + + pub fn default() @This() { + return .{ .rgba = RGBA.transparent() }; + } + + pub fn eql(this: *const This, other: *const This) bool { + if (@intFromEnum(this.*) != @intFromEnum(other.*)) return false; + + return switch (this.*) { + .current_color => true, + .rgba => std.meta.eql(this.rgba, other.rgba), + .lab => std.meta.eql(this.lab.*, other.lab.*), + .predefined => std.meta.eql(this.predefined.*, other.predefined.*), + .float => std.meta.eql(this.float.*, other.float.*), + .light_dark => this.light_dark.light.eql(other.light_dark.light) and this.light_dark.dark.eql(other.light_dark.dark), + .system => this.system == other.system, + }; + } + + pub fn hash(this: *const @This(), hasher: *std.hash.Wyhash) void { + return css.implementHash(@This(), this, hasher); + } + + pub fn toRGB(this: *const @This(), allocator: Allocator) ?CssColor { + if (this.* == .light_dark) { + return CssColor{ .light_dark = .{ + .light = bun.create(allocator, CssColor, this.light_dark.light.toRGB(allocator) orelse return null), + .dark = bun.create(allocator, CssColor, this.light_dark.dark.toRGB(allocator) orelse return null), + } }; + } + return CssColor{ .rgba = RGBA.tryFromCssColor(this) orelse return null }; + } + + pub fn toP3(this: *const @This(), allocator: Allocator) ?CssColor { + return switch (this.*) { + .light_dark => |ld| blk: { + const light = ld.light.toP3(allocator) orelse break :blk null; + const dark = ld.dark.toP3(allocator) orelse break :blk null; + break :blk .{ + .light_dark = .{ + .light = bun.create(allocator, CssColor, light), + .dark = bun.create(allocator, CssColor, dark), + }, + }; + }, + else => return .{ .predefined = bun.create(allocator, PredefinedColor, .{ .display_p3 = P3.tryFromCssColor(this) orelse return null }) }, + }; + } + + pub fn toLAB(this: *const @This(), allocator: Allocator) ?CssColor { + return switch (this.*) { + .light_dark => |ld| blk: { + const light = ld.light.toLAB(allocator) orelse break :blk null; + const dark = ld.dark.toLAB(allocator) orelse break :blk null; + break :blk .{ + .light_dark = .{ + .light = bun.create(allocator, CssColor, light), + .dark = bun.create(allocator, CssColor, dark), + }, + }; + }, + else => .{ .lab = bun.create(allocator, LABColor, .{ .lab = LAB.tryFromCssColor(this) orelse return null }) }, + }; + } +}; + +pub fn parseColorFunction(location: css.SourceLocation, function: []const u8, input: *css.Parser) Result(CssColor) { + var parser = ComponentParser.new(true); + + const ColorFunctions = enum { lab, oklab, lch, oklch, color, hsl, hsla, hwb, rgb, rgba, @"color-mix", @"light-dark" }; + const Map = bun.ComptimeEnumMap(ColorFunctions); + + if (Map.getASCIIICaseInsensitive(function)) |val| { + return switch (val) { + .lab => parseLab(LAB, input, &parser, struct { + fn callback(l: f32, a: f32, b: f32, alpha: f32) LABColor { + return .{ .lab = .{ .l = l, .a = a, .b = b, .alpha = alpha } }; + } + }.callback), + .oklab => parseLab(OKLAB, input, &parser, struct { + fn callback(l: f32, a: f32, b: f32, alpha: f32) LABColor { + return .{ .oklab = .{ .l = l, .a = a, .b = b, .alpha = alpha } }; + } + }.callback), + .lch => parseLch(LCH, input, &parser, struct { + fn callback(l: f32, c: f32, h: f32, alpha: f32) LABColor { + return .{ .lch = .{ .l = l, .c = c, .h = h, .alpha = alpha } }; + } + }.callback), + .oklch => parseLch(OKLCH, input, &parser, struct { + fn callback(l: f32, c: f32, h: f32, alpha: f32) LABColor { + return .{ .oklch = .{ .l = l, .c = c, .h = h, .alpha = 
alpha } }; + } + }.callback), + .color => parsePredefined(input, &parser), + .hsl, .hsla => parseHslHwb(HSL, input, &parser, true, struct { + fn callback(allocator: Allocator, h: f32, s: f32, l: f32, a: f32) CssColor { + const hsl = HSL{ .h = h, .s = s, .l = l, .alpha = a }; + if (!std.math.isNan(h) and !std.math.isNan(s) and !std.math.isNan(l) and !std.math.isNan(a)) { + return CssColor{ .rgba = hsl.intoRGBA() }; + } else { + return CssColor{ .float = bun.create(allocator, FloatColor, .{ .hsl = hsl }) }; + } + } + }.callback), + .hwb => parseHslHwb(HWB, input, &parser, false, struct { + fn callback(allocator: Allocator, h: f32, w: f32, b: f32, a: f32) CssColor { + const hwb = HWB{ .h = h, .w = w, .b = b, .alpha = a }; + if (!std.math.isNan(h) and !std.math.isNan(w) and !std.math.isNan(b) and !std.math.isNan(a)) { + return CssColor{ .rgba = hwb.intoRGBA() }; + } else { + return CssColor{ .float = bun.create(allocator, FloatColor, .{ .hwb = hwb }) }; + } + } + }.callback), + .rgb, .rgba => parseRgb(input, &parser), + .@"color-mix" => input.parseNestedBlock(CssColor, {}, struct { + pub fn parseFn(_: void, i: *css.Parser) Result(CssColor) { + return parseColorMix(i); + } + }.parseFn), + .@"light-dark" => input.parseNestedBlock(CssColor, {}, struct { + fn callback(_: void, i: *css.Parser) Result(CssColor) { + const light = switch (switch (CssColor.parse(i)) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }) { + .light_dark => |ld| ld.takeLightFreeDark(i.allocator()), + else => |v| bun.create(i.allocator(), CssColor, v), + }; + if (i.expectComma().asErr()) |e| return .{ .err = e }; + const dark = switch (switch (CssColor.parse(i)) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }) { + .light_dark => |ld| ld.takeDarkFreeLight(i.allocator()), + else => |v| bun.create(i.allocator(), CssColor, v), + }; + return .{ .result = .{ + .light_dark = .{ + .light = light, + .dark = dark, + }, + } }; + } + }.callback), + }; + } + return .{ .err = location.newUnexpectedTokenError(.{ .ident = function }) }; +} + +pub fn parseRGBComponents(input: *css.Parser, parser: *ComponentParser) Result(struct { f32, f32, f32, bool }) { + const red = switch (parser.parseNumberOrPercentage(input)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }; + + const is_legacy_syntax = parser.from == null and + !std.math.isNan(red.unitValue()) and + input.tryParse(css.Parser.expectComma, .{}).isOk(); + + const r, const g, const b = if (is_legacy_syntax) switch (red) { + .number => |v| brk: { + const r = bun.clamp(@round(v.value), 0.0, 255.0); + const g = switch (parser.parseNumber(input)) { + .err => |e| return .{ .err = e }, + .result => |vv| bun.clamp(@round(vv), 0.0, 255.0), + }; + if (input.expectComma().asErr()) |e| return .{ .err = e }; + const b = switch (parser.parseNumber(input)) { + .err => |e| return .{ .err = e }, + .result => |vv| bun.clamp(@round(vv), 0.0, 255.0), + }; + break :brk .{ r, g, b }; + }, + .percentage => |v| brk: { + const r = bun.clamp(@round(v.unit_value * 255.0), 0.0, 255.0); + const g = switch (parser.parsePercentage(input)) { + .err => |e| return .{ .err = e }, + .result => |vv| bun.clamp(@round(vv * 255.0), 0.0, 255.0), + }; + if (input.expectComma().asErr()) |e| return .{ .err = e }; + const b = switch (parser.parsePercentage(input)) { + .err => |e| return .{ .err = e }, + .result => |vv| bun.clamp(@round(vv * 255.0), 0.0, 255.0), + }; + break :brk .{ r, g, b }; + }, + } else blk: { + const getComponent = struct { + fn get(value: NumberOrPercentage) f32 { + return 
switch (value) { + .number => |v| if (std.math.isNan(v.value)) v.value else bun.clamp(@round(v.value), 0.0, 255.0) / 255.0, + .percentage => |v| bun.clamp(v.unit_value, 0.0, 1.0), + }; + } + }.get; + + const r = getComponent(red); + const g = getComponent(switch (parser.parseNumberOrPercentage(input)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }); + const b = getComponent(switch (parser.parseNumberOrPercentage(input)) { + .err => |e| return .{ .err = e }, + .result => |v| v, + }); + break :blk .{ r, g, b }; + }; + + if (is_legacy_syntax and (std.math.isNan(g) or std.math.isNan(b))) { + return .{ .err = input.newCustomError(css.ParserError.invalid_value) }; + } + return .{ .result = .{ r, g, b, is_legacy_syntax } }; +} + +pub fn parseHSLHWBComponents(comptime T: type, input: *css.Parser, parser: *ComponentParser, allows_legacy: bool) Result(struct { f32, f32, f32, bool }) { + _ = T; // autofix + const h = switch (parseAngleOrNumber(input, parser)) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + const is_legacy_syntax = allows_legacy and + parser.from == null and + !std.math.isNan(h) and + input.tryParse(css.Parser.expectComma, .{}).isOk(); + const a = switch (parser.parsePercentage(input)) { + .result => |v| bun.clamp(v, 0.0, 1.0), + .err => |e| return .{ .err = e }, + }; + if (is_legacy_syntax) { + if (input.expectColon().asErr()) |e| return .{ .err = e }; + } + const b = switch (parser.parsePercentage(input)) { + .result => |v| bun.clamp(v, 0.0, 1.0), + .err => |e| return .{ .err = e }, + }; + if (is_legacy_syntax and (std.math.isNan(a) or std.math.isNan(b))) { + return .{ .err = input.newCustomError(css.ParserError.invalid_value) }; + } + return .{ .result = .{ h, a, b, is_legacy_syntax } }; +} + +pub fn mapGamut(comptime T: type, color: T) T { + const conversion_function_name = "into" ++ comptime bun.meta.typeName(T); + const JND: f32 = 0.02; + const EPSILON: f32 = 0.00001; + + // https://www.w3.org/TR/css-color-4/#binsearch + var current: OKLCH = color.intoOKLCH(); + + // If lightness is >= 100%, return pure white. + if (@abs(current.l - 1.0) < EPSILON or current.l > 1.0) { + const oklch = OKLCH{ + .l = 1.0, + .c = 0.0, + .h = 0.0, + .alpha = current.alpha, + }; + return @call(.auto, @field(OKLCH, conversion_function_name), .{&oklch}); + } + + // If lightness <= 0%, return pure black. 
+ if (current.l < EPSILON) { + const oklch = OKLCH{ + .l = 0.0, + .c = 0.0, + .h = 0.0, + .alpha = current.alpha, + }; + return @call(.auto, @field(OKLCH, conversion_function_name), .{&oklch}); + } + + var min: f32 = 0.0; + var max = current.c; + + while ((max - min) > EPSILON) { + const chroma = (min + max) / 2.0; + current.c = chroma; + + const converted = @call(.auto, @field(OKLCH, conversion_function_name), .{¤t}); + if (converted.inGamut()) { + min = chroma; + continue; + } + + const clipped = converted.clip(); + const delta_e = deltaEok(T, clipped, current); + if (delta_e < JND) { + return clipped; + } + + max = chroma; + } + + return @call(.auto, @field(OKLCH, conversion_function_name), .{¤t}); +} + +pub fn deltaEok(comptime T: type, _a: T, _b: OKLCH) f32 { + // https://www.w3.org/TR/css-color-4/#color-difference-OK + const a = T.intoOKLAB(&_a); + const b: OKLAB = _b.intoOKLAB(); + + const delta_l = a.l - b.l; + const delta_a = a.a - b.a; + const delta_b = a.b - b.b; + + return @sqrt( + bun.powf(delta_l, 2) + + bun.powf(delta_a, 2) + + bun.powf(delta_b, 2), + ); +} + +pub fn parseLab( + comptime T: type, + input: *css.Parser, + parser: *ComponentParser, + comptime func: *const fn (f32, f32, f32, f32) LABColor, +) Result(CssColor) { + const Closure = struct { + parser: *ComponentParser, + + pub fn parsefn(this: *@This(), i: *css.Parser) Result(CssColor) { + return this.parser.parseRelative(i, T, CssColor, @This().innerfn, .{}); + } + + pub fn innerfn(i: *css.Parser, p: *ComponentParser) Result(CssColor) { + // f32::max() does not propagate NaN, so use clamp for now until f32::maximum() is stable. + const l = bun.clamp( + switch (p.parsePercentage(i)) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }, + 0.0, + std.math.floatMax(f32), + ); + const a = switch (p.parseNumber(i)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + const b = switch (p.parseNumber(i)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + const alpha = switch (parseAlpha(i, p)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + const lab = func(l, a, b, alpha); + const heap_lab = bun.create(i.allocator(), LABColor, lab); + heap_lab.* = lab; + return .{ .result = CssColor{ .lab = heap_lab } }; + } + }; + var closure = Closure{ + .parser = parser, + }; + // https://www.w3.org/TR/css-color-4/#funcdef-lab + return input.parseNestedBlock( + CssColor, + &closure, + Closure.parsefn, + ); +} + +pub fn parseLch( + comptime T: type, + input: *css.Parser, + parser: *ComponentParser, + comptime func: *const fn ( + f32, + f32, + f32, + f32, + ) LABColor, +) Result(CssColor) { + const Closure = struct { + parser: *ComponentParser, + + pub fn parseNestedBlockFn(this: *@This(), i: *css.Parser) Result(CssColor) { + return this.parser.parseRelative(i, T, CssColor, @This().parseRelativeFn, .{this}); + } + + pub fn parseRelativeFn(i: *css.Parser, p: *ComponentParser, this: *@This()) Result(CssColor) { + _ = this; // autofix + if (p.from) |*from| { + // Relative angles should be normalized. 
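+ // e.g. a relative hue of -90 becomes 270 after normalization (illustrative).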
+ // https://www.w3.org/TR/css-color-5/#relative-LCH + // from.components[2] %= 360.0; + from.components[2] = @mod(from.components[2], 360.0); + if (from.components[2] < 0.0) { + from.components[2] += 360.0; + } + } + + const l = bun.clamp( + switch (p.parsePercentage(i)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }, + 0.0, + std.math.floatMax(f32), + ); + const c = bun.clamp( + switch (p.parseNumber(i)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }, + 0.0, + std.math.floatMax(f32), + ); + const h = switch (parseAngleOrNumber(i, p)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + const alpha = switch (parseAlpha(i, p)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + const lab = func(l, c, h, alpha); + return .{ + .result = .{ + .lab = bun.create(i.allocator(), LABColor, lab), + }, + }; + } + }; + + var closure = Closure{ + .parser = parser, + }; + + return input.parseNestedBlock(CssColor, &closure, Closure.parseNestedBlockFn); +} + +/// Parses the hsl() and hwb() functions. +/// The results of this function are stored as floating point if there are any `none` components. +/// https://drafts.csswg.org/css-color-4/#the-hsl-notation +pub fn parseHslHwb( + comptime T: type, + input: *css.Parser, + parser: *ComponentParser, + allows_legacy: bool, + comptime func: *const fn ( + Allocator, + f32, + f32, + f32, + f32, + ) CssColor, +) Result(CssColor) { + const Closure = struct { + parser: *ComponentParser, + allows_legacy: bool, + + pub fn parseNestedBlockFn(this: *@This(), i: *css.Parser) Result(CssColor) { + return this.parser.parseRelative(i, T, CssColor, @This().parseRelativeFn, .{this}); + } + + pub fn parseRelativeFn(i: *css.Parser, p: *ComponentParser, this: *@This()) Result(CssColor) { + const h, const a, const b, const is_legacy = switch (parseHslHwbComponents(T, i, p, this.allows_legacy)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + const alpha = switch (if (is_legacy) parseLegacyAlpha(i, p) else parseAlpha(i, p)) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + + return .{ .result = func(i.allocator(), h, a, b, alpha) }; + } + }; + + var closure = Closure{ + .parser = parser, + .allows_legacy = allows_legacy, + }; + + return input.parseNestedBlock(CssColor, &closure, Closure.parseNestedBlockFn); +} + +pub fn parseHslHwbComponents( + comptime T: type, + input: *css.Parser, + parser: *ComponentParser, + allows_legacy: bool, +) Result(struct { f32, f32, f32, bool }) { + _ = T; // autofix + const h = switch (parseAngleOrNumber(input, parser)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + const is_legacy_syntax = allows_legacy and + parser.from == null and + !std.math.isNan(h) and + input.tryParse(css.Parser.expectComma, .{}).isOk(); + + const a = bun.clamp( + switch (parser.parsePercentage(input)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }, + 0.0, + 1.0, + ); + + if (is_legacy_syntax) { + if (input.expectComma().asErr()) |e| return .{ .err = e }; + } + + const b = bun.clamp( + switch (parser.parsePercentage(input)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }, + 0.0, + 1.0, + ); + + if (is_legacy_syntax and (std.math.isNan(a) or std.math.isNan(b))) { + return .{ .err = input.newCustomError(css.ParserError.invalid_value) }; + } + + return .{ .result = .{ h, a, b, is_legacy_syntax } }; +} + +pub fn parseAngleOrNumber(input: *css.Parser, parser: *const ComponentParser) Result(f32) { + const result = switch 
(parser.parseAngleOrNumber(input)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + return .{ + .result = switch (result) { + .number => |v| v.value, + .angle => |v| v.degrees, + }, + }; +} + +fn parseRgb(input: *css.Parser, parser: *ComponentParser) Result(CssColor) { + // https://drafts.csswg.org/css-color-4/#rgb-functions + + const Closure = struct { + p: *ComponentParser, + + pub fn parseNestedBlockFn(this: *@This(), i: *css.Parser) Result(CssColor) { + return this.p.parseRelative(i, SRGB, CssColor, @This().parseRelativeFn, .{this}); + } + + pub fn parseRelativeFn(i: *css.Parser, p: *ComponentParser, this: *@This()) Result(CssColor) { + _ = this; // autofix + const r, const g, const b, const is_legacy = switch (parseRGBComponents(i, p)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + const alpha = switch (if (is_legacy) parseLegacyAlpha(i, p) else parseAlpha(i, p)) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + + if (!std.math.isNan(r) and + !std.math.isNan(g) and + !std.math.isNan(b) and + !std.math.isNan(alpha)) + { + if (is_legacy) return .{ + .result = .{ + .rgba = RGBA.new( + @intFromFloat(r), + @intFromFloat(g), + @intFromFloat(b), + alpha, + ), + }, + }; + + return .{ + .result = .{ + .rgba = RGBA.fromFloats( + r, + g, + b, + alpha, + ), + }, + }; + } else { + return .{ + .result = .{ + .float = bun.create( + i.allocator(), + FloatColor, + .{ + .rgb = .{ + .r = r, + .g = g, + .b = b, + .alpha = alpha, + }, + }, + ), + }, + }; + } + } + }; + var closure = Closure{ + .p = parser, + }; + return input.parseNestedBlock(CssColor, &closure, Closure.parseNestedBlockFn); +} + +// pub fn parseRgbComponents(input: *css.Parser, parser: *ComponentParser) Result(struct { +// f32, +// f32, +// f32, +// bool, +// }) { +// const red = switch (parser.parseNumberOrPercentage(input)) { +// .result => |vv| vv, +// .err => |e| return .{ .err = e }, +// }; +// const is_legacy_syntax = parser.from == null and !std.math.isNan(red.unitValue()) and input.tryParse(css.Parser.expectComma, .{}).isOk(); + +// const r, const g, const b = if (is_legacy_syntax) switch (red) { +// .number => |num| brk: { +// const r = bun.clamp(@round(num.value), 0.0, 255.0); +// const g = bun.clamp( +// @round( +// switch (parser.parseNumber(input)) { +// .result => |vv| vv, +// .err => |e| return .{ .err = e }, +// }, +// ), +// 0.0, +// 255.0, +// ); +// if (input.expectComma().asErr()) |e| return .{ .err = e }; +// const b = bun.clamp( +// @round( +// switch (parser.parseNumber(input)) { +// .result => |vv| vv, +// .err => |e| return .{ .err = e }, +// }, +// ), +// 0.0, +// 255.0, +// ); +// break :brk .{ r, g, b }; +// }, +// .percentage => |per| brk: { +// const unit_value = per.unit_value; +// const r = bun.clamp(@round(unit_value * 255.0), 0.0, 255.0); +// const g = bun.clamp( +// @round( +// switch (parser.parsePercentage(input)) { +// .result => |vv| vv, +// .err => |e| return .{ .err = e }, +// } * 255.0, +// ), +// 0.0, +// 255.0, +// ); +// if (input.expectComma().asErr()) |e| return .{ .err = e }; +// const b = bun.clamp( +// @round( +// switch (parser.parsePercentage(input)) { +// .result => |vv| vv, +// .err => |e| return .{ .err = e }, +// } * 255.0, +// ), +// 0.0, +// 255.0, +// ); +// break :brk .{ r, g, b }; +// }, +// } else brk: { +// const get = struct { +// pub fn component(value: NumberOrPercentage) f32 { +// return switch (value) { +// .number => |num| { +// const v = num.value; +// if (std.math.isNan(v)) return v; +// return bun.clamp(@round(v), 
0.0, 255.0) / 255.0; +// }, +// .percentage => |per| bun.clamp(per.unit_value, 0.0, 1.0), +// }; +// } +// }; +// const r = get.component(red); +// const g = get.component( +// switch (parser.parseNumberOrPercentage(input)) { +// .result => |vv| vv, +// .err => |e| return .{ .err = e }, +// }, +// ); +// const b = get.component( +// switch (parser.parseNumberOrPercentage(input)) { +// .result => |vv| vv, +// .err => |e| return .{ .err = e }, +// }, +// ); +// break :brk .{ r, g, b }; +// }; + +// if (is_legacy_syntax and (std.math.isNan(g) or std.math.isNan(b))) { +// return .{ .err = input.newCustomError(css.ParserError.invalid_value) }; +// } + +// return .{ .result = .{ r, g, b, is_legacy_syntax } }; +// } + +fn parseLegacyAlpha(input: *css.Parser, parser: *const ComponentParser) Result(f32) { + if (!input.isExhausted()) { + if (input.expectComma().asErr()) |e| return .{ .err = e }; + return .{ .result = bun.clamp( + switch (parseNumberOrPercentage(input, parser)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }, + 0.0, + 1.0, + ) }; + } + return .{ .result = 1.0 }; +} + +fn parseAlpha(input: *css.Parser, parser: *const ComponentParser) Result(f32) { + const res = if (input.tryParse(css.Parser.expectDelim, .{'/'}).isOk()) + bun.clamp(switch (parseNumberOrPercentage(input, parser)) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }, 0.0, 1.0) + else + 1.0; + + return .{ .result = res }; +} + +pub fn parseNumberOrPercentage(input: *css.Parser, parser: *const ComponentParser) Result(f32) { + const result = switch (parser.parseNumberOrPercentage(input)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + return switch (result) { + .number => |value| .{ .result = value.value }, + .percentage => |value| .{ .result = value.unit_value }, + }; +} + +// Copied from an older version of cssparser. +/// A color with red, green, blue, and alpha components, in a byte each. +pub const RGBA = struct { + /// The red component. + red: u8, + /// The green component. + green: u8, + /// The blue component. + blue: u8, + /// The alpha component. + alpha: u8, + + pub usingnamespace ColorspaceConversions(@This()); + pub usingnamespace color_conversions.convert_RGBA; + + pub fn new(red: u8, green: u8, blue: u8, alpha: f32) RGBA { + return RGBA{ + .red = red, + .green = green, + .blue = blue, + .alpha = clamp_unit_f32(alpha), + }; + } + + /// Constructs a new RGBA value from float components. It expects the red, + /// green, blue and alpha channels in that order, and all values will be + /// clamped to the 0.0 ... 1.0 range. + pub fn fromFloats(red: f32, green: f32, blue: f32, alpha: f32) RGBA { + return RGBA.new( + clamp_unit_f32(red), + clamp_unit_f32(green), + clamp_unit_f32(blue), + alpha, + ); + } + + pub fn transparent() RGBA { + return RGBA.new(0, 0, 0, 0.0); + } + + /// Returns the red channel in a floating point number form, from 0 to 1. + pub fn redF32(this: *const RGBA) f32 { + return @as(f32, @floatFromInt(this.red)) / 255.0; + } + + /// Returns the green channel in a floating point number form, from 0 to 1. + pub fn greenF32(this: *const RGBA) f32 { + return @as(f32, @floatFromInt(this.green)) / 255.0; + } + + /// Returns the blue channel in a floating point number form, from 0 to 1. + pub fn blueF32(this: *const RGBA) f32 { + return @as(f32, @floatFromInt(this.blue)) / 255.0; + } + + /// Returns the alpha channel in a floating point number form, from 0 to 1. 
+ pub fn alphaF32(this: *const RGBA) f32 { + return @as(f32, @floatFromInt(this.alpha)) / 255.0; + } + + pub fn intoSRGB(rgb: *const RGBA) SRGB { + return SRGB{ + .r = rgb.redF32(), + .g = rgb.greenF32(), + .b = rgb.blueF32(), + .alpha = rgb.alphaF32(), + }; + } +}; + +fn clamp_unit_f32(val: f32) u8 { + // Whilst scaling by 256 and flooring would provide + // an equal distribution of integers to percentage inputs, + // this is not what Gecko does so we instead multiply by 255 + // and round (adding 0.5 and flooring is equivalent to rounding) + // + // Chrome does something similar for the alpha value, but not + // the rgb values. + // + // See https://bugzilla.mozilla.org/show_bug.cgi?id=1340484 + // + // Clamping to 256 and rounding after would let 1.0 map to 256, and + // `256.0_f32 as u8` is undefined behavior: + // + // https://github.com/rust-lang/rust/issues/10184 + return clamp_floor_256_f32(val * 255.0); +} + +fn clamp_floor_256_f32(val: f32) u8 { + return @intFromFloat(@min(255.0, @max(0.0, @round(val)))); + // val.round().max(0.).min(255.) as u8 +} + +/// A color in a LAB color space, including the `lab()`, `lch()`, `oklab()`, and `oklch()` functions. +pub const LABColor = union(enum) { + /// A `lab()` color. + lab: LAB, + /// An `lch()` color. + lch: LCH, + /// An `oklab()` color. + oklab: OKLAB, + /// An `oklch()` color. + oklch: OKLCH, + + pub fn newLAB(l: f32, a: f32, b: f32, alpha: f32) LABColor { + return LABColor{ + .lab = LAB.new(l, a, b, alpha), + }; + } + + pub fn newOKLAB(l: f32, a: f32, b: f32, alpha: f32) LABColor { + return LABColor{ + .lab = OKLAB.new(l, a, b, alpha), + }; + } + + pub fn newLCH(l: f32, a: f32, b: f32, alpha: f32) LABColor { + return LABColor{ + .lab = LCH.new(l, a, b, alpha), + }; + } + + pub fn newOKLCH(l: f32, a: f32, b: f32, alpha: f32) LABColor { + return LABColor{ + .lab = LCH.new(l, a, b, alpha), + }; + } + + pub fn hash(this: *const @This(), hasher: *std.hash.Wyhash) void { + return css.implementHash(@This(), this, hasher); + } +}; + +/// A color in a predefined color space, e.g. `display-p3`. +pub const PredefinedColor = union(enum) { + /// A color in the `srgb` color space. + srgb: SRGB, + /// A color in the `srgb-linear` color space. + srgb_linear: SRGBLinear, + /// A color in the `display-p3` color space. + display_p3: P3, + /// A color in the `a98-rgb` color space. + a98: A98, + /// A color in the `prophoto-rgb` color space. + prophoto: ProPhoto, + /// A color in the `rec2020` color space. + rec2020: Rec2020, + /// A color in the `xyz-d50` color space. + xyz_d50: XYZd50, + /// A color in the `xyz-d65` color space. + xyz_d65: XYZd65, + + pub fn hash(this: *const @This(), hasher: *std.hash.Wyhash) void { + return css.implementHash(@This(), this, hasher); + } +}; + +/// A floating point representation of color types that +/// are usually stored as RGBA. These are used when there +/// are any `none` components, which are represented as NaN. +pub const FloatColor = union(enum) { + /// An RGB color. + rgb: SRGB, + /// An HSL color. + hsl: HSL, + /// An HWB color. + hwb: HWB, + + pub fn hash(this: *const @This(), hasher: *std.hash.Wyhash) void { + return css.implementHash(@This(), this, hasher); + } +}; + +/// A CSS [system color](https://drafts.csswg.org/css-color/#css-system-colors) keyword. +/// *NOTE* these are intentionally in flat case +pub const SystemColor = enum { + /// Background of accented user interface controls. + accentcolor, + /// Text of accented user interface controls. + accentcolortext, + /// Text in active links. 
For light backgrounds, traditionally red. + activetext, + /// The base border color for push buttons. + buttonborder, + /// The face background color for push buttons. + buttonface, + /// Text on push buttons. + buttontext, + /// Background of application content or documents. + canvas, + /// Text in application content or documents. + canvastext, + /// Background of input fields. + field, + /// Text in input fields. + fieldtext, + /// Disabled text. (Often, but not necessarily, gray.) + graytext, + /// Background of selected text, for example from ::selection. + highlight, + /// Text of selected text. + highlighttext, + /// Text in non-active, non-visited links. For light backgrounds, traditionally blue. + linktext, + /// Background of text that has been specially marked (such as by the HTML mark element). + mark, + /// Text that has been specially marked (such as by the HTML mark element). + marktext, + /// Background of selected items, for example a selected checkbox. + selecteditem, + /// Text of selected items. + selecteditemtext, + /// Text in visited links. For light backgrounds, traditionally purple. + visitedtext, + + // Deprecated colors: https://drafts.csswg.org/css-color/#deprecated-system-colors + + /// Active window border. Same as ButtonBorder. + activeborder, + /// Active window caption. Same as Canvas. + activecaption, + /// Background color of multiple document interface. Same as Canvas. + appworkspace, + /// Desktop background. Same as Canvas. + background, + /// The color of the border facing the light source for 3-D elements that appear 3-D due to one layer of surrounding border. Same as ButtonFace. + buttonhighlight, + /// The color of the border away from the light source for 3-D elements that appear 3-D due to one layer of surrounding border. Same as ButtonFace. + buttonshadow, + /// Text in caption, size box, and scrollbar arrow box. Same as CanvasText. + captiontext, + /// Inactive window border. Same as ButtonBorder. + inactiveborder, + /// Inactive window caption. Same as Canvas. + inactivecaption, + /// Color of text in an inactive caption. Same as GrayText. + inactivecaptiontext, + /// Background color for tooltip controls. Same as Canvas. + infobackground, + /// Text color for tooltip controls. Same as CanvasText. + infotext, + /// Menu background. Same as Canvas. + menu, + /// Text in menus. Same as CanvasText. + menutext, + /// Scroll bar gray area. Same as Canvas. + scrollbar, + /// The color of the darker (generally outer) of the two borders away from the light source for 3-D elements that appear 3-D due to two concentric layers of surrounding border. Same as ButtonBorder. + threeddarkshadow, + /// The face background color for 3-D elements that appear 3-D due to two concentric layers of surrounding border. Same as ButtonFace. + threedface, + /// The color of the lighter (generally outer) of the two borders facing the light source for 3-D elements that appear 3-D due to two concentric layers of surrounding border. Same as ButtonBorder. + threedhighlight, + /// The color of the darker (generally inner) of the two borders facing the light source for 3-D elements that appear 3-D due to two concentric layers of surrounding border. Same as ButtonBorder. + threedlightshadow, + /// The color of the lighter (generally inner) of the two borders away from the light source for 3-D elements that appear 3-D due to two concentric layers of surrounding border. Same as ButtonBorder. + threedshadow, + /// Window background. Same as Canvas. + window, + /// Window frame. 
Same as ButtonBorder. + windowframe, + /// Text in windows. Same as CanvasText. + windowtext, + + pub fn isCompatible(this: SystemColor, browsers: css.targets.Browsers) bool { + return switch (this) { + .accentcolor, .accentcolortext => css.Feature.isCompatible(.accent_system_color, browsers), + else => true, + }; + } + + pub fn asStr(this: *const @This()) []const u8 { + return css.enum_property_util.asStr(@This(), this); + } + + pub fn parse(input: *css.Parser) Result(@This()) { + return css.enum_property_util.parse(@This(), input); + } + + pub fn toCss(this: *const @This(), comptime W: type, dest: *Printer(W)) PrintErr!void { + return css.enum_property_util.toCss(@This(), this, W, dest); + } +}; + +/// A color in the [CIE Lab](https://www.w3.org/TR/css-color-4/#cie-lab) color space. +pub const LAB = struct { + /// The lightness component. + l: f32, + /// The a component. + a: f32, + /// The b component. + b: f32, + /// The alpha component. + alpha: f32, + + pub usingnamespace DefineColorspace(@This()); + pub usingnamespace ColorspaceConversions(@This()); + pub usingnamespace UnboundedColorGamut(@This()); + + pub usingnamespace AdjustPowerlessLAB(@This()); + pub usingnamespace DeriveInterpolate(@This(), "l", "a", "b"); + pub usingnamespace RecangularPremultiply(@This(), "l", "a", "b"); + + pub usingnamespace color_conversions.convert_LAB; + + pub const ChannelTypeMap = .{ + .l = ChannelType{ .percentage = true }, + .a = ChannelType{ .number = true }, + .b = ChannelType{ .number = true }, + }; + + pub fn adjustHue(_: *@This(), _: *@This(), _: HueInterpolationMethod) void {} +}; + +/// A color in the [`sRGB`](https://www.w3.org/TR/css-color-4/#predefined-sRGB) color space. +pub const SRGB = struct { + /// The red component. + r: f32, + /// The green component. + g: f32, + /// The blue component. + b: f32, + /// The alpha component. + alpha: f32, + + pub usingnamespace DefineColorspace(@This()); + pub usingnamespace ColorspaceConversions(@This()); + pub usingnamespace BoundedColorGamut(@This()); + + pub usingnamespace DeriveInterpolate(@This(), "r", "g", "b"); + pub usingnamespace RecangularPremultiply(@This(), "r", "g", "b"); + + pub usingnamespace color_conversions.convert_SRGB; + + pub const ChannelTypeMap = .{ + .r = ChannelType{ .percentage = true }, + .g = ChannelType{ .percentage = true }, + .b = ChannelType{ .percentage = true }, + }; + + pub fn adjustPowerlessComponents(_: *@This()) void {} + pub fn adjustHue(_: *@This(), _: *@This(), _: HueInterpolationMethod) void {} + + pub fn intoRGBA(_rgb: *const SRGB) RGBA { + const rgb = _rgb.resolve(); + return RGBA.fromFloats( + rgb.r, + rgb.g, + rgb.b, + rgb.alpha, + ); + } +}; + +/// A color in the [`hsl`](https://www.w3.org/TR/css-color-4/#the-hsl-notation) color space. +pub const HSL = struct { + /// The hue component. + h: f32, + /// The saturation component. + s: f32, + /// The lightness component. + l: f32, + /// The alpha component. 
+ alpha: f32, + + pub usingnamespace DefineColorspace(@This()); + pub usingnamespace ColorspaceConversions(@This()); + pub usingnamespace HslHwbColorGamut(@This(), "s", "l"); + + pub usingnamespace PolarPremultiply(@This(), "s", "l"); + pub usingnamespace DeriveInterpolate(@This(), "h", "s", "l"); + + pub usingnamespace color_conversions.convert_HSL; + + pub const ChannelTypeMap = .{ + .h = ChannelType{ .angle = true }, + .s = ChannelType{ .percentage = true }, + .l = ChannelType{ .percentage = true }, + }; + + pub fn adjustPowerlessComponents(this: *HSL) void { + // If the saturation of an HSL color is 0%, then the hue component is powerless. + // If the lightness of an HSL color is 0% or 100%, both the saturation and hue components are powerless. + if (@abs(this.s) < std.math.floatEps(f32)) { + this.h = std.math.nan(f32); + } + + if (@abs(this.l) < std.math.floatEps(f32) or @abs(this.l - 1.0) < std.math.floatEps(f32)) { + this.h = std.math.nan(f32); + this.s = std.math.nan(f32); + } + } + + pub fn adjustHue(this: *HSL, other: *HSL, method: HueInterpolationMethod) void { + _ = method.interpolate(&this.h, &other.h); + } +}; + +/// A color in the [`hwb`](https://www.w3.org/TR/css-color-4/#the-hwb-notation) color space. +pub const HWB = struct { + /// The hue component. + h: f32, + /// The whiteness component. + w: f32, + /// The blackness component. + b: f32, + /// The alpha component. + alpha: f32, + + pub usingnamespace DefineColorspace(@This()); + pub usingnamespace ColorspaceConversions(@This()); + pub usingnamespace HslHwbColorGamut(@This(), "w", "b"); + + pub usingnamespace PolarPremultiply(@This(), "w", "b"); + pub usingnamespace DeriveInterpolate(@This(), "h", "w", "b"); + + pub usingnamespace color_conversions.convert_HWB; + + pub const ChannelTypeMap = .{ + .h = ChannelType{ .angle = true }, + .w = ChannelType{ .percentage = true }, + .b = ChannelType{ .percentage = true }, + }; + + pub fn adjustPowerlessComponents(this: *HWB) void { + // If white+black is equal to 100% (after normalization), it defines an achromatic color, + // i.e. some shade of gray, without any hint of the chosen hue. In this case, the hue component is powerless. + if (@abs(this.w + this.b - 1.0) < std.math.floatEps(f32)) { + this.h = std.math.nan(f32); + } + } + + pub fn adjustHue(this: *HWB, other: *HWB, method: HueInterpolationMethod) void { + _ = method.interpolate(&this.h, &other.h); + } +}; + +/// A color in the [`sRGB-linear`](https://www.w3.org/TR/css-color-4/#predefined-sRGB-linear) color space. +pub const SRGBLinear = struct { + /// The red component. + r: f32, + /// The green component. + g: f32, + /// The blue component. + b: f32, + /// The alpha component. + alpha: f32, + + pub usingnamespace DefineColorspace(@This()); + pub usingnamespace ColorspaceConversions(@This()); + pub usingnamespace BoundedColorGamut(@This()); + + pub usingnamespace DeriveInterpolate(@This(), "r", "g", "b"); + pub usingnamespace RecangularPremultiply(@This(), "r", "g", "b"); + + pub usingnamespace color_conversions.convert_SRGBLinear; + + pub const ChannelTypeMap = .{ + .r = ChannelType{ .angle = true }, + .g = ChannelType{ .percentage = true }, + .b = ChannelType{ .percentage = true }, + }; + + pub fn adjustPowerlessComponents(_: *@This()) void {} + pub fn adjustHue(_: *@This(), _: *@This(), _: HueInterpolationMethod) void {} +}; + +/// A color in the [`display-p3`](https://www.w3.org/TR/css-color-4/#predefined-display-p3) color space. +pub const P3 = struct { + /// The red component. + r: f32, + /// The green component. 
+ g: f32, + /// The blue component. + b: f32, + /// The alpha component. + alpha: f32, + + pub usingnamespace DefineColorspace(@This()); + pub usingnamespace ColorspaceConversions(@This()); + pub usingnamespace BoundedColorGamut(@This()); + + pub usingnamespace color_conversions.convert_P3; + + pub const ChannelTypeMap = .{ + .r = ChannelType{ .percentage = true }, + .g = ChannelType{ .percentage = true }, + .b = ChannelType{ .percentage = true }, + }; +}; + +/// A color in the [`a98-rgb`](https://www.w3.org/TR/css-color-4/#predefined-a98-rgb) color space. +pub const A98 = struct { + /// The red component. + r: f32, + /// The green component. + g: f32, + /// The blue component. + b: f32, + /// The alpha component. + alpha: f32, + + pub usingnamespace DefineColorspace(@This()); + pub usingnamespace ColorspaceConversions(@This()); + pub usingnamespace BoundedColorGamut(@This()); + + pub usingnamespace color_conversions.convert_A98; + + pub const ChannelTypeMap = .{ + .r = ChannelType{ .percentage = true }, + .g = ChannelType{ .percentage = true }, + .b = ChannelType{ .percentage = true }, + }; +}; + +/// A color in the [`prophoto-rgb`](https://www.w3.org/TR/css-color-4/#predefined-prophoto-rgb) color space. +pub const ProPhoto = struct { + /// The red component. + r: f32, + /// The green component. + g: f32, + /// The blue component. + b: f32, + /// The alpha component. + alpha: f32, + + pub usingnamespace DefineColorspace(@This()); + pub usingnamespace ColorspaceConversions(@This()); + pub usingnamespace BoundedColorGamut(@This()); + + pub usingnamespace color_conversions.convert_ProPhoto; + + pub const ChannelTypeMap = .{ + .r = ChannelType{ .percentage = true }, + .g = ChannelType{ .percentage = true }, + .b = ChannelType{ .percentage = true }, + }; +}; + +/// A color in the [`rec2020`](https://www.w3.org/TR/css-color-4/#predefined-rec2020) color space. +pub const Rec2020 = struct { + /// The red component. + r: f32, + /// The green component. + g: f32, + /// The blue component. + b: f32, + /// The alpha component. + alpha: f32, + + pub usingnamespace DefineColorspace(@This()); + pub usingnamespace ColorspaceConversions(@This()); + pub usingnamespace BoundedColorGamut(@This()); + + pub usingnamespace color_conversions.convert_Rec2020; + + pub const ChannelTypeMap = .{ + .r = ChannelType{ .percentage = true }, + .g = ChannelType{ .percentage = true }, + .b = ChannelType{ .percentage = true }, + }; +}; + +/// A color in the [`xyz-d50`](https://www.w3.org/TR/css-color-4/#predefined-xyz) color space. +pub const XYZd50 = struct { + /// The x component. + x: f32, + /// The y component. + y: f32, + /// The z component. + z: f32, + /// The alpha component. + alpha: f32, + + pub usingnamespace DefineColorspace(@This()); + pub usingnamespace ColorspaceConversions(@This()); + pub usingnamespace UnboundedColorGamut(@This()); + + pub usingnamespace DeriveInterpolate(@This(), "x", "y", "z"); + pub usingnamespace RecangularPremultiply(@This(), "x", "y", "z"); + + pub usingnamespace color_conversions.convert_XYZd50; + + pub const ChannelTypeMap = .{ + .x = ChannelType{ .percentage = true }, + .y = ChannelType{ .percentage = true }, + .z = ChannelType{ .percentage = true }, + }; +}; + +/// A color in the [`xyz-d65`](https://www.w3.org/TR/css-color-4/#predefined-xyz) color space. +pub const XYZd65 = struct { + /// The x component. + x: f32, + /// The y component. + y: f32, + /// The z component. + z: f32, + /// The alpha component. 
+ alpha: f32, + + pub usingnamespace DefineColorspace(@This()); + pub usingnamespace ColorspaceConversions(@This()); + pub usingnamespace UnboundedColorGamut(@This()); + + pub usingnamespace DeriveInterpolate(@This(), "x", "y", "z"); + pub usingnamespace RecangularPremultiply(@This(), "x", "y", "z"); + + pub usingnamespace color_conversions.convert_XYZd65; + + pub const ChannelTypeMap = .{ + .x = ChannelType{ .percentage = true }, + .y = ChannelType{ .percentage = true }, + .z = ChannelType{ .percentage = true }, + }; + + pub fn adjustPowerlessComponents(_: *@This()) void {} + pub fn adjustHue(_: *@This(), _: *@This(), _: HueInterpolationMethod) void {} +}; + +/// A color in the [CIE LCH](https://www.w3.org/TR/css-color-4/#cie-lab) color space. +pub const LCH = struct { + /// The lightness component. + l: f32, + /// The chroma component. + c: f32, + /// The hue component. + h: f32, + /// The alpha component. + alpha: f32, + + pub usingnamespace DefineColorspace(@This()); + pub usingnamespace ColorspaceConversions(@This()); + pub usingnamespace UnboundedColorGamut(@This()); + + pub usingnamespace AdjustPowerlessLCH(@This()); + pub usingnamespace DeriveInterpolate(@This(), "l", "c", "h"); + pub usingnamespace RecangularPremultiply(@This(), "l", "c", "h"); + + pub usingnamespace color_conversions.convert_LCH; + + pub const ChannelTypeMap = .{ + .l = ChannelType{ .percentage = true }, + .c = ChannelType{ .number = true }, + .h = ChannelType{ .angle = true }, + }; +}; + +/// A color in the [OKLab](https://www.w3.org/TR/css-color-4/#ok-lab) color space. +pub const OKLAB = struct { + /// The lightness component. + l: f32, + /// The a component. + a: f32, + /// The b component. + b: f32, + /// The alpha component. + alpha: f32, + + pub usingnamespace DefineColorspace(@This()); + pub usingnamespace ColorspaceConversions(@This()); + pub usingnamespace UnboundedColorGamut(@This()); + + pub usingnamespace AdjustPowerlessLAB(@This()); + pub usingnamespace DeriveInterpolate(@This(), "l", "a", "b"); + pub usingnamespace RecangularPremultiply(@This(), "l", "a", "b"); + + pub usingnamespace color_conversions.convert_OKLAB; + + pub const ChannelTypeMap = .{ + .l = ChannelType{ .percentage = true }, + .a = ChannelType{ .number = true }, + .b = ChannelType{ .number = true }, + }; + + pub fn adjustHue(_: *@This(), _: *@This(), _: HueInterpolationMethod) void {} +}; + +/// A color in the [OKLCH](https://www.w3.org/TR/css-color-4/#ok-lab) color space. +pub const OKLCH = struct { + /// The lightness component. + l: f32, + /// The chroma component. + c: f32, + /// The hue component. + h: f32, + /// The alpha component. 
+ alpha: f32, + + pub usingnamespace DefineColorspace(@This()); + pub usingnamespace ColorspaceConversions(@This()); + pub usingnamespace UnboundedColorGamut(@This()); + + pub usingnamespace AdjustPowerlessLCH(@This()); + pub usingnamespace DeriveInterpolate(@This(), "l", "c", "h"); + pub usingnamespace RecangularPremultiply(@This(), "l", "c", "h"); + + pub usingnamespace color_conversions.convert_OKLCH; + + pub const ChannelTypeMap = .{ + .l = ChannelType{ .percentage = true }, + .c = ChannelType{ .number = true }, + .h = ChannelType{ .angle = true }, + }; +}; + +pub const ComponentParser = struct { + allow_none: bool, + from: ?RelativeComponentParser, + + pub fn new(allow_none: bool) ComponentParser { + return ComponentParser{ + .allow_none = allow_none, + .from = null, + }; + } + + /// `func` must be a function like: + /// fn (*css.Parser, *ComponentParser, ...args) + pub fn parseRelative( + this: *ComponentParser, + input: *css.Parser, + comptime T: type, + comptime C: type, + comptime func: anytype, + args_: anytype, + ) Result(C) { + if (input.tryParse(css.Parser.expectIdentMatching, .{"from"}).isOk()) { + const from = switch (CssColor.parse(input)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + return this.parseFrom(from, input, T, C, func, args_); + } + + const args = bun.meta.ConcatArgs2(func, input, this, args_); + return @call(.auto, func, args); + } + + pub fn parseFrom( + this: *ComponentParser, + from: CssColor, + input: *css.Parser, + comptime T: type, + comptime C: type, + comptime func: anytype, + args_: anytype, + ) Result(C) { + if (from == .light_dark) { + const state = input.state(); + const light = switch (this.parseFrom(from.light_dark.light.*, input, T, C, func, args_)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + input.reset(&state); + const dark = switch (this.parseFrom(from.light_dark.dark.*, input, T, C, func, args_)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + return .{ .result = C.lightDarkOwned(input.allocator(), light, dark) }; + } + + const new_from = if (T.tryFromCssColor(&from)) |v| v.resolve() else return .{ .err = input.newCustomError(css.ParserError.invalid_value) }; + + this.from = RelativeComponentParser.new(&new_from); + + const args = bun.meta.ConcatArgs2(func, input, this, args_); + return @call(.auto, func, args); + } + + pub fn parseNumberOrPercentage(this: *const ComponentParser, input: *css.Parser) Result(NumberOrPercentage) { + if (this.from) |*from| { + if (input.tryParse(RelativeComponentParser.parseNumberOrPercentage, .{from}).asValue()) |res| { + return .{ .result = res }; + } + } + + if (input.tryParse(CSSNumberFns.parse, .{}).asValue()) |value| { + return .{ .result = NumberOrPercentage{ .number = .{ .value = value } } }; + } else if (input.tryParse(Percentage.parse, .{}).asValue()) |value| { + return .{ + .result = NumberOrPercentage{ + .percentage = .{ .unit_value = value.v }, + }, + }; + } else if (this.allow_none) { + if (input.expectIdentMatching("none").asErr()) |e| return .{ .err = e }; + return .{ .result = NumberOrPercentage{ + .number = .{ + .value = std.math.nan(f32), + }, + } }; + } else { + return .{ .err = input.newCustomError(css.ParserError.invalid_value) }; + } + } + + pub fn parseAngleOrNumber(this: *const ComponentParser, input: *css.Parser) Result(css.color.AngleOrNumber) { + if (this.from) |*from| { + if (input.tryParse(RelativeComponentParser.parseAngleOrNumber, .{from}).asValue()) |res| { + return .{ .result = res }; + } + } + + if 
(input.tryParse(Angle.parse, .{}).asValue()) |angle| { + return .{ + .result = .{ + .angle = .{ + .degrees = angle.toDegrees(), + }, + }, + }; + } else if (input.tryParse(CSSNumberFns.parse, .{}).asValue()) |value| { + return .{ + .result = .{ + .number = .{ + .value = value, + }, + }, + }; + } else if (this.allow_none) { + if (input.expectIdentMatching("none").asErr()) |e| return .{ .err = e }; + return .{ + .result = .{ + .number = .{ + .value = std.math.nan(f32), + }, + }, + }; + } else { + return .{ .err = input.newCustomError(css.ParserError.invalid_value) }; + } + } + + pub fn parsePercentage(this: *const ComponentParser, input: *css.Parser) Result(f32) { + if (this.from) |*from| { + if (input.tryParse(RelativeComponentParser.parsePercentage, .{from}).asValue()) |res| { + return .{ .result = res }; + } + } + + if (input.tryParse(Percentage.parse, .{}).asValue()) |val| { + return .{ .result = val.v }; + } else if (this.allow_none) { + if (input.expectIdentMatching("none").asErr()) |e| return .{ .err = e }; + return .{ .result = std.math.nan(f32) }; + } else { + return .{ .err = input.newCustomError(css.ParserError.invalid_value) }; + } + } + + pub fn parseNumber(this: *const ComponentParser, input: *css.Parser) Result(f32) { + if (this.from) |*from| { + if (input.tryParse(RelativeComponentParser.parseNumber, .{from}).asValue()) |res| { + return .{ .result = res }; + } + } + + if (input.tryParse(CSSNumberFns.parse, .{}).asValue()) |val| { + return .{ .result = val }; + } else if (this.allow_none) { + if (input.expectIdentMatching("none").asErr()) |e| return .{ .err = e }; + return .{ .result = std.math.nan(f32) }; + } else { + return .{ .err = input.newCustomError(css.ParserError.invalid_value) }; + } + } +}; + +/// Either a number or a percentage. +pub const NumberOrPercentage = union(enum) { + /// ``. + number: struct { + /// The numeric value parsed, as a float. + value: f32, + }, + /// `` + percentage: struct { + /// The value as a float, divided by 100 so that the nominal range is + /// 0.0 to 1.0. + unit_value: f32, + }, + + /// Return the value as a percentage. + pub fn unitValue(this: *const NumberOrPercentage) f32 { + return switch (this.*) { + .number => |v| v.value, + .percentage => |v| v.unit_value, + }; + } + + /// Return the value as a number with a percentage adjusted to the + /// `percentage_basis`. 
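+ /// For example, a `percentage` of 40% (stored as unit_value 0.4) with a `percentage_basis` of 255 yields 102.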
+ pub fn value(this: *const NumberOrPercentage, percentage_basis: f32) f32 { + return switch (this.*) { + .number => |v| v.value, + .percentage => |v| v.unit_value * percentage_basis, + }; + } +}; + +const RelativeComponentParser = struct { + names: struct { []const u8, []const u8, []const u8 }, + components: struct { f32, f32, f32, f32 }, + types: struct { ChannelType, ChannelType, ChannelType }, + + pub fn new(color: anytype) RelativeComponentParser { + return RelativeComponentParser{ + .names = color.channels(), + .components = color.components(), + .types = color.types(), + }; + } + + pub fn parseAngleOrNumber(input: *css.Parser, this: *const RelativeComponentParser) Result(css.color.AngleOrNumber) { + if (input.tryParse( + RelativeComponentParser.parseIdent, + .{ + this, + ChannelType{ .angle = true, .number = true }, + }, + ).asValue()) |value| { + return .{ .result = .{ + .number = .{ + .value = value, + }, + } }; + } + + if (input.tryParse( + RelativeComponentParser.parseCalc, + .{ + this, + ChannelType{ .angle = true, .number = true }, + }, + ).asValue()) |value| { + return .{ .result = .{ + .number = .{ + .value = value, + }, + } }; + } + + const Closure = struct { + angle: Angle, + parser: *const RelativeComponentParser, + pub fn tryParseFn(i: *css.Parser, t: *@This()) Result(Angle) { + if (Calc(Angle).parseWith(i, t, @This().calcParseIdentFn).asValue()) |val| { + if (val == .value) { + return .{ .result = val.value.* }; + } + } + return .{ .err = i.newCustomError(css.ParserError.invalid_value) }; + } + + pub fn calcParseIdentFn(t: *@This(), ident: []const u8) ?Calc(Angle) { + const value = t.parser.getIdent(ident, ChannelType{ .angle = true, .number = true }) orelse return null; + t.angle = .{ .deg = value }; + return Calc(Angle){ + .value = &t.angle, + }; + } + }; + var closure = Closure{ + .angle = undefined, + .parser = this, + }; + if (input.tryParse(Closure.tryParseFn, .{&closure}).asValue()) |value| { + return .{ .result = .{ + .angle = .{ + .degrees = value.toDegrees(), + }, + } }; + } + + return .{ .err = input.newErrorForNextToken() }; + } + + pub fn parseNumberOrPercentage(input: *css.Parser, this: *const RelativeComponentParser) Result(NumberOrPercentage) { + if (input.tryParse(RelativeComponentParser.parseIdent, .{ this, ChannelType{ .percentage = true, .number = true } }).asValue()) |value| { + return .{ .result = NumberOrPercentage{ .percentage = .{ .unit_value = value } } }; + } + + if (input.tryParse(RelativeComponentParser.parseCalc, .{ this, ChannelType{ .percentage = true, .number = true } }).asValue()) |value| { + return .{ .result = NumberOrPercentage{ + .percentage = .{ + .unit_value = value, + }, + } }; + } + + { + const Closure = struct { + parser: *const RelativeComponentParser, + percentage: Percentage = .{ .v = 0 }, + + pub fn parsefn(i: *css.Parser, self: *@This()) Result(Percentage) { + if (Calc(Percentage).parseWith(i, self, @This().calcparseident).asValue()) |calc_value| { + if (calc_value == .value) return .{ .result = calc_value.value.* }; + } + return .{ .err = i.newCustomError(css.ParserError.invalid_value) }; + } + + pub fn calcparseident(self: *@This(), ident: []const u8) ?Calc(Percentage) { + const v = self.parser.getIdent(ident, ChannelType{ .percentage = true, .number = true }) orelse return null; + self.percentage = .{ .v = v }; + // value variant is a *Percentage + // but we immediately dereference it and discard the pointer + // so using a field on this closure struct instead of making a gratuitous allocation + return .{ + .value = 
&self.percentage, + }; + } + }; + var closure = Closure{ + .parser = this, + }; + if (input.tryParse(Closure.parsefn, .{ + &closure, + }).asValue()) |value| { + return .{ .result = NumberOrPercentage{ + .percentage = .{ + .unit_value = value.v, + }, + } }; + } + } + + return .{ .err = input.newErrorForNextToken() }; + } + + pub fn parsePercentage( + input: *css.Parser, + this: *const RelativeComponentParser, + ) Result(f32) { + if (input.tryParse(RelativeComponentParser.parseIdent, .{ this, ChannelType{ .percentage = true } }).asValue()) |value| { + return .{ .result = value }; + } + + const Closure = struct { self: *const RelativeComponentParser, temp: Percentage = .{ .v = 0 } }; + var _closure = Closure{ .self = this }; + if (input.tryParse(struct { + pub fn parseFn(i: *css.Parser, closure: *Closure) Result(Percentage) { + const calc_value = switch (Calc(Percentage).parseWith(i, closure, parseIdentFn)) { + .result => |v| v, + .err => return .{ .err = i.newCustomError(css.ParserError.invalid_value) }, + }; + if (calc_value == .value) return .{ .result = calc_value.value.* }; + return .{ .err = i.newCustomError(css.ParserError.invalid_value) }; + } + + pub fn parseIdentFn(closure: *Closure, ident: []const u8) ?Calc(Percentage) { + const v = closure.self.getIdent(ident, ChannelType{ .percentage = true }) orelse return null; + closure.temp = .{ .v = v }; + return Calc(Percentage){ .value = &closure.temp }; + } + }.parseFn, .{&_closure}).asValue()) |value| { + return .{ .result = value.v }; + } + + return .{ .err = input.newErrorForNextToken() }; + } + + pub fn parseNumber( + input: *css.Parser, + this: *const RelativeComponentParser, + ) Result(f32) { + if (input.tryParse( + RelativeComponentParser.parseIdent, + .{ this, ChannelType{ .number = true } }, + ).asValue()) |value| { + return .{ .result = value }; + } + + if (input.tryParse( + RelativeComponentParser.parseCalc, + .{ this, ChannelType{ .number = true } }, + ).asValue()) |value| { + return .{ .result = value }; + } + + return .{ .err = input.newErrorForNextToken() }; + } + + pub fn parseIdent( + input: *css.Parser, + this: *const RelativeComponentParser, + allowed_types: ChannelType, + ) Result(f32) { + const v = this.getIdent( + switch (input.expectIdent()) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }, + allowed_types, + ) orelse return .{ .err = input.newErrorForNextToken() }; + return .{ .result = v }; + } + + pub fn parseCalc( + input: *css.Parser, + this: *const RelativeComponentParser, + allowed_types: ChannelType, + ) Result(f32) { + const Closure = struct { + p: *const RelativeComponentParser, + allowed_types: ChannelType, + + pub fn parseIdentFn(self: *@This(), ident: []const u8) ?Calc(f32) { + const v = self.p.getIdent(ident, self.allowed_types) orelse return null; + return .{ .number = v }; + } + }; + var closure = Closure{ + .p = this, + .allowed_types = allowed_types, + }; + if (Calc(f32).parseWith(input, &closure, Closure.parseIdentFn).asValue()) |calc_val| { + // PERF: I don't like this redundant allocation + if (calc_val == .value) return .{ .result = calc_val.value.* }; + if (calc_val == .number) return .{ .result = calc_val.number }; + } + return .{ .err = input.newCustomError(css.ParserError.invalid_value) }; + } + + pub fn getIdent( + this: *const RelativeComponentParser, + ident: []const u8, + allowed_types: ChannelType, + ) ?f32 { + if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(ident, this.names[0]) and allowed_types.intersects(this.types[0])) { + return this.components[0]; + } + + if 
(bun.strings.eqlCaseInsensitiveASCIIICheckLength(ident, this.names[1]) and allowed_types.intersects(this.types[1])) { + return this.components[1]; + } + + if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(ident, this.names[2]) and allowed_types.intersects(this.types[2])) { + return this.components[2]; + } + + if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(ident, "alpha") and allowed_types.intersects(ChannelType{ .percentage = true })) { + return this.components[3]; + } + + return null; + } +}; + +/// A channel type for a color space. +/// TODO(zack): why tf is this bitflags? +pub const ChannelType = packed struct(u8) { + /// Channel represents a percentage. + percentage: bool = false, + /// Channel represents an angle. + angle: bool = false, + /// Channel represents a number. + number: bool = false, + __unused: u5 = 0, + + pub usingnamespace css.Bitflags(@This()); +}; + +pub fn parsePredefined(input: *css.Parser, parser: *ComponentParser) Result(CssColor) { + const Closure = struct { p: *ComponentParser }; + var closure = Closure{ + .p = parser, + }; + const res = switch (input.parseNestedBlock(CssColor, &closure, struct { + // https://www.w3.org/TR/css-color-4/#color-function + pub fn parseFn(this: *Closure, i: *css.Parser) Result(CssColor) { + const from: ?CssColor = if (i.tryParse(css.Parser.expectIdentMatching, .{"from"}).isOk()) + switch (CssColor.parse(i)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + } + else + null; + + const colorspace = switch (i.expectIdent()) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + + if (from) |f| { + if (f == .light_dark) { + const state = i.state(); + const light = switch (parsePredefinedRelative(i, this.p, colorspace, f.light_dark.light)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + i.reset(&state); + const dark = switch (parsePredefinedRelative(i, this.p, colorspace, f.light_dark.dark)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + return .{ .result = CssColor{ + .light_dark = .{ + .light = bun.create( + i.allocator(), + CssColor, + light, + ), + .dark = bun.create( + i.allocator(), + CssColor, + dark, + ), + }, + } }; + } + } + + return parsePredefinedRelative(i, this.p, colorspace, if (from) |*f| f else null); + } + }.parseFn)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + + return .{ .result = res }; +} + +pub fn parsePredefinedRelative( + input: *css.Parser, + parser: *ComponentParser, + colorspace: []const u8, + _from: ?*const CssColor, +) Result(CssColor) { + const location = input.currentSourceLocation(); + if (_from) |from| { + parser.from = set_from: { + // todo_stuff.match_ignore_ascii_case + if (bun.strings.eqlCaseInsensitiveASCIIICheckLength("srgb", colorspace)) { + break :set_from RelativeComponentParser.new( + if (SRGB.tryFromCssColor(from)) |v| v.resolveMissing() else return .{ .err = input.newCustomError(css.ParserError.invalid_value) }, + ); + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength("srgb-linear", colorspace)) { + break :set_from RelativeComponentParser.new( + if (SRGBLinear.tryFromCssColor(from)) |v| v.resolveMissing() else return .{ .err = input.newCustomError(css.ParserError.invalid_value) }, + ); + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength("display-p3", colorspace)) { + break :set_from RelativeComponentParser.new( + if (P3.tryFromCssColor(from)) |v| v.resolveMissing() else return .{ .err = input.newCustomError(css.ParserError.invalid_value) }, + ); + } else if 
(bun.strings.eqlCaseInsensitiveASCIIICheckLength("a98-rgb", colorspace)) { + break :set_from RelativeComponentParser.new( + if (A98.tryFromCssColor(from)) |v| v.resolveMissing() else return .{ .err = input.newCustomError(css.ParserError.invalid_value) }, + ); + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength("prophoto-rgb", colorspace)) { + break :set_from RelativeComponentParser.new( + if (ProPhoto.tryFromCssColor(from)) |v| v.resolveMissing() else return .{ .err = input.newCustomError(css.ParserError.invalid_value) }, + ); + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength("rec2020", colorspace)) { + break :set_from RelativeComponentParser.new( + if (Rec2020.tryFromCssColor(from)) |v| v.resolveMissing() else return .{ .err = input.newCustomError(css.ParserError.invalid_value) }, + ); + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength("xyz-d50", colorspace)) { + break :set_from RelativeComponentParser.new( + if (XYZd50.tryFromCssColor(from)) |v| v.resolveMissing() else return .{ .err = input.newCustomError(css.ParserError.invalid_value) }, + ); + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength("xyz", colorspace) or + bun.strings.eqlCaseInsensitiveASCIIICheckLength("xyz-d65", colorspace)) + { + break :set_from RelativeComponentParser.new( + if (XYZd65.tryFromCssColor(from)) |v| v.resolveMissing() else return .{ .err = input.newCustomError(css.ParserError.invalid_value) }, + ); + } else { + return .{ .err = location.newUnexpectedTokenError(.{ .ident = colorspace }) }; + } + }; + } + + // Out of gamut values should not be clamped, i.e. values < 0 or > 1 should be preserved. + // The browser will gamut-map the color for the target device that it is rendered on. + const a = switch (input.tryParse(parseNumberOrPercentage, .{parser})) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + const b = switch (input.tryParse(parseNumberOrPercentage, .{parser})) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + const c = switch (input.tryParse(parseNumberOrPercentage, .{parser})) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + const alpha = switch (parseAlpha(input, parser)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + + const predefined: PredefinedColor = predefined: { + const Variants = enum { + srgb, + @"srgb-linear", + @"display-p3", + @"a98-rgb", + @"prophoto-rgb", + rec2020, + @"xyz-d50", + @"xyz-d65", + xyz, + }; + const Map = bun.ComptimeEnumMap(Variants); + if (Map.getAnyCase(colorspace)) |ret| { + switch (ret) { + .srgb => break :predefined .{ .srgb = SRGB{ + .r = a, + .g = b, + .b = c, + .alpha = alpha, + } }, + .@"srgb-linear" => break :predefined .{ .srgb_linear = SRGBLinear{ + .r = a, + .g = b, + .b = c, + .alpha = alpha, + } }, + .@"display-p3" => break :predefined .{ .display_p3 = P3{ + .r = a, + .g = b, + .b = c, + .alpha = alpha, + } }, + .@"a98-rgb" => break :predefined .{ .a98 = A98{ + .r = a, + .g = b, + .b = c, + .alpha = alpha, + } }, + .@"prophoto-rgb" => break :predefined .{ .prophoto = ProPhoto{ + .r = a, + .g = b, + .b = c, + .alpha = alpha, + } }, + .rec2020 => break :predefined .{ .rec2020 = Rec2020{ + .r = a, + .g = b, + .b = c, + .alpha = alpha, + } }, + .@"xyz-d50" => break :predefined .{ .xyz_d50 = XYZd50{ + .x = a, + .y = b, + .z = c, + .alpha = alpha, + } }, + .@"xyz-d65", .xyz => break :predefined .{ .xyz_d65 = XYZd65{ + .x = a, + .y = b, + .z = c, + .alpha = alpha, + } }, + } + } else return .{ .err = location.newUnexpectedTokenError(.{ .ident = 
colorspace }) }; + }; + + return .{ .result = .{ + .predefined = bun.create( + input.allocator(), + PredefinedColor, + predefined, + ), + } }; +} + +/// A color type that is used as a fallback when compiling colors for older browsers. +pub const ColorFallbackKind = packed struct(u8) { + rgb: bool = false, + p3: bool = false, + lab: bool = false, + oklab: bool = false, + __unused: u4 = 0, + + pub const P3 = ColorFallbackKind{ .p3 = true }; + pub const RGB = ColorFallbackKind{ .rgb = true }; + pub const LAB = ColorFallbackKind{ .lab = true }; + pub const OKLAB = ColorFallbackKind{ .oklab = true }; + + pub usingnamespace css.Bitflags(@This()); + + pub fn lowest(this: @This()) ColorFallbackKind { + return this.bitwiseAnd(ColorFallbackKind.fromBitsTruncate(bun.wrappingNegation(this.asBits()))); + } + + pub fn highest(this: @This()) ColorFallbackKind { + // This finds the highest set bit. + if (this.isEmpty()) return ColorFallbackKind.empty(); + + const zeroes: u3 = @intCast(@as(u4, 7) - this.leadingZeroes()); + return ColorFallbackKind.fromBitsTruncate(@as(u8, 1) << zeroes); + } + + pub fn andBelow(this: @This()) ColorFallbackKind { + if (this.isEmpty()) return ColorFallbackKind.empty(); + + return this.bitwiseOr(ColorFallbackKind.fromBitsTruncate(this.asBits() - 1)); + } + + pub fn supportsCondition(this: @This()) css.SupportsCondition { + const s = switch (this.asBits()) { + ColorFallbackKind.P3.asBits() => "color(display-p3 0 0 0)", + ColorFallbackKind.LAB.asBits() => "lab(0% 0 0)", + else => bun.unreachablePanic("Expected P3 or LAB. This is a bug in Bun.", .{}), + }; + + return css.SupportsCondition{ + .declaration = .{ + .property_id = .color, + .value = s, + }, + }; + } +}; + +/// A [color space](https://www.w3.org/TR/css-color-4/#interpolation-space) keyword +/// used in interpolation functions such as `color-mix()`. 
+pub const ColorSpaceName = enum { + srgb, + @"srgb-linear", + lab, + oklab, + xyz, + @"xyz-d50", + @"xyz-d65", + hsl, + hwb, + lch, + oklch, + + pub fn asStr(this: *const @This()) []const u8 { + return css.enum_property_util.asStr(@This(), this); + } + + pub fn parse(input: *css.Parser) Result(@This()) { + return css.enum_property_util.parse(@This(), input); + } + + pub fn toCss(this: *const @This(), comptime W: type, dest: *Printer(W)) PrintErr!void { + return css.enum_property_util.toCss(@This(), this, W, dest); + } +}; + +pub fn parseColorMix(input: *css.Parser) Result(CssColor) { + if (input.expectIdentMatching("in").asErr()) |e| return .{ .err = e }; + const method = switch (ColorSpaceName.parse(input)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + + const hue_method_: Result(HueInterpolationMethod) = if (switch (method) { + .hsl, .hwb, .lch, .oklch => true, + else => false, + }) brk: { + const hue_method = input.tryParse(HueInterpolationMethod.parse, .{}); + if (hue_method.isOk()) { + if (input.expectIdentMatching("hue").asErr()) |e| return .{ .err = e }; + } + break :brk hue_method; + } else .{ .result = HueInterpolationMethod.shorter }; + + const hue_method = hue_method_.unwrapOr(HueInterpolationMethod.shorter); + + const first_percent_ = input.tryParse(css.Parser.expectPercentage, .{}); + const first_color = switch (CssColor.parse(input)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + const first_percent = switch (first_percent_) { + .result => |v| v, + .err => switch (input.tryParse(css.Parser.expectPercentage, .{})) { + .result => |vv| vv, + .err => null, + }, + }; + if (input.expectComma().asErr()) |e| return .{ .err = e }; + + const second_percent_ = input.tryParse(css.Parser.expectPercentage, .{}); + const second_color = switch (CssColor.parse(input)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + const second_percent = switch (second_percent_) { + .result => |vv| vv, + .err => switch (input.tryParse(css.Parser.expectPercentage, .{})) { + .result => |vv| vv, + .err => null, + }, + }; + + // https://drafts.csswg.org/css-color-5/#color-mix-percent-norm + const p1, const p2 = if (first_percent == null and second_percent == null) .{ 0.5, 0.5 } else brk: { + const p2 = second_percent orelse (1.0 - first_percent.?); + const p1 = first_percent orelse (1.0 - second_percent.?); + break :brk .{ p1, p2 }; + }; + + if ((p1 + p2) == 0.0) return .{ .err = input.newCustomError(css.ParserError.invalid_value) }; + + const result = switch (method) { + .srgb => first_color.interpolate(input.allocator(), SRGB, p1, &second_color, p2, hue_method), + .@"srgb-linear" => first_color.interpolate(input.allocator(), SRGBLinear, p1, &second_color, p2, hue_method), + .hsl => first_color.interpolate(input.allocator(), HSL, p1, &second_color, p2, hue_method), + .hwb => first_color.interpolate(input.allocator(), HWB, p1, &second_color, p2, hue_method), + .lab => first_color.interpolate(input.allocator(), LAB, p1, &second_color, p2, hue_method), + .lch => first_color.interpolate(input.allocator(), LCH, p1, &second_color, p2, hue_method), + .oklab => first_color.interpolate(input.allocator(), OKLAB, p1, &second_color, p2, hue_method), + .oklch => first_color.interpolate(input.allocator(), OKLCH, p1, &second_color, p2, hue_method), + .xyz, .@"xyz-d65" => first_color.interpolate(input.allocator(), XYZd65, p1, &second_color, p2, hue_method), + .@"xyz-d50" => first_color.interpolate(input.allocator(), XYZd65, p1, &second_color, p2, hue_method), + } 
orelse return .{ .err = input.newCustomError(css.ParserError.invalid_value) }; + + return .{ .result = result }; +} + +/// A hue [interpolation method](https://www.w3.org/TR/css-color-4/#typedef-hue-interpolation-method) +/// used in interpolation functions such as `color-mix()`. +pub const HueInterpolationMethod = enum { + /// Angles are adjusted so that θ₂ - θ₁ ∈ [-180, 180]. + shorter, + /// Angles are adjusted so that θ₂ - θ₁ ∈ {0, [180, 360)}. + longer, + /// Angles are adjusted so that θ₂ - θ₁ ∈ [0, 360). + increasing, + /// Angles are adjusted so that θ₂ - θ₁ ∈ (-360, 0]. + decreasing, + /// No fixup is performed. Angles are interpolated in the same way as every other component. + specified, + + pub fn asStr(this: *const @This()) []const u8 { + return css.enum_property_util.asStr(@This(), this); + } + + pub fn parse(input: *css.Parser) Result(@This()) { + return css.enum_property_util.parse(@This(), input); + } + + pub fn toCss(this: *const @This(), comptime W: type, dest: *Printer(W)) PrintErr!void { + return css.enum_property_util.toCss(@This(), this, W, dest); + } + + pub fn interpolate( + this: *const HueInterpolationMethod, + a: *f32, + b: *f32, + ) void { + // https://drafts.csswg.org/css-color/#hue-interpolation + if (this.* == .specified) { + // a.* = ((a.* % 360.0) + 360.0) % 360.0; + // b.* = ((b.* % 360.0) + 360.0) % 360.0; + a.* = @mod((@mod(a.*, 360.0) + 360.0), 360.0); + b.* = @mod((@mod(b.*, 360.0) + 360.0), 360.0); + } + + switch (this.*) { + .shorter => { + // https://www.w3.org/TR/css-color-4/#hue-shorter + const delta = b.* - a.*; + if (delta > 180.0) { + a.* += 360.0; + } else if (delta < -180.0) { + b.* += 360.0; + } + }, + .longer => { + // https://www.w3.org/TR/css-color-4/#hue-longer + const delta = b.* - a.*; + if (0.0 < delta and delta < 180.0) { + a.* += 360.0; + } else if (-180.0 < delta and delta < 0.0) { + b.* += 360.0; + } + }, + .increasing => { + // https://www.w3.org/TR/css-color-4/#hue-decreasing + if (b.* < a.*) { + b.* += 360.0; + } + }, + .decreasing => { + // https://www.w3.org/TR/css-color-4/#hue-decreasing + if (a.* < b.*) { + a.* += 360.0; + } + }, + .specified => {}, + } + } +}; + +fn rectangularToPolar(l: f32, a: f32, b: f32) struct { f32, f32, f32 } { + // https://github.com/w3c/csswg-drafts/blob/fba005e2ce9bcac55b49e4aa19b87208b3a0631e/css-color-4/conversions.js#L375 + + var h = std.math.atan2(b, a) * 180.0 / std.math.pi; + if (h < 0.0) { + h += 360.0; + } + + // const c = @sqrt(std.math.powi(f32, a, 2) + std.math.powi(f32, b, 2)); + // PERF: Zig does not have Rust's f32::powi + const c = @sqrt(bun.powf(a, 2) + bun.powf(b, 2)); + + // h = h % 360.0; + h = @mod(h, 360.0); + return .{ l, c, h }; +} + +pub fn ColorspaceConversions(comptime T: type) type { + // e.g. 
T = LAB, so then: into_this_function_name = "intoLAB" + const into_this_function_name = "into" ++ comptime bun.meta.typeName(T); + + return struct { + pub fn fromLABColor(color: *const LABColor) T { + return switch (color.*) { + .lab => |*v| { + if (comptime @TypeOf(v.*) == T) return v.*; + return @call(.auto, @field(@TypeOf(v.*), into_this_function_name), .{v}); + }, + .lch => |*v| { + if (comptime @TypeOf(v.*) == T) return v.*; + return @call(.auto, @field(@TypeOf(v.*), into_this_function_name), .{v}); + }, + .oklab => |*v| { + if (comptime @TypeOf(v.*) == T) return v.*; + return @call(.auto, @field(@TypeOf(v.*), into_this_function_name), .{v}); + }, + .oklch => |*v| { + if (comptime @TypeOf(v.*) == T) return v.*; + return @call(.auto, @field(@TypeOf(v.*), into_this_function_name), .{v}); + }, + }; + } + + pub fn fromPredefinedColor(color: *const PredefinedColor) T { + return switch (color.*) { + .srgb => |*v| { + if (comptime @TypeOf(v.*) == T) return v.*; + return @call(.auto, @field(@TypeOf(v.*), into_this_function_name), .{v}); + }, + .srgb_linear => |*v| { + if (comptime @TypeOf(v.*) == T) return v.*; + return @call(.auto, @field(@TypeOf(v.*), into_this_function_name), .{v}); + }, + .display_p3 => |*v| { + if (comptime @TypeOf(v.*) == T) return v.*; + return @call(.auto, @field(@TypeOf(v.*), into_this_function_name), .{v}); + }, + .a98 => |*v| { + if (comptime @TypeOf(v.*) == T) return v.*; + return @call(.auto, @field(@TypeOf(v.*), into_this_function_name), .{v}); + }, + .prophoto => |*v| { + if (comptime @TypeOf(v.*) == T) return v.*; + return @call(.auto, @field(@TypeOf(v.*), into_this_function_name), .{v}); + }, + .rec2020 => |*v| { + if (comptime @TypeOf(v.*) == T) return v.*; + return @call(.auto, @field(@TypeOf(v.*), into_this_function_name), .{v}); + }, + .xyz_d50 => |*v| { + if (comptime @TypeOf(v.*) == T) return v.*; + return @call(.auto, @field(@TypeOf(v.*), into_this_function_name), .{v}); + }, + .xyz_d65 => |*v| { + if (comptime @TypeOf(v.*) == T) return v.*; + return @call(.auto, @field(@TypeOf(v.*), into_this_function_name), .{v}); + }, + }; + } + + pub fn fromFloatColor(color: *const FloatColor) T { + return switch (color.*) { + .rgb => |*v| { + if (comptime T == SRGB) return v.*; + return @call(.auto, @field(@TypeOf(v.*), into_this_function_name), .{v}); + }, + .hsl => |*v| { + if (comptime T == HSL) return v.*; + return @call(.auto, @field(@TypeOf(v.*), into_this_function_name), .{v}); + }, + .hwb => |*v| { + if (comptime T == HWB) return v.*; + return @call(.auto, @field(@TypeOf(v.*), into_this_function_name), .{v}); + }, + }; + } + + pub fn tryFromCssColor(color: *const CssColor) ?T { + return switch (color.*) { + .rgba => |*rgba| { + if (comptime T == RGBA) return rgba.*; + return @call(.auto, @field(@TypeOf(rgba.*), into_this_function_name), .{rgba}); + }, + .lab => |lab| fromLABColor(lab), + .predefined => |predefined| fromPredefinedColor(predefined), + .float => |float| fromFloatColor(float), + .current_color => null, + .light_dark => null, + .system => null, + }; + } + + pub fn hash(this: *const T, hasher: *std.hash.Wyhash) void { + return css.implementHash(T, this, hasher); + } + }; +} + +pub fn DefineColorspace(comptime T: type) type { + if (!@hasDecl(T, "ChannelTypeMap")) { + @compileError("A Colorspace must define a ChannelTypeMap"); + } + const ChannelTypeMap = T.ChannelTypeMap; + + const fields: []const std.builtin.Type.StructField = std.meta.fields(T); + const a = fields[0].name; + const b = fields[1].name; + const c = fields[2].name; + const alpha = 
"alpha"; + if (!@hasField(T, "alpha")) { + @compileError("A Colorspace must define an alpha field"); + } + + if (!@hasField(@TypeOf(ChannelTypeMap), a)) { + @compileError("A Colorspace must define a field for each channel, missing: " ++ a); + } + if (!@hasField(@TypeOf(ChannelTypeMap), b)) { + @compileError("A Colorspace must define a field for each channel, missing: " ++ b); + } + if (!@hasField(@TypeOf(ChannelTypeMap), c)) { + @compileError("A Colorspace must define a field for each channel, missing: " ++ c); + } + + return struct { + pub fn components(this: *const T) struct { f32, f32, f32, f32 } { + return .{ + @field(this, a), + @field(this, b), + @field(this, c), + @field(this, alpha), + }; + } + + pub fn channels(_: *const T) struct { []const u8, []const u8, []const u8 } { + return .{ a, b, c }; + } + + pub fn types(_: *const T) struct { ChannelType, ChannelType, ChannelType } { + return .{ + @field(ChannelTypeMap, a), + @field(ChannelTypeMap, b), + @field(ChannelTypeMap, c), + }; + } + + pub fn resolveMissing(this: *const T) T { + var result: T = this.*; + @field(result, a) = if (std.math.isNan(@field(this, a))) 0.0 else @field(this, a); + @field(result, b) = if (std.math.isNan(@field(this, b))) 0.0 else @field(this, b); + @field(result, c) = if (std.math.isNan(@field(this, c))) 0.0 else @field(this, c); + @field(result, alpha) = if (std.math.isNan(@field(this, alpha))) 0.0 else @field(this, alpha); + return result; + } + + pub fn resolve(this: *const T) T { + var resolved = resolveMissing(this); + if (!resolved.inGamut()) { + resolved = mapGamut(T, resolved); + } + return resolved; + } + }; +} + +pub fn BoundedColorGamut(comptime T: type) type { + const fields: []const std.builtin.Type.StructField = std.meta.fields(T); + const a = fields[0].name; + const b = fields[1].name; + const c = fields[2].name; + return struct { + pub fn inGamut(this: *const T) bool { + return @field(this, a) >= 0.0 and + @field(this, a) <= 1.0 and + @field(this, b) >= 0.0 and + @field(this, b) <= 1.0 and + @field(this, c) >= 0.0 and + @field(this, c) <= 1.0; + } + + pub fn clip(this: *const T) T { + var result: T = this.*; + @field(result, a) = bun.clamp(@field(this, a), 0.0, 1.0); + @field(result, b) = bun.clamp(@field(this, b), 0.0, 1.0); + @field(result, c) = bun.clamp(@field(this, c), 0.0, 1.0); + result.alpha = bun.clamp(this.alpha, 0.0, 1.0); + return result; + } + }; +} + +pub fn DeriveInterpolate( + comptime T: type, + comptime a: []const u8, + comptime b: []const u8, + comptime c: []const u8, +) type { + if (!@hasField(T, a)) @compileError("Missing field: " ++ a); + if (!@hasField(T, b)) @compileError("Missing field: " ++ b); + if (!@hasField(T, c)) @compileError("Missing field: " ++ c); + + return struct { + pub fn fillMissingComponents(this: *T, other: *T) void { + if (std.math.isNan(@field(this, a))) { + @field(this, a) = @field(other, a); + } + + if (std.math.isNan(@field(this, b))) { + @field(this, b) = @field(other, b); + } + + if (std.math.isNan(@field(this, c))) { + @field(this, c) = @field(other, c); + } + + if (std.math.isNan(this.alpha)) { + this.alpha = other.alpha; + } + } + + pub fn interpolate(this: *const T, p1: f32, other: *const T, p2: f32) T { + var result: T = undefined; + @field(result, a) = @field(this, a) * p1 + @field(other, a) * p2; + @field(result, b) = @field(this, b) * p1 + @field(other, b) * p2; + @field(result, c) = @field(this, c) * p1 + @field(other, c) * p2; + result.alpha = this.alpha * p1 + other.alpha * p2; + return result; + } + }; +} + +// pub fn 
DerivePredefined(comptime T: type, comptime predefined_color_field: []const u8) type { +// return struct { +// pub fn +// }; +// } + +pub fn RecangularPremultiply( + comptime T: type, + comptime a: []const u8, + comptime b: []const u8, + comptime c: []const u8, +) type { + if (!@hasField(T, a)) @compileError("Missing field: " ++ a); + if (!@hasField(T, b)) @compileError("Missing field: " ++ b); + if (!@hasField(T, c)) @compileError("Missing field: " ++ c); + return struct { + pub fn premultiply(this: *T) void { + if (!std.math.isNan(this.alpha)) { + @field(this, a) *= this.alpha; + @field(this, b) *= this.alpha; + @field(this, c) *= this.alpha; + } + } + + pub fn unpremultiply(this: *T, alpha_multiplier: f32) void { + if (!std.math.isNan(this.alpha) and this.alpha != 0.0) { + // PERF: precalculate 1/alpha? + @field(this, a) /= this.alpha; + @field(this, b) /= this.alpha; + @field(this, c) /= this.alpha; + this.alpha *= alpha_multiplier; + } + } + }; +} + +pub fn PolarPremultiply( + comptime T: type, + comptime a: []const u8, + comptime b: []const u8, +) type { + if (!@hasField(T, a)) @compileError("Missing field: " ++ a); + if (!@hasField(T, b)) @compileError("Missing field: " ++ b); + return struct { + pub fn premultiply(this: *T) void { + if (!std.math.isNan(this.alpha)) { + @field(this, a) *= this.alpha; + @field(this, b) *= this.alpha; + } + } + + pub fn unpremultiply(this: *T, alpha_multiplier: f32) void { + // this.h %= 360.0; + this.h = @mod(this.h, 360.0); + if (!std.math.isNan(this.alpha)) { + // PERF: precalculate 1/alpha? + @field(this, a) /= this.alpha; + @field(this, b) /= this.alpha; + this.alpha *= alpha_multiplier; + } + } + }; +} + +pub fn AdjustPowerlessLAB(comptime T: type) type { + return struct { + pub fn adjustPowerlessComponents(this: *T) void { + // If the lightness of a LAB color is 0%, both the a and b components are powerless. + if (@abs(this.l) < std.math.floatEps(f32)) { + this.a = std.math.nan(f32); + this.b = std.math.nan(f32); + } + } + }; +} + +pub fn AdjustPowerlessLCH(comptime T: type) type { + return struct { + pub fn adjustPowerlessComponents(this: *T) void { + // If the chroma of an LCH color is 0%, the hue component is powerless. + // If the lightness of an LCH color is 0%, both the hue and chroma components are powerless. 
+ if (@abs(this.c) < std.math.floatEps(f32)) { + this.h = std.math.nan(f32); + } + + if (@abs(this.l) < std.math.floatEps(f32)) { + this.c = std.math.nan(f32); + this.h = std.math.nan(f32); + } + } + + pub fn adjustHue(this: *T, other: *T, method: HueInterpolationMethod) void { + _ = method.interpolate(&this.h, &other.h); + } + }; +} + +pub fn shortColorName(v: u32) ?[]const u8 { + // These names are shorter than their hex codes + return switch (v) { + 0x000080 => "navy", + 0x008000 => "green", + 0x008080 => "teal", + 0x4b0082 => "indigo", + 0x800000 => "maroon", + 0x800080 => "purple", + 0x808000 => "olive", + 0x808080 => "gray", + 0xa0522d => "sienna", + 0xa52a2a => "brown", + 0xc0c0c0 => "silver", + 0xcd853f => "peru", + 0xd2b48c => "tan", + 0xda70d6 => "orchid", + 0xdda0dd => "plum", + 0xee82ee => "violet", + 0xf0e68c => "khaki", + 0xf0ffff => "azure", + 0xf5deb3 => "wheat", + 0xf5f5dc => "beige", + 0xfa8072 => "salmon", + 0xfaf0e6 => "linen", + 0xff0000 => "red", + 0xff6347 => "tomato", + 0xff7f50 => "coral", + 0xffa500 => "orange", + 0xffc0cb => "pink", + 0xffd700 => "gold", + 0xffe4c4 => "bisque", + 0xfffafa => "snow", + 0xfffff0 => "ivory", + else => return null, + }; +} + +// From esbuild: https://github.com/evanw/esbuild/blob/18e13bdfdca5cd3c7a2fae1a8bd739f8f891572c/internal/css_parser/css_decls_color.go#L218 +// 0xAABBCCDD => 0xABCD +pub fn compactHex(v: u32) u32 { + return ((v & 0x0FF00000) >> 12) | ((v & 0x00000FF0) >> 4); +} + +// 0xABCD => 0xAABBCCDD +pub fn expandHex(v: u32) u32 { + return ((v & 0xF000) << 16) | + ((v & 0xFF00) << 12) | + ((v & 0x0FF0) << 8) | + ((v & 0x00FF) << 4) | + (v & 0x000F); +} + +pub fn writeComponents( + name: []const u8, + a: f32, + b: f32, + c: f32, + alpha: f32, + comptime W: type, + dest: *Printer(W), +) PrintErr!void { + try dest.writeStr(name); + try dest.writeChar('('); + if (std.math.isNan(a)) { + try dest.writeStr("none"); + } else { + try (Percentage{ .v = a }).toCss(W, dest); + } + try dest.writeChar(' '); + try writeComponent(b, W, dest); + try dest.writeChar(' '); + try writeComponent(c, W, dest); + if (std.math.isNan(alpha) or @abs(alpha - 1.0) > std.math.floatEps(f32)) { + try dest.delim('/', true); + try writeComponent(alpha, W, dest); + } + return dest.writeChar(')'); +} + +pub fn writeComponent(c: f32, comptime W: type, dest: *Printer(W)) PrintErr!void { + if (std.math.isNan(c)) { + return dest.writeStr("none"); + } else { + return CSSNumberFns.toCss(&c, W, dest); + } +} + +pub fn writePredefined( + predefined: *const PredefinedColor, + comptime W: type, + dest: *Printer(W), +) PrintErr!void { + const name, const a, const b, const c, const alpha = switch (predefined.*) { + .srgb => |*rgb| .{ "srgb", rgb.r, rgb.g, rgb.b, rgb.alpha }, + .srgb_linear => |*rgb| .{ "srgb-linear", rgb.r, rgb.g, rgb.b, rgb.alpha }, + .display_p3 => |*rgb| .{ "display-p3", rgb.r, rgb.g, rgb.b, rgb.alpha }, + .a98 => |*rgb| .{ "a98-rgb", rgb.r, rgb.g, rgb.b, rgb.alpha }, + .prophoto => |*rgb| .{ "prophoto-rgb", rgb.r, rgb.g, rgb.b, rgb.alpha }, + .rec2020 => |*rgb| .{ "rec2020", rgb.r, rgb.g, rgb.b, rgb.alpha }, + .xyz_d50 => |*xyz| .{ "xyz-d50", xyz.x, xyz.y, xyz.z, xyz.alpha }, + // "xyz" has better compatibility (Safari 15) than "xyz-d65", and it is shorter. 
+ .xyz_d65 => |*xyz| .{ "xyz", xyz.x, xyz.y, xyz.z, xyz.alpha }, + }; + + try dest.writeStr("color("); + try dest.writeStr(name); + try dest.writeChar(' '); + try writeComponent(a, W, dest); + try dest.writeChar(' '); + try writeComponent(b, W, dest); + try dest.writeChar(' '); + try writeComponent(c, W, dest); + + if (std.math.isNan(alpha) or @abs(alpha - 1.0) > std.math.floatEps(f32)) { + try dest.delim('/', true); + try writeComponent(alpha, W, dest); + } + + return dest.writeChar(')'); +} + +extern "c" fn powf(f32, f32) f32; + +pub fn gamSrgb(r: f32, g: f32, b: f32) struct { f32, f32, f32 } { + // https://github.com/w3c/csswg-drafts/blob/fba005e2ce9bcac55b49e4aa19b87208b3a0631e/css-color-4/conversions.js#L31 + // convert an array of linear-light sRGB values in the range 0.0-1.0 + // to gamma corrected form + // https://en.wikipedia.org/wiki/SRGB + // Extended transfer function: + // For negative values, linear portion extends on reflection + // of axis, then uses reflected pow below that + + const Helpers = struct { + pub fn gamSrgbComponent(c: f32) f32 { + const abs = @abs(c); + if (abs > 0.0031308) { + const sign: f32 = if (c < 0.0) @as(f32, -1.0) else @as(f32, 1.0); + // const x: f32 = bun.powf( abs, 1.0 / 2.4); + const x: f32 = powf(abs, 1.0 / 2.4); + const y: f32 = 1.055 * x; + const z: f32 = y - 0.055; + // return sign * (1.055 * bun.powf( abs, 1.0 / 2.4) - 0.055); + return sign * z; + } + + return 12.92 * c; + } + }; + + const rr = Helpers.gamSrgbComponent(r); + const gg = Helpers.gamSrgbComponent(g); + const bb = Helpers.gamSrgbComponent(b); + return .{ + rr, + gg, + bb, + }; +} + +pub fn linSrgb(r: f32, g: f32, b: f32) struct { f32, f32, f32 } { + // https://github.com/w3c/csswg-drafts/blob/fba005e2ce9bcac55b49e4aa19b87208b3a0631e/css-color-4/conversions.js#L11 + // convert sRGB values where in-gamut values are in the range [0 - 1] + // to linear light (un-companded) form. + // https://en.wikipedia.org/wiki/SRGB + // Extended transfer function: + // for negative values, linear portion is extended on reflection of axis, + // then reflected power function is used. + + const H = struct { + pub fn linSrgbComponent(c: f32) f32 { + const abs = @abs(c); + if (abs < 0.04045) { + return c / 12.92; + } + + const sign: f32 = if (c < 0.0) -1.0 else 1.0; + return sign * bun.powf( + ((abs + 0.055) / 1.055), + 2.4, + ); + } + }; + + return .{ + H.linSrgbComponent(r), + H.linSrgbComponent(g), + H.linSrgbComponent(b), + }; +} + +/// PERF: SIMD? 
+pub fn multiplyMatrix(m: *const [9]f32, x: f32, y: f32, z: f32) struct { f32, f32, f32 } { + const a = m[0] * x + m[1] * y + m[2] * z; + const b = m[3] * x + m[4] * y + m[5] * z; + const c = m[6] * x + m[7] * y + m[8] * z; + return .{ a, b, c }; +} + +pub fn polarToRectangular(l: f32, c: f32, h: f32) struct { f32, f32, f32 } { + // https://github.com/w3c/csswg-drafts/blob/fba005e2ce9bcac55b49e4aa19b87208b3a0631e/css-color-4/conversions.js#L385 + + const a = c * @cos(h * std.math.pi / 180.0); + const b = c * @sin(h * std.math.pi / 180.0); + return .{ l, a, b }; +} + +const D50: []const f32 = &.{ @floatCast(@as(f64, 0.3457) / @as(f64, 0.3585)), 1.00000, @floatCast((@as(f64, 1.0) - @as(f64, 0.3457) - @as(f64, 0.3585)) / @as(f64, 0.3585)) }; +// const D50: []const f32 = &.{ 0.9642956, 1.0, 0.82510453 }; + +const color_conversions = struct { + const generated = @import("./color_generated.zig").generated_color_conversions; + + pub const convert_RGBA = struct { + pub usingnamespace generated.convert_RGBA; + }; + + pub const convert_LAB = struct { + pub usingnamespace generated.convert_LAB; + + pub fn intoCssColor(c: *const LAB, allocator: Allocator) CssColor { + return CssColor{ .lab = bun.create( + allocator, + LABColor, + LABColor{ .lab = c.* }, + ) }; + } + + pub fn intoLCH(_lab: *const LAB) LCH { + const lab = _lab.resolveMissing(); + const l, const c, const h = rectangularToPolar(lab.l, lab.a, lab.b); + return LCH{ + .l = l, + .c = c, + .h = h, + .alpha = lab.alpha, + }; + } + + pub fn intoXYZd50(_lab: *const LAB) XYZd50 { + // https://github.com/w3c/csswg-drafts/blob/fba005e2ce9bcac55b49e4aa19b87208b3a0631e/css-color-4/conversions.js#L352 + const K: f32 = @floatCast(@as(f64, 24389.0) / @as(f64, 27.0)); // 29^3/3^3 + const E: f32 = @floatCast(@as(f64, 216.0) / @as(f64, 24389.0)); // 6^3/29^3 + + const lab = _lab.resolveMissing(); + const l = lab.l * 100.0; + const a = lab.a; + const b = lab.b; + + // compute f, starting with the luminance-related term + const f1: f32 = (l + 16.0) / 116.0; + const f0: f32 = a / 500.0 + f1; + const f2: f32 = f1 - b / 200.0; + + // compute xyz + const x = if (bun.powf(f0, 3) > E) + bun.powf(f0, 3) + else + (116.0 * f0 - 16.0) / K; + + const y = if (l > K * E) bun.powf((l + 16.0) / 116.0, 3) else l / K; + + const z = if (bun.powf(f2, 3) > E) + bun.powf(f2, 3) + else + (@as(f32, 116.0) * f2 - 16.0) / K; + + const final_x = x * D50[0]; + const final_y = y * D50[1]; + const final_z = z * D50[2]; + + // Compute XYZ by scaling xyz by reference white + return XYZd50{ + .x = final_x, + .y = final_y, + .z = final_z, + .alpha = lab.alpha, + }; + } + }; + + pub const convert_SRGB = struct { + pub usingnamespace generated.convert_SRGB; + + pub fn intoCssColor(srgb: *const SRGB, _: Allocator) CssColor { + // TODO: should we serialize as color(srgb, ...)? + // would be more precise than 8-bit color. 
+ return CssColor{ .rgba = srgb.intoRGBA() }; + } + + pub fn intoSRGBLinear(rgb: *const SRGB) SRGBLinear { + const srgb = rgb.resolveMissing(); + const r, const g, const b = linSrgb(srgb.r, srgb.g, srgb.b); + return SRGBLinear{ + .r = r, + .g = g, + .b = b, + .alpha = srgb.alpha, + }; + } + + pub fn intoHSL(_rgb: *const SRGB) HSL { + // https://drafts.csswg.org/css-color/#rgb-to-hsl + const rgb = _rgb.resolve(); + const r = rgb.r; + const g = rgb.g; + const b = rgb.b; + const max = @max( + @max(r, g), + b, + ); + const min = @min(@min(r, g), b); + var h = std.math.nan(f32); + var s: f32 = 0.0; + const l = (min + max) / 2.0; + const d = max - min; + + if (d != 0.0) { + s = if (l == 0.0 or l == 1.0) + 0.0 + else + (max - l) / @min(l, 1.0 - l); + + if (max == r) { + h = (g - b) / d + (if (g < b) @as(f32, 6.0) else @as(f32, 0.0)); + } else if (max == g) { + h = (b - r) / d + 2.0; + } else if (max == b) { + h = (r - g) / d + 4.0; + } + + h = h * 60.0; + } + + return HSL{ + .h = h, + .s = s, + .l = l, + .alpha = rgb.alpha, + }; + } + + pub fn intoHWB(_rgb: *const SRGB) HWB { + const rgb = _rgb.resolve(); + const hsl = rgb.intoHSL(); + const r = rgb.r; + const g = rgb.g; + const _b = rgb.b; + const w = @min(@min(r, g), _b); + const b = 1.0 - @max(@max(r, g), _b); + return HWB{ + .h = hsl.h, + .w = w, + .b = b, + .alpha = rgb.alpha, + }; + } + }; + + pub const convert_HSL = struct { + pub usingnamespace generated.convert_HSL; + + pub fn intoCssColor(c: *const HSL, _: Allocator) CssColor { + // TODO: should we serialize as color(srgb, ...)? + // would be more precise than 8-bit color. + return CssColor{ .rgba = c.intoRGBA() }; + } + + pub fn intoSRGB(hsl_: *const HSL) SRGB { + // https://drafts.csswg.org/css-color/#hsl-to-rgb + const hsl = hsl_.resolveMissing(); + const h = (hsl.h - 360.0 * @floor(hsl.h / 360.0)) / 360.0; + const r, const g, const b = css.color.hslToRgb(h, hsl.s, hsl.l); + return SRGB{ + .r = r, + .g = g, + .b = b, + .alpha = hsl.alpha, + }; + } + }; + + pub const convert_HWB = struct { + pub usingnamespace generated.convert_HWB; + + pub fn intoCssColor(c: *const HWB, _: Allocator) CssColor { + // TODO: should we serialize as color(srgb, ...)? + // would be more precise than 8-bit color. 
+ return CssColor{ .rgba = c.intoRGBA() }; + } + + pub fn intoSRGB(_hwb: *const HWB) SRGB { + // https://drafts.csswg.org/css-color/#hwb-to-rgb + const hwb = _hwb.resolveMissing(); + const h = hwb.h; + const w = hwb.w; + const b = hwb.b; + + if (w + b >= 1.0) { + const gray = w / (w + b); + return SRGB{ + .r = gray, + .g = gray, + .b = gray, + .alpha = hwb.alpha, + }; + } + + var rgba = (HSL{ .h = h, .s = 1.0, .l = 0.5, .alpha = hwb.alpha }).intoSRGB(); + const x = 1.0 - w - b; + rgba.r = rgba.r * x + w; + rgba.g = rgba.g * x + w; + rgba.b = rgba.b * x + w; + return rgba; + } + }; + + pub const convert_SRGBLinear = struct { + pub usingnamespace generated.convert_SRGBLinear; + + pub fn intoPredefinedColor(rgb: *const SRGBLinear) PredefinedColor { + return PredefinedColor{ .srgb_linear = rgb.* }; + } + + pub fn intoCssColor(rgb: *const SRGBLinear, allocator: Allocator) CssColor { + return CssColor{ + .predefined = bun.create( + allocator, + PredefinedColor, + rgb.intoPredefinedColor(), + ), + }; + } + + pub fn intoSRGB(_rgb: *const SRGBLinear) SRGB { + const rgb = _rgb.resolveMissing(); + const r, const g, const b = gamSrgb(rgb.r, rgb.g, rgb.b); + return SRGB{ + .r = r, + .g = g, + .b = b, + .alpha = rgb.alpha, + }; + } + + pub fn intoXYZd65(_rgb: *const SRGBLinear) XYZd65 { + // https://github.com/w3c/csswg-drafts/blob/fba005e2ce9bcac55b49e4aa19b87208b3a0631e/css-color-4/conversions.js#L50 + // convert an array of linear-light sRGB values to CIE XYZ + // using sRGB's own white, D65 (no chromatic adaptation) + const MATRIX: [9]f32 = .{ + 0.41239079926595934, + 0.357584339383878, + 0.1804807884018343, + 0.21263900587151027, + 0.715168678767756, + 0.07219231536073371, + 0.01933081871559182, + 0.11919477979462598, + 0.9505321522496607, + }; + + const rgb = _rgb.resolveMissing(); + const x, const y, const z = multiplyMatrix(&MATRIX, rgb.r, rgb.g, rgb.b); + return XYZd65{ + .x = x, + .y = y, + .z = z, + .alpha = rgb.alpha, + }; + } + }; + + pub const convert_P3 = struct { + pub usingnamespace generated.convert_P3; + + pub fn intoPredefinedColor(rgb: *const P3) PredefinedColor { + return PredefinedColor{ .display_p3 = rgb.* }; + } + + pub fn intoCssColor(rgb: *const P3, allocator: Allocator) CssColor { + return CssColor{ + .predefined = bun.create( + allocator, + PredefinedColor, + rgb.intoPredefinedColor(), + ), + }; + } + + pub fn intoXYZd65(_p3: *const P3) XYZd65 { + // https://github.com/w3c/csswg-drafts/blob/fba005e2ce9bcac55b49e4aa19b87208b3a0631e/css-color-4/conversions.js#L91 + // convert linear-light display-p3 values to CIE XYZ + // using D65 (no chromatic adaptation) + // http://www.brucelindbloom.com/index.html?Eqn_RGB_XYZ_Matrix.html + const MATRIX: [9]f32 = .{ + 0.4865709486482162, + 0.26566769316909306, + 0.1982172852343625, + 0.2289745640697488, + 0.6917385218365064, + 0.079286914093745, + 0.0000000000000000, + 0.04511338185890264, + 1.043944368900976, + }; + + const p3 = _p3.resolveMissing(); + const r, const g, const b = linSrgb(p3.r, p3.g, p3.b); + const x, const y, const z = multiplyMatrix(&MATRIX, r, g, b); + return XYZd65{ + .x = x, + .y = y, + .z = z, + .alpha = p3.alpha, + }; + } + }; + + pub const convert_A98 = struct { + pub usingnamespace generated.convert_A98; + + pub fn intoPredefinedColor(rgb: *const A98) PredefinedColor { + return PredefinedColor{ .a98 = rgb.* }; + } + + pub fn intoCssColor(rgb: *const A98, allocator: Allocator) CssColor { + return CssColor{ + .predefined = bun.create( + allocator, + PredefinedColor, + rgb.intoPredefinedColor(), + ), + }; + } + + pub 
fn intoXYZd65(_a98: *const A98) XYZd65 { + // https://github.com/w3c/csswg-drafts/blob/fba005e2ce9bcac55b49e4aa19b87208b3a0631e/css-color-4/conversions.js#L181 + const H = struct { + pub fn linA98rgbComponent(c: f32) f32 { + const sign: f32 = if (c < 0.0) @as(f32, -1.0) else @as(f32, 1.0); + return sign * bun.powf(@abs(c), 563.0 / 256.0); + } + }; + + // convert an array of a98-rgb values in the range 0.0 - 1.0 + // to linear light (un-companded) form. + // negative values are also now accepted + const a98 = _a98.resolveMissing(); + const r = H.linA98rgbComponent(a98.r); + const g = H.linA98rgbComponent(a98.g); + const b = H.linA98rgbComponent(a98.b); + + // convert an array of linear-light a98-rgb values to CIE XYZ + // http://www.brucelindbloom.com/index.html?Eqn_RGB_XYZ_Matrix.html + // has greater numerical precision than section 4.3.5.3 of + // https://www.adobe.com/digitalimag/pdfs/AdobeRGB1998.pdf + // but the values below were calculated from first principles + // from the chromaticity coordinates of R G B W + // see matrixmaker.html + const MATRIX: [9]f32 = .{ + 0.5766690429101305, + 0.1855582379065463, + 0.1882286462349947, + 0.29734497525053605, + 0.6273635662554661, + 0.07529145849399788, + 0.02703136138641234, + 0.07068885253582723, + 0.9913375368376388, + }; + + const x, const y, const z = multiplyMatrix(&MATRIX, r, g, b); + return XYZd65{ + .x = x, + .y = y, + .z = z, + .alpha = a98.alpha, + }; + } + }; + + pub const convert_ProPhoto = struct { + pub usingnamespace generated.convert_ProPhoto; + + pub fn intoPredefinedColor(rgb: *const ProPhoto) PredefinedColor { + return PredefinedColor{ .prophoto = rgb.* }; + } + + pub fn intoCssColor(rgb: *const ProPhoto, allocator: Allocator) CssColor { + return CssColor{ + .predefined = bun.create( + allocator, + PredefinedColor, + rgb.intoPredefinedColor(), + ), + }; + } + + pub fn intoXYZd50(_prophoto: *const ProPhoto) XYZd50 { + // https://github.com/w3c/csswg-drafts/blob/fba005e2ce9bcac55b49e4aa19b87208b3a0631e/css-color-4/conversions.js#L118 + // convert an array of prophoto-rgb values + // where in-gamut colors are in the range [0.0 - 1.0] + // to linear light (un-companded) form. 
+ // Transfer curve is gamma 1.8 with a small linear portion + // Extended transfer function + + const H = struct { + pub fn linProPhotoComponent(c: f32) f32 { + const ET2: f32 = 16.0 / 512.0; + const abs = @abs(c); + if (abs <= ET2) { + return c / 16.0; + } + const sign: f32 = if (c < 0.0) -1.0 else 1.0; + return sign * bun.powf(abs, 1.8); + } + }; + + const prophoto = _prophoto.resolveMissing(); + const r = H.linProPhotoComponent(prophoto.r); + const g = H.linProPhotoComponent(prophoto.g); + const b = H.linProPhotoComponent(prophoto.b); + + // https://github.com/w3c/csswg-drafts/blob/fba005e2ce9bcac55b49e4aa19b87208b3a0631e/css-color-4/conversions.js#L155 + // convert an array of linear-light prophoto-rgb values to CIE XYZ + // using D50 (so no chromatic adaptation needed afterwards) + // http://www.brucelindbloom.com/index.html?Eqn_RGB_XYZ_Matrix.html + const MATRIX: [9]f32 = .{ + 0.7977604896723027, + 0.13518583717574031, + 0.0313493495815248, + 0.2880711282292934, + 0.7118432178101014, + 0.00008565396060525902, + 0.0, + 0.0, + 0.8251046025104601, + }; + + const x, const y, const z = multiplyMatrix(&MATRIX, r, g, b); + return XYZd50{ + .x = x, + .y = y, + .z = z, + .alpha = prophoto.alpha, + }; + } + }; + + pub const convert_Rec2020 = struct { + pub usingnamespace generated.convert_Rec2020; + + pub fn intoPredefinedColor(rgb: *const Rec2020) PredefinedColor { + return PredefinedColor{ .rec2020 = rgb.* }; + } + + pub fn intoCssColor(rgb: *const Rec2020, allocator: Allocator) CssColor { + return CssColor{ + .predefined = bun.create( + allocator, + PredefinedColor, + rgb.intoPredefinedColor(), + ), + }; + } + + pub fn intoXYZd65(_rec2020: *const Rec2020) XYZd65 { + // https://github.com/w3c/csswg-drafts/blob/fba005e2ce9bcac55b49e4aa19b87208b3a0631e/css-color-4/conversions.js#L235 + // convert an array of rec2020 RGB values in the range 0.0 - 1.0 + // to linear light (un-companded) form. 
+ // ITU-R BT.2020-2 p.4 + + const H = struct { + pub fn linRec2020Component(c: f32) f32 { + const A: f32 = 1.09929682680944; + const B: f32 = 0.018053968510807; + + const abs = @abs(c); + if (abs < B * 4.5) { + return c / 4.5; + } + + const sign: f32 = if (c < 0.0) -1.0 else 1.0; + return sign * bun.powf( + (abs + A - 1.0) / A, + 1.0 / 0.45, + ); + } + }; + + const rec2020 = _rec2020.resolveMissing(); + const r = H.linRec2020Component(rec2020.r); + const g = H.linRec2020Component(rec2020.g); + const b = H.linRec2020Component(rec2020.b); + + // https://github.com/w3c/csswg-drafts/blob/fba005e2ce9bcac55b49e4aa19b87208b3a0631e/css-color-4/conversions.js#L276 + // convert an array of linear-light rec2020 values to CIE XYZ + // using D65 (no chromatic adaptation) + // http://www.brucelindbloom.com/index.html?Eqn_RGB_XYZ_Matrix.html + const MATRIX: [9]f32 = .{ + 0.6369580483012914, + 0.14461690358620832, + 0.1688809751641721, + 0.2627002120112671, + 0.6779980715188708, + 0.05930171646986196, + 0.000000000000000, + 0.028072693049087428, + 1.060985057710791, + }; + + const x, const y, const z = multiplyMatrix(&MATRIX, r, g, b); + + return XYZd65{ + .x = x, + .y = y, + .z = z, + .alpha = rec2020.alpha, + }; + } + }; + + pub const convert_XYZd50 = struct { + pub usingnamespace generated.convert_XYZd50; + + pub fn intoPredefinedColor(rgb: *const XYZd50) PredefinedColor { + return PredefinedColor{ .xyz_d50 = rgb.* }; + } + + pub fn intoCssColor(rgb: *const XYZd50, allocator: Allocator) CssColor { + return CssColor{ + .predefined = bun.create( + allocator, + PredefinedColor, + rgb.intoPredefinedColor(), + ), + }; + } + + pub fn intoLAB(_xyz: *const XYZd50) LAB { + // https://github.com/w3c/csswg-drafts/blob/fba005e2ce9bcac55b49e4aa19b87208b3a0631e/css-color-4/conversions.js#L332 + // Assuming XYZ is relative to D50, convert to CIE LAB + // from CIE standard, which now defines these as a rational fraction + const E: f32 = 216.0 / 24389.0; // 6^3/29^3 + const K: f32 = 24389.0 / 27.0; // 29^3/3^3 + + // compute xyz, which is XYZ scaled relative to reference white + const xyz = _xyz.resolveMissing(); + const x = xyz.x / D50[0]; + const y = xyz.y / D50[1]; + const z = xyz.y / D50[2]; + + // now compute f + + const f0 = if (x > E) std.math.cbrt(x) else (K * x + 16.0) / 116.0; + + const f1 = if (y > E) std.math.cbrt(y) else (K * y + 16.0) / 116.0; + + const f2 = if (z > E) std.math.cbrt(z) else (K * z + 16.0) / 116.0; + + const l = ((116.0 * f1) - 16.0) / 100.0; + const a = 500.0 * (f0 - f1); + const b = 500.0 * (f1 - f2); + + return LAB{ + .l = l, + .a = a, + .b = b, + .alpha = xyz.alpha, + }; + } + + pub fn intoXYZd65(_xyz: *const XYZd50) XYZd65 { + // https://github.com/w3c/csswg-drafts/blob/fba005e2ce9bcac55b49e4aa19b87208b3a0631e/css-color-4/conversions.js#L105 + const MATRIX: [9]f32 = .{ + 0.9554734527042182, + -0.023098536874261423, + 0.0632593086610217, + -0.028369706963208136, + 1.0099954580058226, + 0.021041398966943008, + 0.012314001688319899, + -0.020507696433477912, + 1.3303659366080753, + }; + + const xyz = _xyz.resolveMissing(); + const x, const y, const z = multiplyMatrix(&MATRIX, xyz.x, xyz.y, xyz.z); + return XYZd65{ + .x = x, + .y = y, + .z = z, + .alpha = xyz.alpha, + }; + } + + pub fn intoProPhoto(_xyz: *const XYZd50) ProPhoto { + // convert XYZ to linear-light prophoto-rgb + const MATRIX: [9]f32 = .{ + 1.3457989731028281, + -0.25558010007997534, + -0.05110628506753401, + -0.5446224939028347, + 1.5082327413132781, + 0.02053603239147973, + 0.0, + 0.0, + 1.2119675456389454, + }; + const H 
= struct { + pub fn gamProPhotoComponent(c: f32) f32 { + // https://github.com/w3c/csswg-drafts/blob/fba005e2ce9bcac55b49e4aa19b87208b3a0631e/css-color-4/conversions.js#L137 + // convert linear-light prophoto-rgb in the range 0.0-1.0 + // to gamma corrected form + // Transfer curve is gamma 1.8 with a small linear portion + // TODO for negative values, extend linear portion on reflection of axis, then add pow below that + const ET: f32 = 1.0 / 512.0; + const abs = @abs(c); + if (abs >= ET) { + const sign: f32 = if (c < 0.0) -1.0 else 1.0; + return sign * bun.powf(abs, 1.0 / 1.8); + } + return 16.0 * c; + } + }; + const xyz = _xyz.resolveMissing(); + const r1, const g1, const b1 = multiplyMatrix(&MATRIX, xyz.x, xyz.y, xyz.z); + const r = H.gamProPhotoComponent(r1); + const g = H.gamProPhotoComponent(g1); + const b = H.gamProPhotoComponent(b1); + return ProPhoto{ + .r = r, + .g = g, + .b = b, + .alpha = xyz.alpha, + }; + } + }; + + pub const convert_XYZd65 = struct { + pub usingnamespace generated.convert_XYZd65; + + pub fn intoPredefinedColor(rgb: *const XYZd65) PredefinedColor { + return PredefinedColor{ .xyz_d65 = rgb.* }; + } + + pub fn intoCssColor(rgb: *const XYZd65, allocator: Allocator) CssColor { + return CssColor{ + .predefined = bun.create( + allocator, + PredefinedColor, + rgb.intoPredefinedColor(), + ), + }; + } + + pub fn intoXYZd50(_xyz: *const XYZd65) XYZd50 { + // https://github.com/w3c/csswg-drafts/blob/fba005e2ce9bcac55b49e4aa19b87208b3a0631e/css-color-4/conversions.js#L319 + + const MATRIX: [9]f32 = .{ + 1.0479298208405488, + 0.022946793341019088, + -0.05019222954313557, + 0.029627815688159344, + 0.990434484573249, + -0.01707382502938514, + -0.009243058152591178, + 0.015055144896577895, + 0.7518742899580008, + }; + + const xyz = _xyz.resolveMissing(); + const x, const y, const z = multiplyMatrix(&MATRIX, xyz.x, xyz.y, xyz.z); + return XYZd50{ + .x = x, + .y = y, + .z = z, + .alpha = xyz.alpha, + }; + } + + pub fn intoSRGBLinear(_xyz: *const XYZd65) SRGBLinear { + // https://github.com/w3c/csswg-drafts/blob/fba005e2ce9bcac55b49e4aa19b87208b3a0631e/css-color-4/conversions.js#L62 + const MATRIX: [9]f32 = .{ + 3.2409699419045226, + -1.537383177570094, + -0.4986107602930034, + -0.9692436362808796, + 1.8759675015077202, + 0.04155505740717559, + 0.05563007969699366, + -0.20397695888897652, + 1.0569715142428786, + }; + + const xyz = _xyz.resolveMissing(); + const r, const g, const b = multiplyMatrix(&MATRIX, xyz.x, xyz.y, xyz.z); + return SRGBLinear{ + .r = r, + .g = g, + .b = b, + .alpha = xyz.alpha, + }; + } + + pub fn intoA98(_xyz: *const XYZd65) A98 { + // https://github.com/w3c/csswg-drafts/blob/fba005e2ce9bcac55b49e4aa19b87208b3a0631e/css-color-4/conversions.js#L222 + // convert XYZ to linear-light a98-rgb + + const MATRIX: [9]f32 = .{ + 2.0415879038107465, + -0.5650069742788596, + -0.34473135077832956, + -0.9692436362808795, + 1.8759675015077202, + 0.04155505740717557, + 0.013444280632031142, + -0.11836239223101838, + 1.0151749943912054, + }; + + const H = struct { + pub fn gamA98Component(c: f32) f32 { + // https://github.com/w3c/csswg-drafts/blob/fba005e2ce9bcac55b49e4aa19b87208b3a0631e/css-color-4/conversions.js#L193 + // convert linear-light a98-rgb in the range 0.0-1.0 + // to gamma corrected form + // negative values are also now accepted + const sign: f32 = if (c < 0.0) -1.0 else 1.0; + return sign * bun.powf(@abs(c), 256.0 / 563.0); + } + }; + + const xyz = _xyz.resolveMissing(); + const r1, const g1, const b1 = multiplyMatrix(&MATRIX, xyz.x, xyz.y, xyz.z); + const 
r = H.gamA98Component(r1); + const g = H.gamA98Component(g1); + const b = H.gamA98Component(b1); + return A98{ + .r = r, + .g = g, + .b = b, + .alpha = xyz.alpha, + }; + } + + pub fn intoRec2020(_xyz: *const XYZd65) Rec2020 { + // convert XYZ to linear-light rec2020 + const MATRIX: [9]f32 = .{ + 1.7166511879712674, + -0.35567078377639233, + -0.25336628137365974, + -0.6666843518324892, + 1.6164812366349395, + 0.01576854581391113, + 0.017639857445310783, + -0.042770613257808524, + 0.9421031212354738, + }; + + const H = struct { + pub fn gamRec2020Component(c: f32) f32 { + // convert linear-light rec2020 RGB in the range 0.0-1.0 + // to gamma corrected form + // ITU-R BT.2020-2 p.4 + + const A: f32 = 1.09929682680944; + const B: f32 = 0.018053968510807; + + const abs = @abs(c); + if (abs > B) { + const sign: f32 = if (c < 0.0) -1.0 else 1.0; + return sign * (A * bun.powf(abs, 0.45) - (A - 1.0)); + } + + return 4.5 * c; + } + }; + + const xyz = _xyz.resolveMissing(); + const r1, const g1, const b1 = multiplyMatrix(&MATRIX, xyz.x, xyz.y, xyz.z); + const r = H.gamRec2020Component(r1); + const g = H.gamRec2020Component(g1); + const b = H.gamRec2020Component(b1); + return Rec2020{ + .r = r, + .g = g, + .b = b, + .alpha = xyz.alpha, + }; + } + + pub fn intoOKLAB(_xyz: *const XYZd65) OKLAB { + // https://github.com/w3c/csswg-drafts/blob/fba005e2ce9bcac55b49e4aa19b87208b3a0631e/css-color-4/conversions.js#L400 + const XYZ_TO_LMS: [9]f32 = .{ + 0.8190224432164319, + 0.3619062562801221, + -0.12887378261216414, + 0.0329836671980271, + 0.9292868468965546, + 0.03614466816999844, + 0.048177199566046255, + 0.26423952494422764, + 0.6335478258136937, + }; + + const LMS_TO_OKLAB: [9]f32 = .{ + 0.2104542553, + 0.7936177850, + -0.0040720468, + 1.9779984951, + -2.4285922050, + 0.4505937099, + 0.0259040371, + 0.7827717662, + -0.8086757660, + }; + + const cbrt = std.math.cbrt; + + const xyz = _xyz.resolveMissing(); + const a1, const b1, const c1 = multiplyMatrix(&XYZ_TO_LMS, xyz.x, xyz.y, xyz.z); + const l, const a, const b = multiplyMatrix(&LMS_TO_OKLAB, cbrt(a1), cbrt(b1), cbrt(c1)); + + return OKLAB{ + .l = l, + .a = a, + .b = b, + .alpha = xyz.alpha, + }; + } + + pub fn intoP3(_xyz: *const XYZd65) P3 { + // https://github.com/w3c/csswg-drafts/blob/fba005e2ce9bcac55b49e4aa19b87208b3a0631e/css-color-4/conversions.js#L105 + const MATRIX: [9]f32 = .{ + 2.493496911941425, + -0.9313836179191239, + -0.40271078445071684, + -0.8294889695615747, + 1.7626640603183463, + 0.023624685841943577, + 0.03584583024378447, + -0.07617238926804182, + 0.9568845240076872, + }; + + const xyz = _xyz.resolveMissing(); + const r1, const g1, const b1 = multiplyMatrix(&MATRIX, xyz.x, xyz.y, xyz.z); + const r, const g, const b = gamSrgb(r1, g1, b1); // same as sRGB + return P3{ + .r = r, + .g = g, + .b = b, + .alpha = xyz.alpha, + }; + } + }; + + pub const convert_LCH = struct { + pub usingnamespace generated.convert_LCH; + + pub fn intoCssColor(c: *const LCH, allocator: Allocator) CssColor { + return CssColor{ .lab = bun.create( + allocator, + LABColor, + LABColor{ .lch = c.* }, + ) }; + } + + pub fn intoLAB(_lch: *const LCH) LAB { + const lch = _lch.resolveMissing(); + const l, const a, const b = polarToRectangular(lch.l, lch.c, lch.h); + return LAB{ + .l = l, + .a = a, + .b = b, + .alpha = lch.alpha, + }; + } + }; + + pub const convert_OKLAB = struct { + pub usingnamespace generated.convert_OKLAB; + + pub fn intoCssColor(c: *const OKLAB, allocator: Allocator) CssColor { + return CssColor{ .lab = bun.create( + allocator, + LABColor, + 
LABColor{ .oklab = c.* }, + ) }; + } + + pub fn intoOKLAB(labb: *const OKLAB) OKLAB { + return labb.*; + } + + pub fn intoOKLCH(labb: *const OKLAB) OKLCH { + const lab = labb.resolveMissing(); + const l, const c, const h = rectangularToPolar(lab.l, lab.a, lab.b); + return OKLCH{ + .l = l, + .c = c, + .h = h, + .alpha = lab.alpha, + }; + } + + pub fn intoXYZd65(_lab: *const OKLAB) XYZd65 { + // https://github.com/w3c/csswg-drafts/blob/fba005e2ce9bcac55b49e4aa19b87208b3a0631e/css-color-4/conversions.js#L418 + const LMS_TO_XYZ: [9]f32 = .{ + 1.2268798733741557, + -0.5578149965554813, + 0.28139105017721583, + -0.04057576262431372, + 1.1122868293970594, + -0.07171106666151701, + -0.07637294974672142, + -0.4214933239627914, + 1.5869240244272418, + }; + + const OKLAB_TO_LMS: [9]f32 = .{ + 0.99999999845051981432, + 0.39633779217376785678, + 0.21580375806075880339, + 1.0000000088817607767, + -0.1055613423236563494, + -0.063854174771705903402, + 1.0000000546724109177, + -0.089484182094965759684, + -1.2914855378640917399, + }; + + const lab = _lab.resolveMissing(); + const a, const b, const c = multiplyMatrix(&OKLAB_TO_LMS, lab.l, lab.a, lab.b); + const x, const y, const z = multiplyMatrix( + &LMS_TO_XYZ, + bun.powf(a, 3), + bun.powf(b, 3), + bun.powf(c, 3), + ); + + return XYZd65{ + .x = x, + .y = y, + .z = z, + .alpha = lab.alpha, + }; + } + }; + + pub const convert_OKLCH = struct { + pub usingnamespace generated.convert_OKLCH; + + pub fn intoCssColor(c: *const OKLCH, allocator: Allocator) CssColor { + return CssColor{ .lab = bun.create( + allocator, + LABColor, + LABColor{ .oklch = c.* }, + ) }; + } + + pub fn intoOKLAB(_lch: *const OKLCH) OKLAB { + const lch = _lch.resolveMissing(); + const l, const a, const b = polarToRectangular(lch.l, lch.c, lch.h); + return OKLAB{ + .l = l, + .a = a, + .b = b, + .alpha = lch.alpha, + }; + } + + pub fn intoOKLCH(x: *const OKLCH) OKLCH { + return x.*; + } + }; +}; diff --git a/src/css/values/color_generated.zig b/src/css/values/color_generated.zig new file mode 100644 index 0000000000..a767990f1f --- /dev/null +++ b/src/css/values/color_generated.zig @@ -0,0 +1,945 @@ +//!This file is generated by `color_via.ts`. Do not edit it directly! 
+const color = @import("./color.zig"); +const RGBA = color.RGBA; +const LAB = color.LAB; +const LCH = color.LCH; +const SRGB = color.SRGB; +const HSL = color.HSL; +const HWB = color.HWB; +const SRGBLinear = color.SRGBLinear; +const P3 = color.P3; +const A98 = color.A98; +const ProPhoto = color.ProPhoto; +const XYZd50 = color.XYZd50; +const XYZd65 = color.XYZd65; +const OKLAB = color.OKLAB; +const OKLCH = color.OKLCH; +const Rec2020 = color.Rec2020; + +pub const generated_color_conversions = struct { + pub const convert_RGBA = struct { + pub fn intoLAB(this: *const RGBA) LAB { + const xyz: SRGB = this.intoSRGB(); + return xyz.intoLAB(); + } + + pub fn intoLCH(this: *const RGBA) LCH { + const xyz: SRGB = this.intoSRGB(); + return xyz.intoLCH(); + } + + pub fn intoOKLAB(this: *const RGBA) OKLAB { + const xyz: SRGB = this.intoSRGB(); + return xyz.intoOKLAB(); + } + + pub fn intoOKLCH(this: *const RGBA) OKLCH { + const xyz: SRGB = this.intoSRGB(); + return xyz.intoOKLCH(); + } + + pub fn intoP3(this: *const RGBA) P3 { + const xyz: SRGB = this.intoSRGB(); + return xyz.intoP3(); + } + + pub fn intoSRGBLinear(this: *const RGBA) SRGBLinear { + const xyz: SRGB = this.intoSRGB(); + return xyz.intoSRGBLinear(); + } + + pub fn intoA98(this: *const RGBA) A98 { + const xyz: SRGB = this.intoSRGB(); + return xyz.intoA98(); + } + + pub fn intoProPhoto(this: *const RGBA) ProPhoto { + const xyz: SRGB = this.intoSRGB(); + return xyz.intoProPhoto(); + } + + pub fn intoXYZd50(this: *const RGBA) XYZd50 { + const xyz: SRGB = this.intoSRGB(); + return xyz.intoXYZd50(); + } + + pub fn intoXYZd65(this: *const RGBA) XYZd65 { + const xyz: SRGB = this.intoSRGB(); + return xyz.intoXYZd65(); + } + + pub fn intoRec2020(this: *const RGBA) Rec2020 { + const xyz: SRGB = this.intoSRGB(); + return xyz.intoRec2020(); + } + + pub fn intoHSL(this: *const RGBA) HSL { + const xyz: SRGB = this.intoSRGB(); + return xyz.intoHSL(); + } + + pub fn intoHWB(this: *const RGBA) HWB { + const xyz: SRGB = this.intoSRGB(); + return xyz.intoHWB(); + } + }; + pub const convert_LAB = struct { + pub fn intoXYZd65(this: *const LAB) XYZd65 { + const xyz: XYZd50 = this.intoXYZd50(); + return xyz.intoXYZd65(); + } + + pub fn intoOKLAB(this: *const LAB) OKLAB { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoOKLAB(); + } + + pub fn intoOKLCH(this: *const LAB) OKLCH { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoOKLCH(); + } + + pub fn intoSRGB(this: *const LAB) SRGB { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoSRGB(); + } + + pub fn intoSRGBLinear(this: *const LAB) SRGBLinear { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoSRGBLinear(); + } + + pub fn intoP3(this: *const LAB) P3 { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoP3(); + } + + pub fn intoA98(this: *const LAB) A98 { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoA98(); + } + + pub fn intoProPhoto(this: *const LAB) ProPhoto { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoProPhoto(); + } + + pub fn intoRec2020(this: *const LAB) Rec2020 { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoRec2020(); + } + + pub fn intoHSL(this: *const LAB) HSL { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoHSL(); + } + + pub fn intoHWB(this: *const LAB) HWB { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoHWB(); + } + + pub fn intoRGBA(this: *const LAB) RGBA { + const xyz: SRGB = this.intoSRGB(); + return xyz.intoRGBA(); + } + }; + pub const convert_SRGB = struct { + pub fn intoLAB(this: *const 
SRGB) LAB { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoLAB(); + } + + pub fn intoLCH(this: *const SRGB) LCH { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoLCH(); + } + + pub fn intoXYZd65(this: *const SRGB) XYZd65 { + const xyz: SRGBLinear = this.intoSRGBLinear(); + return xyz.intoXYZd65(); + } + + pub fn intoOKLAB(this: *const SRGB) OKLAB { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoOKLAB(); + } + + pub fn intoOKLCH(this: *const SRGB) OKLCH { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoOKLCH(); + } + + pub fn intoP3(this: *const SRGB) P3 { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoP3(); + } + + pub fn intoA98(this: *const SRGB) A98 { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoA98(); + } + + pub fn intoProPhoto(this: *const SRGB) ProPhoto { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoProPhoto(); + } + + pub fn intoRec2020(this: *const SRGB) Rec2020 { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoRec2020(); + } + + pub fn intoXYZd50(this: *const SRGB) XYZd50 { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoXYZd50(); + } + }; + pub const convert_HSL = struct { + pub fn intoLAB(this: *const HSL) LAB { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoLAB(); + } + + pub fn intoLCH(this: *const HSL) LCH { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoLCH(); + } + + pub fn intoP3(this: *const HSL) P3 { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoP3(); + } + + pub fn intoSRGBLinear(this: *const HSL) SRGBLinear { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoSRGBLinear(); + } + + pub fn intoA98(this: *const HSL) A98 { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoA98(); + } + + pub fn intoProPhoto(this: *const HSL) ProPhoto { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoProPhoto(); + } + + pub fn intoXYZd50(this: *const HSL) XYZd50 { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoXYZd50(); + } + + pub fn intoRec2020(this: *const HSL) Rec2020 { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoRec2020(); + } + + pub fn intoOKLAB(this: *const HSL) OKLAB { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoOKLAB(); + } + + pub fn intoOKLCH(this: *const HSL) OKLCH { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoOKLCH(); + } + + pub fn intoXYZd65(this: *const HSL) XYZd65 { + const xyz: SRGB = this.intoSRGB(); + return xyz.intoXYZd65(); + } + + pub fn intoHWB(this: *const HSL) HWB { + const xyz: SRGB = this.intoSRGB(); + return xyz.intoHWB(); + } + + pub fn intoRGBA(this: *const HSL) RGBA { + const xyz: SRGB = this.intoSRGB(); + return xyz.intoRGBA(); + } + }; + pub const convert_HWB = struct { + pub fn intoLAB(this: *const HWB) LAB { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoLAB(); + } + + pub fn intoLCH(this: *const HWB) LCH { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoLCH(); + } + + pub fn intoP3(this: *const HWB) P3 { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoP3(); + } + + pub fn intoSRGBLinear(this: *const HWB) SRGBLinear { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoSRGBLinear(); + } + + pub fn intoA98(this: *const HWB) A98 { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoA98(); + } + + pub fn intoProPhoto(this: *const HWB) ProPhoto { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoProPhoto(); + } + + pub fn intoXYZd50(this: *const HWB) XYZd50 { + const xyz: XYZd65 = this.intoXYZd65(); + 
return xyz.intoXYZd50(); + } + + pub fn intoRec2020(this: *const HWB) Rec2020 { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoRec2020(); + } + + pub fn intoHSL(this: *const HWB) HSL { + const xyz: SRGB = this.intoSRGB(); + return xyz.intoHSL(); + } + + pub fn intoXYZd65(this: *const HWB) XYZd65 { + const xyz: SRGB = this.intoSRGB(); + return xyz.intoXYZd65(); + } + + pub fn intoOKLAB(this: *const HWB) OKLAB { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoOKLAB(); + } + + pub fn intoOKLCH(this: *const HWB) OKLCH { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoOKLCH(); + } + + pub fn intoRGBA(this: *const HWB) RGBA { + const xyz: SRGB = this.intoSRGB(); + return xyz.intoRGBA(); + } + }; + pub const convert_SRGBLinear = struct { + pub fn intoLAB(this: *const SRGBLinear) LAB { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoLAB(); + } + + pub fn intoLCH(this: *const SRGBLinear) LCH { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoLCH(); + } + + pub fn intoP3(this: *const SRGBLinear) P3 { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoP3(); + } + + pub fn intoOKLAB(this: *const SRGBLinear) OKLAB { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoOKLAB(); + } + + pub fn intoOKLCH(this: *const SRGBLinear) OKLCH { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoOKLCH(); + } + + pub fn intoA98(this: *const SRGBLinear) A98 { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoA98(); + } + + pub fn intoProPhoto(this: *const SRGBLinear) ProPhoto { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoProPhoto(); + } + + pub fn intoRec2020(this: *const SRGBLinear) Rec2020 { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoRec2020(); + } + + pub fn intoXYZd50(this: *const SRGBLinear) XYZd50 { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoXYZd50(); + } + + pub fn intoHSL(this: *const SRGBLinear) HSL { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoHSL(); + } + + pub fn intoHWB(this: *const SRGBLinear) HWB { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoHWB(); + } + + pub fn intoRGBA(this: *const SRGBLinear) RGBA { + const xyz: SRGB = this.intoSRGB(); + return xyz.intoRGBA(); + } + }; + pub const convert_P3 = struct { + pub fn intoLAB(this: *const P3) LAB { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoLAB(); + } + + pub fn intoLCH(this: *const P3) LCH { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoLCH(); + } + + pub fn intoSRGB(this: *const P3) SRGB { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoSRGB(); + } + + pub fn intoSRGBLinear(this: *const P3) SRGBLinear { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoSRGBLinear(); + } + + pub fn intoOKLAB(this: *const P3) OKLAB { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoOKLAB(); + } + + pub fn intoOKLCH(this: *const P3) OKLCH { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoOKLCH(); + } + + pub fn intoA98(this: *const P3) A98 { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoA98(); + } + + pub fn intoProPhoto(this: *const P3) ProPhoto { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoProPhoto(); + } + + pub fn intoRec2020(this: *const P3) Rec2020 { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoRec2020(); + } + + pub fn intoXYZd50(this: *const P3) XYZd50 { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoXYZd50(); + } + + pub fn intoHSL(this: *const P3) HSL { + const xyz: XYZd65 = this.intoXYZd65(); + return 
xyz.intoHSL(); + } + + pub fn intoHWB(this: *const P3) HWB { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoHWB(); + } + + pub fn intoRGBA(this: *const P3) RGBA { + const xyz: SRGB = this.intoSRGB(); + return xyz.intoRGBA(); + } + }; + pub const convert_A98 = struct { + pub fn intoLAB(this: *const A98) LAB { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoLAB(); + } + + pub fn intoLCH(this: *const A98) LCH { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoLCH(); + } + + pub fn intoSRGB(this: *const A98) SRGB { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoSRGB(); + } + + pub fn intoP3(this: *const A98) P3 { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoP3(); + } + + pub fn intoSRGBLinear(this: *const A98) SRGBLinear { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoSRGBLinear(); + } + + pub fn intoOKLAB(this: *const A98) OKLAB { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoOKLAB(); + } + + pub fn intoOKLCH(this: *const A98) OKLCH { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoOKLCH(); + } + + pub fn intoProPhoto(this: *const A98) ProPhoto { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoProPhoto(); + } + + pub fn intoRec2020(this: *const A98) Rec2020 { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoRec2020(); + } + + pub fn intoXYZd50(this: *const A98) XYZd50 { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoXYZd50(); + } + + pub fn intoHSL(this: *const A98) HSL { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoHSL(); + } + + pub fn intoHWB(this: *const A98) HWB { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoHWB(); + } + + pub fn intoRGBA(this: *const A98) RGBA { + const xyz: SRGB = this.intoSRGB(); + return xyz.intoRGBA(); + } + }; + pub const convert_ProPhoto = struct { + pub fn intoXYZd65(this: *const ProPhoto) XYZd65 { + const xyz: XYZd50 = this.intoXYZd50(); + return xyz.intoXYZd65(); + } + + pub fn intoLAB(this: *const ProPhoto) LAB { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoLAB(); + } + + pub fn intoLCH(this: *const ProPhoto) LCH { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoLCH(); + } + + pub fn intoSRGB(this: *const ProPhoto) SRGB { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoSRGB(); + } + + pub fn intoP3(this: *const ProPhoto) P3 { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoP3(); + } + + pub fn intoSRGBLinear(this: *const ProPhoto) SRGBLinear { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoSRGBLinear(); + } + + pub fn intoA98(this: *const ProPhoto) A98 { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoA98(); + } + + pub fn intoOKLAB(this: *const ProPhoto) OKLAB { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoOKLAB(); + } + + pub fn intoOKLCH(this: *const ProPhoto) OKLCH { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoOKLCH(); + } + + pub fn intoRec2020(this: *const ProPhoto) Rec2020 { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoRec2020(); + } + + pub fn intoHSL(this: *const ProPhoto) HSL { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoHSL(); + } + + pub fn intoHWB(this: *const ProPhoto) HWB { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoHWB(); + } + + pub fn intoRGBA(this: *const ProPhoto) RGBA { + const xyz: SRGB = this.intoSRGB(); + return xyz.intoRGBA(); + } + }; + pub const convert_Rec2020 = struct { + pub fn intoLAB(this: *const Rec2020) LAB { + const xyz: XYZd65 = this.intoXYZd65(); + 
return xyz.intoLAB(); + } + + pub fn intoLCH(this: *const Rec2020) LCH { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoLCH(); + } + + pub fn intoSRGB(this: *const Rec2020) SRGB { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoSRGB(); + } + + pub fn intoP3(this: *const Rec2020) P3 { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoP3(); + } + + pub fn intoSRGBLinear(this: *const Rec2020) SRGBLinear { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoSRGBLinear(); + } + + pub fn intoA98(this: *const Rec2020) A98 { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoA98(); + } + + pub fn intoProPhoto(this: *const Rec2020) ProPhoto { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoProPhoto(); + } + + pub fn intoXYZd50(this: *const Rec2020) XYZd50 { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoXYZd50(); + } + + pub fn intoOKLAB(this: *const Rec2020) OKLAB { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoOKLAB(); + } + + pub fn intoOKLCH(this: *const Rec2020) OKLCH { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoOKLCH(); + } + + pub fn intoHSL(this: *const Rec2020) HSL { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoHSL(); + } + + pub fn intoHWB(this: *const Rec2020) HWB { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoHWB(); + } + + pub fn intoRGBA(this: *const Rec2020) RGBA { + const xyz: SRGB = this.intoSRGB(); + return xyz.intoRGBA(); + } + }; + pub const convert_XYZd50 = struct { + pub fn intoLCH(this: *const XYZd50) LCH { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoLCH(); + } + + pub fn intoSRGB(this: *const XYZd50) SRGB { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoSRGB(); + } + + pub fn intoP3(this: *const XYZd50) P3 { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoP3(); + } + + pub fn intoSRGBLinear(this: *const XYZd50) SRGBLinear { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoSRGBLinear(); + } + + pub fn intoA98(this: *const XYZd50) A98 { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoA98(); + } + + pub fn intoOKLAB(this: *const XYZd50) OKLAB { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoOKLAB(); + } + + pub fn intoOKLCH(this: *const XYZd50) OKLCH { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoOKLCH(); + } + + pub fn intoRec2020(this: *const XYZd50) Rec2020 { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoRec2020(); + } + + pub fn intoHSL(this: *const XYZd50) HSL { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoHSL(); + } + + pub fn intoHWB(this: *const XYZd50) HWB { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoHWB(); + } + + pub fn intoRGBA(this: *const XYZd50) RGBA { + const xyz: SRGB = this.intoSRGB(); + return xyz.intoRGBA(); + } + }; + pub const convert_XYZd65 = struct { + pub fn intoLAB(this: *const XYZd65) LAB { + const xyz: XYZd50 = this.intoXYZd50(); + return xyz.intoLAB(); + } + + pub fn intoProPhoto(this: *const XYZd65) ProPhoto { + const xyz: XYZd50 = this.intoXYZd50(); + return xyz.intoProPhoto(); + } + + pub fn intoOKLCH(this: *const XYZd65) OKLCH { + const xyz: OKLAB = this.intoOKLAB(); + return xyz.intoOKLCH(); + } + + pub fn intoLCH(this: *const XYZd65) LCH { + const xyz: LAB = this.intoLAB(); + return xyz.intoLCH(); + } + + pub fn intoSRGB(this: *const XYZd65) SRGB { + const xyz: SRGBLinear = this.intoSRGBLinear(); + return xyz.intoSRGB(); + } + + pub fn intoHSL(this: *const XYZd65) HSL { + const xyz: SRGB = this.intoSRGB(); + 
return xyz.intoHSL(); + } + + pub fn intoHWB(this: *const XYZd65) HWB { + const xyz: SRGB = this.intoSRGB(); + return xyz.intoHWB(); + } + + pub fn intoRGBA(this: *const XYZd65) RGBA { + const xyz: SRGB = this.intoSRGB(); + return xyz.intoRGBA(); + } + }; + pub const convert_LCH = struct { + pub fn intoXYZd65(this: *const LCH) XYZd65 { + const xyz: LAB = this.intoLAB(); + return xyz.intoXYZd65(); + } + + pub fn intoOKLAB(this: *const LCH) OKLAB { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoOKLAB(); + } + + pub fn intoOKLCH(this: *const LCH) OKLCH { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoOKLCH(); + } + + pub fn intoSRGB(this: *const LCH) SRGB { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoSRGB(); + } + + pub fn intoSRGBLinear(this: *const LCH) SRGBLinear { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoSRGBLinear(); + } + + pub fn intoP3(this: *const LCH) P3 { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoP3(); + } + + pub fn intoA98(this: *const LCH) A98 { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoA98(); + } + + pub fn intoProPhoto(this: *const LCH) ProPhoto { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoProPhoto(); + } + + pub fn intoRec2020(this: *const LCH) Rec2020 { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoRec2020(); + } + + pub fn intoXYZd50(this: *const LCH) XYZd50 { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoXYZd50(); + } + + pub fn intoHSL(this: *const LCH) HSL { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoHSL(); + } + + pub fn intoHWB(this: *const LCH) HWB { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoHWB(); + } + + pub fn intoRGBA(this: *const LCH) RGBA { + const xyz: SRGB = this.intoSRGB(); + return xyz.intoRGBA(); + } + }; + pub const convert_OKLAB = struct { + pub fn intoLAB(this: *const OKLAB) LAB { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoLAB(); + } + + pub fn intoLCH(this: *const OKLAB) LCH { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoLCH(); + } + + pub fn intoSRGB(this: *const OKLAB) SRGB { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoSRGB(); + } + + pub fn intoP3(this: *const OKLAB) P3 { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoP3(); + } + + pub fn intoSRGBLinear(this: *const OKLAB) SRGBLinear { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoSRGBLinear(); + } + + pub fn intoA98(this: *const OKLAB) A98 { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoA98(); + } + + pub fn intoProPhoto(this: *const OKLAB) ProPhoto { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoProPhoto(); + } + + pub fn intoXYZd50(this: *const OKLAB) XYZd50 { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoXYZd50(); + } + + pub fn intoRec2020(this: *const OKLAB) Rec2020 { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoRec2020(); + } + + pub fn intoHSL(this: *const OKLAB) HSL { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoHSL(); + } + + pub fn intoHWB(this: *const OKLAB) HWB { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoHWB(); + } + + pub fn intoRGBA(this: *const OKLAB) RGBA { + const xyz: SRGB = this.intoSRGB(); + return xyz.intoRGBA(); + } + }; + pub const convert_OKLCH = struct { + pub fn intoXYZd65(this: *const OKLCH) XYZd65 { + const xyz: OKLAB = this.intoOKLAB(); + return xyz.intoXYZd65(); + } + + pub fn intoLAB(this: *const OKLCH) LAB { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoLAB(); + } 
+ + pub fn intoLCH(this: *const OKLCH) LCH { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoLCH(); + } + + pub fn intoSRGB(this: *const OKLCH) SRGB { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoSRGB(); + } + + pub fn intoP3(this: *const OKLCH) P3 { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoP3(); + } + + pub fn intoSRGBLinear(this: *const OKLCH) SRGBLinear { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoSRGBLinear(); + } + + pub fn intoA98(this: *const OKLCH) A98 { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoA98(); + } + + pub fn intoProPhoto(this: *const OKLCH) ProPhoto { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoProPhoto(); + } + + pub fn intoXYZd50(this: *const OKLCH) XYZd50 { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoXYZd50(); + } + + pub fn intoRec2020(this: *const OKLCH) Rec2020 { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoRec2020(); + } + + pub fn intoHSL(this: *const OKLCH) HSL { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoHSL(); + } + + pub fn intoHWB(this: *const OKLCH) HWB { + const xyz: XYZd65 = this.intoXYZd65(); + return xyz.intoHWB(); + } + + pub fn intoRGBA(this: *const OKLCH) RGBA { + const xyz: SRGB = this.intoSRGB(); + return xyz.intoRGBA(); + } + }; +}; diff --git a/src/css/values/color_js.zig b/src/css/values/color_js.zig new file mode 100644 index 0000000000..c0b7ffd739 --- /dev/null +++ b/src/css/values/color_js.zig @@ -0,0 +1,481 @@ +const bun = @import("root").bun; +const std = @import("std"); +const color = @import("./color.zig"); +const RGBA = color.RGBA; +const LAB = color.LAB; +const LCH = color.LCH; +const SRGB = color.SRGB; +const HSL = color.HSL; +const HWB = color.HWB; +const SRGBLinear = color.SRGBLinear; +const P3 = color.P3; +const JSC = bun.JSC; +const css = bun.css; + +const OutputColorFormat = enum { + ansi, + ansi_16, + ansi_16m, + ansi_256, + css, + hex, + HEX, + hsl, + lab, + number, + rgb, + rgba, + @"[rgb]", + @"[rgba]", + @"{rgb}", + @"{rgba}", + + pub const Map = bun.ComptimeStringMap(OutputColorFormat, .{ + .{ "[r,g,b,a]", .@"[rgba]" }, + .{ "[rgb]", .@"[rgb]" }, + .{ "[rgba]", .@"[rgba]" }, + .{ "{r,g,b}", .@"{rgb}" }, + .{ "{rgb}", .@"{rgb}" }, + .{ "{rgba}", .@"{rgba}" }, + .{ "ansi_256", .ansi_256 }, + .{ "ansi-256", .ansi_256 }, + .{ "ansi_16", .ansi_16 }, + .{ "ansi-16", .ansi_16 }, + .{ "ansi_16m", .ansi_16m }, + .{ "ansi-16m", .ansi_16m }, + .{ "ansi-24bit", .ansi_16m }, + .{ "ansi-truecolor", .ansi_16m }, + .{ "ansi", .ansi }, + .{ "ansi256", .ansi_256 }, + .{ "css", .css }, + .{ "hex", .hex }, + .{ "HEX", .HEX }, + .{ "hsl", .hsl }, + .{ "lab", .lab }, + .{ "number", .number }, + .{ "rgb", .rgb }, + .{ "rgba", .rgba }, + }); +}; + +fn colorIntFromJS(globalThis: *JSC.JSGlobalObject, input: JSC.JSValue, comptime property: []const u8) ?i32 { + if (input == .zero or input == .undefined or !input.isNumber()) { + globalThis.throwInvalidArgumentType("color", property, "integer"); + + return null; + } + + // CSS spec says to clamp values to their valid range so we'll respect that here + return std.math.clamp(input.coerce(i32, globalThis), 0, 255); +} + +// https://github.com/tmux/tmux/blob/dae2868d1227b95fd076fb4a5efa6256c7245943/colour.c#L44-L55 +pub const Ansi256 = struct { + const q2c = [_]u32{ 0x00, 0x5f, 0x87, 0xaf, 0xd7, 0xff }; + + fn sqdist(R: u32, G: u32, B: u32, r: u32, g: u32, b: u32) u32 { + return ((R -% r) *% (R -% r) +% (G -% g) *% (G -% g) +% (B -% b) *% (B -% b)); + } + + fn to6Cube(v: u32) u32 { + 
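+ // Map an 8-bit channel value to its index (0-5) in the xterm 6x6x6 color cube,
+ // i.e. the nearest of the cube levels 0, 95, 135, 175, 215, 255 (q2c above),
+ // mirroring colour_to_6cube in tmux's colour.c (linked above).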
if (v < 48) + return (0); + if (v < 114) + return (1); + return ((v - 35) / 40); + } + + fn get(r: u32, g: u32, b: u32) u32 { + const qr = to6Cube(r); + const cr = q2c[@intCast(qr)]; + const qg = to6Cube(g); + const cg = q2c[@intCast(qg)]; + const qb = to6Cube(b); + const cb = q2c[@intCast(qb)]; + + if (cr == r and cg == g and cb == b) { + return 16 +% (36 *% qr) +% (6 *% qg) +% qb; + } + + const grey_avg = (r +% g +% b) / 3; + const grey_idx = if (grey_avg > 238) 23 else (grey_avg -% 3) / 10; + const grey = 8 +% (10 *% grey_idx); + + const d = sqdist(cr, cg, cb, r, g, b); + const idx = if (sqdist(grey, grey, grey, r, g, b) < d) 232 +% grey_idx else 16 +% (36 *% qr) +% (6 *% qg) +% qb; + return idx; + } + + const table_256: [256]u8 = .{ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, + 0, 4, 4, 4, 12, 12, 2, 6, 4, 4, 12, 12, 2, 2, 6, 4, + 12, 12, 2, 2, 2, 6, 12, 12, 10, 10, 10, 10, 14, 12, 10, 10, + 10, 10, 10, 14, 1, 5, 4, 4, 12, 12, 3, 8, 4, 4, 12, 12, + 2, 2, 6, 4, 12, 12, 2, 2, 2, 6, 12, 12, 10, 10, 10, 10, + 14, 12, 10, 10, 10, 10, 10, 14, 1, 1, 5, 4, 12, 12, 1, 1, + 5, 4, 12, 12, 3, 3, 8, 4, 12, 12, 2, 2, 2, 6, 12, 12, + 10, 10, 10, 10, 14, 12, 10, 10, 10, 10, 10, 14, 1, 1, 1, 5, + 12, 12, 1, 1, 1, 5, 12, 12, 1, 1, 1, 5, 12, 12, 3, 3, + 3, 7, 12, 12, 10, 10, 10, 10, 14, 12, 10, 10, 10, 10, 10, 14, + 9, 9, 9, 9, 13, 12, 9, 9, 9, 9, 13, 12, 9, 9, 9, 9, + 13, 12, 9, 9, 9, 9, 13, 12, 11, 11, 11, 11, 7, 12, 10, 10, + 10, 10, 10, 14, 9, 9, 9, 9, 9, 13, 9, 9, 9, 9, 9, 13, + 9, 9, 9, 9, 9, 13, 9, 9, 9, 9, 9, 13, 9, 9, 9, 9, + 9, 13, 11, 11, 11, 11, 11, 15, 0, 0, 0, 0, 0, 0, 8, 8, + 8, 8, 8, 8, 7, 7, 7, 7, 7, 7, 15, 15, 15, 15, 15, 15, + }; + + pub fn get16(r: u32, g: u32, b: u32) u8 { + const val = get(r, g, b); + return table_256[val & 0xff]; + } + + pub const Buffer = [24]u8; + + pub fn from(rgba: RGBA, buf: *Buffer) []u8 { + const val = get(rgba.red, rgba.green, rgba.blue); + // 0x1b is the escape character + buf[0] = 0x1b; + buf[1] = '['; + buf[2] = '3'; + buf[3] = '8'; + buf[4] = ';'; + buf[5] = '5'; + buf[6] = ';'; + const extra = std.fmt.bufPrint(buf[7..], "{d}m", .{val}) catch unreachable; + return buf[0 .. 
7 + extra.len]; + } +}; + +pub fn jsFunctionColor(globalThis: *JSC.JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!JSC.JSValue { + const args = callFrame.argumentsAsArray(2); + if (args[0].isUndefined()) { + globalThis.throwInvalidArgumentType("color", "input", "string, number, or object"); + return JSC.JSValue.jsUndefined(); + } + + var arena = std.heap.ArenaAllocator.init(bun.default_allocator); + defer arena.deinit(); + var stack_fallback = std.heap.stackFallback(4096, arena.allocator()); + const allocator = stack_fallback.get(); + + var log = bun.logger.Log.init(allocator); + defer log.deinit(); + + const unresolved_format: OutputColorFormat = brk: { + if (!args[1].isEmptyOrUndefinedOrNull()) { + if (!args[1].isString()) { + globalThis.throwInvalidArgumentType("color", "format", "string"); + return JSC.JSValue.jsUndefined(); + } + + break :brk try args[1].toEnum(globalThis, "format", OutputColorFormat); + } + + break :brk OutputColorFormat.css; + }; + var input = JSC.ZigString.Slice.empty; + defer input.deinit(); + + var parsed_color: css.CssColor.ParseResult = brk: { + if (args[0].isNumber()) { + const number: i64 = args[0].toInt64(); + const Packed = packed struct(u32) { + blue: u8, + green: u8, + red: u8, + alpha: u8, + }; + const int: u32 = @truncate(@abs(@mod(number, std.math.maxInt(u32)))); + const rgba: Packed = @bitCast(int); + + break :brk .{ .result = css.CssColor{ .rgba = .{ .alpha = rgba.alpha, .red = rgba.red, .green = rgba.green, .blue = rgba.blue } } }; + } else if (args[0].jsType().isArrayLike()) { + switch (args[0].getLength(globalThis)) { + 3 => { + const r = colorIntFromJS(globalThis, args[0].getIndex(globalThis, 0), "[0]") orelse return .zero; + if (globalThis.hasException()) { + return .zero; + } + const g = colorIntFromJS(globalThis, args[0].getIndex(globalThis, 1), "[1]") orelse return .zero; + if (globalThis.hasException()) { + return .zero; + } + const b = colorIntFromJS(globalThis, args[0].getIndex(globalThis, 2), "[2]") orelse return .zero; + if (globalThis.hasException()) { + return .zero; + } + break :brk .{ .result = css.CssColor{ .rgba = .{ .alpha = 255, .red = @intCast(r), .green = @intCast(g), .blue = @intCast(b) } } }; + }, + 4 => { + const r = colorIntFromJS(globalThis, args[0].getIndex(globalThis, 0), "[0]") orelse return .zero; + if (globalThis.hasException()) { + return .zero; + } + const g = colorIntFromJS(globalThis, args[0].getIndex(globalThis, 1), "[1]") orelse return .zero; + if (globalThis.hasException()) { + return .zero; + } + const b = colorIntFromJS(globalThis, args[0].getIndex(globalThis, 2), "[2]") orelse return .zero; + if (globalThis.hasException()) { + return .zero; + } + const a = colorIntFromJS(globalThis, args[0].getIndex(globalThis, 3), "[3]") orelse return .zero; + if (globalThis.hasException()) { + return .zero; + } + break :brk .{ .result = css.CssColor{ .rgba = .{ .alpha = @intCast(a), .red = @intCast(r), .green = @intCast(g), .blue = @intCast(b) } } }; + }, + else => { + globalThis.throw("Expected array length 3 or 4", .{}); + return JSC.JSValue.jsUndefined(); + }, + } + } else if (args[0].isObject()) { + const r = colorIntFromJS(globalThis, args[0].get(globalThis, "r") orelse .zero, "r") orelse return .zero; + + if (globalThis.hasException()) { + return .zero; + } + const g = colorIntFromJS(globalThis, args[0].get(globalThis, "g") orelse .zero, "g") orelse return .zero; + + if (globalThis.hasException()) { + return .zero; + } + const b = colorIntFromJS(globalThis, args[0].get(globalThis, "b") orelse .zero, "b") 
orelse return .zero; + + if (globalThis.hasException()) { + return .zero; + } + + const a: ?u8 = if (args[0].getTruthy(globalThis, "a")) |a_value| brk2: { + if (a_value.isNumber()) { + break :brk2 @intCast(@mod(@as(i64, @intFromFloat(a_value.asNumber() * 255.0)), 256)); + } + break :brk2 null; + } else null; + if (globalThis.hasException()) { + return .zero; + } + + break :brk .{ + .result = css.CssColor{ + .rgba = .{ + .alpha = if (a != null) @intCast(a.?) else 255, + .red = @intCast(r), + .green = @intCast(g), + .blue = @intCast(b), + }, + }, + }; + } + + input = args[0].toSlice(globalThis, bun.default_allocator); + + var parser_input = css.ParserInput.new(allocator, input.slice()); + var parser = css.Parser.new(&parser_input, null); + break :brk css.CssColor.parse(&parser); + }; + + switch (parsed_color) { + .err => |err| { + if (log.msgs.items.len == 0) { + return .null; + } + + globalThis.throw("color() failed to parse {s}", .{@tagName(err.basic().kind)}); + return JSC.JSValue.jsUndefined(); + }, + .result => |*result| { + const format: OutputColorFormat = if (unresolved_format == .ansi) switch (bun.Output.Source.colorDepth()) { + // No color terminal, therefore return an empty string + .none => return JSC.JSValue.jsEmptyString(globalThis), + .@"16" => .ansi_16, + .@"16m" => .ansi_16m, + .@"256" => .ansi_256, + } else unresolved_format; + + formatted: { + var str = color: { + switch (format) { + // resolved above. + .ansi => unreachable, + + // Use the CSS printer. + .css => break :formatted, + + .number, + .rgb, + .rgba, + .hex, + .HEX, + .ansi_16, + .ansi_16m, + .ansi_256, + .@"{rgba}", + .@"{rgb}", + .@"[rgba]", + .@"[rgb]", + => |tag| { + const srgba = switch (result.*) { + .float => |float| switch (float.*) { + .rgb => |rgb| rgb, + inline else => |*val| val.intoSRGB(), + }, + .rgba => |*rgba| rgba.intoSRGB(), + .lab => |lab| switch (lab.*) { + inline else => |entry| entry.intoSRGB(), + }, + else => break :formatted, + }; + const rgba = srgba.intoRGBA(); + switch (tag) { + .@"{rgba}" => { + const object = JSC.JSValue.createEmptyObject(globalThis, 4); + object.put(globalThis, "r", JSC.JSValue.jsNumber(rgba.red)); + object.put(globalThis, "g", JSC.JSValue.jsNumber(rgba.green)); + object.put(globalThis, "b", JSC.JSValue.jsNumber(rgba.blue)); + object.put(globalThis, "a", JSC.JSValue.jsNumber(rgba.alphaF32())); + return object; + }, + .@"{rgb}" => { + const object = JSC.JSValue.createEmptyObject(globalThis, 4); + object.put(globalThis, "r", JSC.JSValue.jsNumber(rgba.red)); + object.put(globalThis, "g", JSC.JSValue.jsNumber(rgba.green)); + object.put(globalThis, "b", JSC.JSValue.jsNumber(rgba.blue)); + return object; + }, + .@"[rgb]" => { + const object = JSC.JSValue.createEmptyArray(globalThis, 3); + object.putIndex(globalThis, 0, JSC.JSValue.jsNumber(rgba.red)); + object.putIndex(globalThis, 1, JSC.JSValue.jsNumber(rgba.green)); + object.putIndex(globalThis, 2, JSC.JSValue.jsNumber(rgba.blue)); + return object; + }, + .@"[rgba]" => { + const object = JSC.JSValue.createEmptyArray(globalThis, 4); + object.putIndex(globalThis, 0, JSC.JSValue.jsNumber(rgba.red)); + object.putIndex(globalThis, 1, JSC.JSValue.jsNumber(rgba.green)); + object.putIndex(globalThis, 2, JSC.JSValue.jsNumber(rgba.blue)); + object.putIndex(globalThis, 3, JSC.JSValue.jsNumber(rgba.alpha)); + return object; + }, + .number => { + var int: u32 = 0; + int |= @as(u32, rgba.red) << 16; + int |= @as(u32, rgba.green) << 8; + int |= @as(u32, rgba.blue); + return JSC.JSValue.jsNumber(int); + }, + .hex => { + break :color 
bun.String.createFormat("#{}{}{}", .{ bun.fmt.hexIntLower(rgba.red), bun.fmt.hexIntLower(rgba.green), bun.fmt.hexIntLower(rgba.blue) }); + }, + .HEX => { + break :color bun.String.createFormat("#{}{}{}", .{ bun.fmt.hexIntUpper(rgba.red), bun.fmt.hexIntUpper(rgba.green), bun.fmt.hexIntUpper(rgba.blue) }); + }, + .rgb => { + break :color bun.String.createFormat("rgb({d}, {d}, {d})", .{ rgba.red, rgba.green, rgba.blue }); + }, + .rgba => { + break :color bun.String.createFormat("rgba({d}, {d}, {d}, {d})", .{ rgba.red, rgba.green, rgba.blue, rgba.alphaF32() }); + }, + .ansi_16 => { + const ansi_16_color = Ansi256.get16(rgba.red, rgba.green, rgba.blue); + // 16-color ansi, foreground text color + break :color bun.String.createLatin1(&[_]u8{ + // 0x1b is the escape character + // 38 is the foreground color code + // 5 is the 16-color mode + // {d} is the color index + 0x1b, '[', '3', '8', ';', '5', ';', ansi_16_color, 'm', + }); + }, + .ansi_16m => { + // true color ansi + var buf: [48]u8 = undefined; + // 0x1b is the escape character + buf[0] = 0x1b; + buf[1] = '['; + buf[2] = '3'; + buf[3] = '8'; + buf[4] = ';'; + buf[5] = '2'; + buf[6] = ';'; + const additional = std.fmt.bufPrint(buf[7..], "{d};{d};{d}m", .{ + rgba.red, + rgba.green, + rgba.blue, + }) catch unreachable; + + break :color bun.String.createLatin1(buf[0 .. 7 + additional.len]); + }, + .ansi_256 => { + // ANSI escape sequence + var buf: Ansi256.Buffer = undefined; + const val = Ansi256.from(rgba, &buf); + break :color bun.String.createLatin1(val); + }, + else => unreachable, + } + }, + + .hsl => { + const hsl = switch (result.*) { + .float => |float| brk: { + switch (float.*) { + .hsl => |hsl| break :brk hsl, + inline else => |*val| break :brk val.intoHSL(), + } + }, + .rgba => |*rgba| rgba.intoHSL(), + .lab => |lab| switch (lab.*) { + inline else => |entry| entry.intoHSL(), + }, + else => break :formatted, + }; + + break :color bun.String.createFormat("hsl({d}, {d}, {d})", .{ hsl.h, hsl.s, hsl.l }); + }, + .lab => { + const lab = switch (result.*) { + .float => |float| switch (float.*) { + inline else => |*val| val.intoLAB(), + }, + .lab => |lab| switch (lab.*) { + .lab => |lab_| lab_, + inline else => |entry| entry.intoLAB(), + }, + .rgba => |*rgba| rgba.intoLAB(), + else => break :formatted, + }; + + break :color bun.String.createFormat("lab({d}, {d}, {d})", .{ lab.l, lab.a, lab.b }); + }, + } + } catch bun.outOfMemory(); + + return str.transferToJS(globalThis); + } + + // Fallback to CSS string output + var dest = std.ArrayListUnmanaged(u8){}; + const writer = dest.writer(allocator); + + var printer = css.Printer(@TypeOf(writer)).new( + allocator, + std.ArrayList(u8).init(allocator), + writer, + .{}, + null, + ); + + result.toCss(@TypeOf(writer), &printer) catch |err| { + globalThis.throw("color() internal error: {s}", .{@errorName(err)}); + return .zero; + }; + + var out = bun.String.createUTF8(dest.items); + return out.transferToJS(globalThis); + }, + } +} diff --git a/src/css/values/color_via.ts b/src/css/values/color_via.ts new file mode 100644 index 0000000000..fd1127c6f0 --- /dev/null +++ b/src/css/values/color_via.ts @@ -0,0 +1,231 @@ +const RGBA = "RGBA"; +const LAB = "LAB"; +const SRGB = "SRGB"; +const HSL = "HSL"; +const HWB = "HWB"; +const SRGBLinear = "SRGBLinear"; +const P3 = "P3"; +const A98 = "A98"; +const ProPhoto = "ProPhoto"; +const Rec2020 = "Rec2020"; +const XYZd50 = "XYZd50"; +const XYZd65 = "XYZd65"; +const LCH = "LCH"; +const OKLAB = "OKLAB"; +const OKLCH = "OKLCH"; +const color_spaces = [ + RGBA, + LAB, 
+ SRGB, + HSL, + HWB, + SRGBLinear, + P3, + A98, + ProPhoto, + Rec2020, + XYZd50, + XYZd65, + LCH, + OKLAB, + OKLCH, +]; + +type ColorSpaces = + | typeof RGBA + | typeof LAB + | typeof SRGB + | typeof HSL + | typeof HWB + | typeof SRGBLinear + | typeof P3 + | typeof A98 + | typeof ProPhoto + | typeof Rec2020 + | typeof XYZd50 + | typeof XYZd65 + | typeof LCH + | typeof OKLAB + | typeof OKLCH; + +type Foo = "a" | "b"; + +let code: Map = new Map(); + +initColorSpaces(); +addConversions(); +await generateCode(); + +function initColorSpaces() { + for (const space of color_spaces as ColorSpaces[]) { + code.set(space, []); + } +} + +async function generateCode() { + const output = `//!This file is generated by \`color_via.ts\`. Do not edit it directly! +const color = @import("./color.zig"); +const RGBA = color.RGBA; +const LAB = color.LAB; +const LCH = color.LCH; +const SRGB = color.SRGB; +const HSL = color.HSL; +const HWB = color.HWB; +const SRGBLinear = color.SRGBLinear; +const P3 = color.P3; +const A98 = color.A98; +const ProPhoto = color.ProPhoto; +const XYZd50 = color.XYZd50; +const XYZd65 = color.XYZd65; +const OKLAB = color.OKLAB; +const OKLCH = color.OKLCH; +const Rec2020 = color.Rec2020; + +pub const generated_color_conversions = struct { +${(() => { + let result = ""; + for (const [space, functions] of code) { + result += "\n"; + result += `pub const convert_${space} = struct {\n`; + result += functions.join("\n"); + result += "\n};"; + } + return result; +})()} +};`; + await Bun.$`echo ${output} > src/css/values/color_generated.zig; zig fmt src/css/values/color_generated.zig +`; +} + +function addConversions() { + // Once Rust specialization is stable, this could be simplified. + via("LAB", "XYZd50", "XYZd65"); + via("ProPhoto", "XYZd50", "XYZd65"); + via("OKLCH", "OKLAB", "XYZd65"); + + via("LAB", "XYZd65", "OKLAB"); + via("LAB", "XYZd65", "OKLCH"); + via("LAB", "XYZd65", "SRGB"); + via("LAB", "XYZd65", "SRGBLinear"); + via("LAB", "XYZd65", "P3"); + via("LAB", "XYZd65", "A98"); + via("LAB", "XYZd65", "ProPhoto"); + via("LAB", "XYZd65", "Rec2020"); + via("LAB", "XYZd65", "HSL"); + via("LAB", "XYZd65", "HWB"); + + via("LCH", "LAB", "XYZd65"); + via("LCH", "XYZd65", "OKLAB"); + via("LCH", "XYZd65", "OKLCH"); + via("LCH", "XYZd65", "SRGB"); + via("LCH", "XYZd65", "SRGBLinear"); + via("LCH", "XYZd65", "P3"); + via("LCH", "XYZd65", "A98"); + via("LCH", "XYZd65", "ProPhoto"); + via("LCH", "XYZd65", "Rec2020"); + via("LCH", "XYZd65", "XYZd50"); + via("LCH", "XYZd65", "HSL"); + via("LCH", "XYZd65", "HWB"); + + via("SRGB", "SRGBLinear", "XYZd65"); + via("SRGB", "XYZd65", "OKLAB"); + via("SRGB", "XYZd65", "OKLCH"); + via("SRGB", "XYZd65", "P3"); + via("SRGB", "XYZd65", "A98"); + via("SRGB", "XYZd65", "ProPhoto"); + via("SRGB", "XYZd65", "Rec2020"); + via("SRGB", "XYZd65", "XYZd50"); + + via("P3", "XYZd65", "SRGBLinear"); + via("P3", "XYZd65", "OKLAB"); + via("P3", "XYZd65", "OKLCH"); + via("P3", "XYZd65", "A98"); + via("P3", "XYZd65", "ProPhoto"); + via("P3", "XYZd65", "Rec2020"); + via("P3", "XYZd65", "XYZd50"); + via("P3", "XYZd65", "HSL"); + via("P3", "XYZd65", "HWB"); + + via("SRGBLinear", "XYZd65", "OKLAB"); + via("SRGBLinear", "XYZd65", "OKLCH"); + via("SRGBLinear", "XYZd65", "A98"); + via("SRGBLinear", "XYZd65", "ProPhoto"); + via("SRGBLinear", "XYZd65", "Rec2020"); + via("SRGBLinear", "XYZd65", "XYZd50"); + via("SRGBLinear", "XYZd65", "HSL"); + via("SRGBLinear", "XYZd65", "HWB"); + + via("A98", "XYZd65", "OKLAB"); + via("A98", "XYZd65", "OKLCH"); + via("A98", "XYZd65", "ProPhoto"); + 
via("A98", "XYZd65", "Rec2020"); + via("A98", "XYZd65", "XYZd50"); + via("A98", "XYZd65", "HSL"); + via("A98", "XYZd65", "HWB"); + + via("ProPhoto", "XYZd65", "OKLAB"); + via("ProPhoto", "XYZd65", "OKLCH"); + via("ProPhoto", "XYZd65", "Rec2020"); + via("ProPhoto", "XYZd65", "HSL"); + via("ProPhoto", "XYZd65", "HWB"); + + via("XYZd50", "XYZd65", "OKLAB"); + via("XYZd50", "XYZd65", "OKLCH"); + via("XYZd50", "XYZd65", "Rec2020"); + via("XYZd50", "XYZd65", "HSL"); + via("XYZd50", "XYZd65", "HWB"); + + via("Rec2020", "XYZd65", "OKLAB"); + via("Rec2020", "XYZd65", "OKLCH"); + via("Rec2020", "XYZd65", "HSL"); + via("Rec2020", "XYZd65", "HWB"); + + via("HSL", "XYZd65", "OKLAB"); + via("HSL", "XYZd65", "OKLCH"); + via("HSL", "SRGB", "XYZd65"); + via("HSL", "SRGB", "HWB"); + + via("HWB", "SRGB", "XYZd65"); + via("HWB", "XYZd65", "OKLAB"); + via("HWB", "XYZd65", "OKLCH"); + + // RGBA is an 8-bit version. Convert to SRGB, which is a + // more accurate floating point representation for all operations. + via("RGBA", "SRGB", "LAB"); + via("RGBA", "SRGB", "LCH"); + via("RGBA", "SRGB", "OKLAB"); + via("RGBA", "SRGB", "OKLCH"); + via("RGBA", "SRGB", "P3"); + via("RGBA", "SRGB", "SRGBLinear"); + via("RGBA", "SRGB", "A98"); + via("RGBA", "SRGB", "ProPhoto"); + via("RGBA", "SRGB", "XYZd50"); + via("RGBA", "SRGB", "XYZd65"); + via("RGBA", "SRGB", "Rec2020"); + via("RGBA", "SRGB", "HSL"); + via("RGBA", "SRGB", "HWB"); +} + +function via, V extends Exclude>( + from: T, + middle: U, + to: V, +) { + // Generate T, U, V function (where T, U, V are ColorSpaces) + let fromFunctions = code.get(from) || []; + fromFunctions.push(`pub fn into${to}(this: *const ${from}) ${to} { + const xyz: ${middle} = this.into${middle}(); + return xyz.into${to}(); +} +`); + code.set(from, fromFunctions); + + // Generate V, U, function + let toFunctions = code.get(to) || []; + toFunctions.push(`pub fn into${from}(this: *const ${to}) ${from} { + const xyz: ${middle} = this.into${middle}(); + return xyz.into${from}(); +} +`); + code.set(to, toFunctions); +} diff --git a/src/css/values/css_string.zig b/src/css/values/css_string.zig new file mode 100644 index 0000000000..7cb042cd3d --- /dev/null +++ b/src/css/values/css_string.zig @@ -0,0 +1,22 @@ +const std = @import("std"); +const Allocator = std.mem.Allocator; +const bun = @import("root").bun; +const logger = bun.logger; +const Log = logger.Log; + +pub const css = @import("../css_parser.zig"); +pub const Result = css.Result; +pub const Printer = css.Printer; +pub const PrintErr = css.PrintErr; + +/// A quoted CSS string. 
+pub const CSSString = []const u8; +pub const CSSStringFns = struct { + pub fn parse(input: *css.Parser) Result(CSSString) { + return input.expectString(); + } + + pub fn toCss(this: *const []const u8, comptime W: type, dest: *Printer(W)) PrintErr!void { + return css.serializer.serializeString(this.*, dest) catch return dest.addFmtError(); + } +}; diff --git a/src/css/values/easing.zig b/src/css/values/easing.zig new file mode 100644 index 0000000000..4d7c7cd00c --- /dev/null +++ b/src/css/values/easing.zig @@ -0,0 +1,257 @@ +const std = @import("std"); +const bun = @import("root").bun; +pub const css = @import("../css_parser.zig"); +const Result = css.Result; +const ArrayList = std.ArrayListUnmanaged; +const Printer = css.Printer; +const PrintErr = css.PrintErr; +const CSSNumber = css.css_values.number.CSSNumber; +const CSSNumberFns = css.css_values.number.CSSNumberFns; +const Calc = css.css_values.calc.Calc; +const DimensionPercentage = css.css_values.percentage.DimensionPercentage; +const LengthPercentage = css.css_values.length.LengthPercentage; +const Length = css.css_values.length.Length; +const Percentage = css.css_values.percentage.Percentage; +const CssColor = css.css_values.color.CssColor; +const Image = css.css_values.image.Image; +const Url = css.css_values.url.Url; +const CSSInteger = css.css_values.number.CSSInteger; +const CSSIntegerFns = css.css_values.number.CSSIntegerFns; +const Angle = css.css_values.angle.Angle; +const Time = css.css_values.time.Time; +const Resolution = css.css_values.resolution.Resolution; +const CustomIdent = css.css_values.ident.CustomIdent; +const CustomIdentFns = css.css_values.ident.CustomIdentFns; +const Ident = css.css_values.ident.Ident; + +/// A CSS [easing function](https://www.w3.org/TR/css-easing-1/#easing-functions). +pub const EasingFunction = union(enum) { + /// A linear easing function. + linear, + /// Equivalent to `cubic-bezier(0.25, 0.1, 0.25, 1)`. + ease, + /// Equivalent to `cubic-bezier(0.42, 0, 1, 1)`. + ease_in, + /// Equivalent to `cubic-bezier(0, 0, 0.58, 1)`. + ease_out, + /// Equivalent to `cubic-bezier(0.42, 0, 0.58, 1)`. + ease_in_out, + /// A custom cubic Bézier easing function. + cubic_bezier: struct { + /// The x-position of the first point in the curve. + x1: CSSNumber, + /// The y-position of the first point in the curve. + y1: CSSNumber, + /// The x-position of the second point in the curve. + x2: CSSNumber, + /// The y-position of the second point in the curve. + y2: CSSNumber, + }, + /// A step easing function. + steps: struct { + /// The number of intervals in the function. + count: CSSInteger, + /// The step position. 
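+ /// For example, `steps(4, jump-none)` uses the `jump-none` position; per css-easing-1,
+ /// an omitted position (as in `steps(4)`) behaves like `end`.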
+ position: StepPosition = StepPosition.default, + }, + + pub fn parse(input: *css.Parser) Result(EasingFunction) { + const location = input.currentSourceLocation(); + if (input.tryParse(struct { + fn parse(i: *css.Parser) Result([]const u8) { + return i.expectIdent(); + } + }.parse, .{}).asValue()) |ident| { + // todo_stuff.match_ignore_ascii_case + const keyword = if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(ident, "linear")) + EasingFunction.linear + else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(ident, "ease")) + EasingFunction.ease + else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(ident, "ease-in")) + EasingFunction.ease_in + else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(ident, "ease-out")) + EasingFunction.ease_out + else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(ident, "ease-in-out")) + EasingFunction.ease_in_out + else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(ident, "step-start")) + EasingFunction{ .steps = .{ .count = 1, .position = .start } } + else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(ident, "step-end")) + EasingFunction{ .steps = .{ .count = 1, .position = .end } } + else + return .{ .err = location.newUnexpectedTokenError(.{ .ident = ident }) }; + return .{ .result = keyword }; + } + + const function = switch (input.expectFunction()) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + return input.parseNestedBlock( + EasingFunction, + .{ .loc = location, .function = function }, + struct { + fn parse( + closure: *const struct { loc: css.SourceLocation, function: []const u8 }, + i: *css.Parser, + ) Result(EasingFunction) { + if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(closure.function, "cubic-bezier")) { + const x1 = switch (CSSNumberFns.parse(i)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + if (i.expectComma().asErr()) |e| return .{ .err = e }; + const y1 = switch (CSSNumberFns.parse(i)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + if (i.expectComma().asErr()) |e| return .{ .err = e }; + const x2 = switch (CSSNumberFns.parse(i)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + if (i.expectComma().asErr()) |e| return .{ .err = e }; + const y2 = switch (CSSNumberFns.parse(i)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + return .{ .result = EasingFunction{ .cubic_bezier = .{ .x1 = x1, .y1 = y1, .x2 = x2, .y2 = y2 } } }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(closure.function, "steps")) { + const count = switch (CSSIntegerFns.parse(i)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + const position = i.tryParse(struct { + fn parse(p: *css.Parser) Result(StepPosition) { + if (p.expectComma().asErr()) |e| return .{ .err = e }; + return StepPosition.parse(p); + } + }.parse, .{}).unwrapOr(StepPosition.default); + return .{ .result = EasingFunction{ .steps = .{ .count = count, .position = position } } }; + } else { + return closure.loc.newUnexpectedTokenError(.{ .ident = closure.function }); + } + } + }.parse, + ); + } + + pub fn toCss(this: *const EasingFunction, comptime W: type, dest: *css.Printer(W)) css.PrintErr!void { + return switch (this.*) { + .linear => try dest.writeStr("linear"), + .ease => try dest.writeStr("ease"), + .ease_in => try dest.writeStr("ease-in"), + .ease_out => try dest.writeStr("ease-out"), + .ease_in_out => try dest.writeStr("ease-in-out"), + else => { + if (this.isEase()) { + return dest.writeStr("ease"); + } else if (this == .cubic_bezier 
and std.meta.eql(this.cubic_bezier, .{
+ .x1 = 0.42,
+ .y1 = 0.0,
+ .x2 = 1.0,
+ .y2 = 1.0,
+ })) {
+ return dest.writeStr("ease-in");
+ } else if (this.* == .cubic_bezier and std.meta.eql(this.cubic_bezier, .{
+ .x1 = 0.0,
+ .y1 = 0.0,
+ .x2 = 0.58,
+ .y2 = 1.0,
+ })) {
+ return dest.writeStr("ease-out");
+ } else if (this.* == .cubic_bezier and std.meta.eql(this.cubic_bezier, .{
+ .x1 = 0.42,
+ .y1 = 0.0,
+ .x2 = 0.58,
+ .y2 = 1.0,
+ })) {
+ return dest.writeStr("ease-in-out");
+ }
+
+ switch (this.*) {
+ .cubic_bezier => |cb| {
+ try dest.writeStr("cubic-bezier(");
+ try css.generic.toCss(cb.x1, W, dest);
+ try dest.writeChar(',');
+ try css.generic.toCss(cb.y1, W, dest);
+ try dest.writeChar(',');
+ try css.generic.toCss(cb.x2, W, dest);
+ try dest.writeChar(',');
+ try css.generic.toCss(cb.y2, W, dest);
+ try dest.writeChar(')');
+ },
+ .steps => {
+ if (this.steps.count == 1 and this.steps.position == .start) {
+ return try dest.writeStr("step-start");
+ }
+ if (this.steps.count == 1 and this.steps.position == .end) {
+ return try dest.writeStr("step-end");
+ }
+ try dest.writeFmt("steps({d}", .{this.steps.count});
+ try dest.delim(',', false);
+ try this.steps.position.toCss(W, dest);
+ return try dest.writeChar(')');
+ },
+ .linear, .ease, .ease_in, .ease_out, .ease_in_out => unreachable,
+ }
+ },
+ };
+ }
+
+ /// Returns whether the easing function is equivalent to the `ease` keyword.
+ pub fn isEase(this: *const EasingFunction) bool {
+ return this.* == .ease or
+ (this.* == .cubic_bezier and std.meta.eql(this.cubic_bezier, .{
+ .x1 = 0.25,
+ .y1 = 0.1,
+ .x2 = 0.25,
+ .y2 = 1.0,
+ }));
+ }
+};
+
+/// A [step position](https://www.w3.org/TR/css-easing-1/#step-position), used within the `steps()` function.
+pub const StepPosition = enum {
+ /// The first rise occurs at input progress value of 0.
+ start,
+ /// The last rise occurs at input progress value of 1.
+ end,
+ /// All rises occur within the range (0, 1).
+ jump_none,
+ /// The first rise occurs at input progress value of 0 and the last rise occurs at input progress value of 1.
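+ /// (the `jump-both` keyword of `steps()`, e.g. `steps(4, jump-both)`)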
+ jump_both, + + // TODO: implement this + // pub usingnamespace css.DeriveToCss(@This()); + + pub fn toCss(this: *const StepPosition, comptime W: type, dest: *css.Printer(W)) css.PrintErr!void { + _ = this; // autofix + _ = dest; // autofix + @panic(css.todo_stuff.depth); + } + + pub fn parse(input: *css.Parser) Result(StepPosition) { + const location = input.currentSourceLocation(); + const ident = switch (input.expectIdent()) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + // todo_stuff.match_ignore_ascii_case + const keyword = if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(ident, "start")) + StepPosition.start + else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(ident, "end")) + StepPosition.end + else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(ident, "jump-start")) + StepPosition.start + else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(ident, "jump-end")) + StepPosition.end + else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(ident, "jump-none")) + StepPosition.jump_none + else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(ident, "jump-both")) + StepPosition.jump_both + else + return .{ .err = location.newUnexpectedTokenError(.{ .ident = ident }) }; + return .{ .result = keyword }; + } +}; diff --git a/src/css/values/gradient.zig b/src/css/values/gradient.zig new file mode 100644 index 0000000000..292db6ec88 --- /dev/null +++ b/src/css/values/gradient.zig @@ -0,0 +1,1662 @@ +const std = @import("std"); +const Allocator = std.mem.Allocator; +const bun = @import("root").bun; +const ArrayList = std.ArrayListUnmanaged; +pub const css = @import("../css_parser.zig"); +const Result = css.Result; +const VendorPrefix = css.VendorPrefix; +const Printer = css.Printer; +const PrintErr = css.PrintErr; +const CssColor = css.css_values.color.CssColor; +const CSSNumber = css.css_values.number.CSSNumber; +const CSSNumberFns = css.css_values.number.CSSNumberFns; +const Url = css.css_values.url.Url; +const Angle = css.css_values.angle.Angle; +const AnglePercentage = css.css_values.angle.AnglePercentage; +const HorizontalPositionKeyword = css.css_values.position.HorizontalPositionKeyword; +const VerticalPositionKeyword = css.css_values.position.VerticalPositionKeyword; +const Position = css.css_values.position.Position; +const Length = css.css_values.length.Length; +const LengthPercentage = css.css_values.length.LengthPercentage; +const NumberOrPercentage = css.css_values.percentage.NumberOrPercentage; + +/// A CSS [``](https://www.w3.org/TR/css-images-3/#gradients) value. +pub const Gradient = union(enum) { + /// A `linear-gradient()`, and its vendor prefix. + linear: LinearGradient, + /// A `repeating-linear-gradient()`, and its vendor prefix. + repeating_linear: LinearGradient, + /// A `radial-gradient()`, and its vendor prefix. + radial: RadialGradient, + /// A `repeating-radial-gradient`, and its vendor prefix. + repeating_radial: RadialGradient, + /// A `conic-gradient()`. + conic: ConicGradient, + /// A `repeating-conic-gradient()`. + repeating_conic: ConicGradient, + /// A legacy `-webkit-gradient()`. 
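+ /// For example: `-webkit-gradient(linear, left top, left bottom, from(#fff), to(#000))`.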
+ @"webkit-gradient": WebKitGradient, + + pub fn parse(input: *css.Parser) Result(Gradient) { + const location = input.currentSourceLocation(); + const func = switch (input.expectFunction()) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + const Closure = struct { location: css.SourceLocation, func: []const u8 }; + return input.parseNestedBlock(Gradient, Closure{ .location = location, .func = func }, struct { + fn parse( + closure: Closure, + input_: *css.Parser, + ) Result(Gradient) { + // css.todo_stuff.match_ignore_ascii_case + if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(closure.func, "linear-gradient")) { + return .{ .result = .{ .linear = switch (LinearGradient.parse(input_, css.VendorPrefix{ .none = true })) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + } } }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(closure.func, "repeating-linear-gradient")) { + return .{ .result = .{ .repeating_linear = switch (LinearGradient.parse(input_, css.VendorPrefix{ .none = true })) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + } } }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(closure.func, "radial-gradient")) { + return .{ .result = .{ .radial = switch (RadialGradient.parse(input_, css.VendorPrefix{ .none = true })) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + } } }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(closure.func, "repeating-radial-gradient")) { + return .{ .result = .{ .repeating_radial = switch (RadialGradient.parse(input_, css.VendorPrefix{ .none = true })) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + } } }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(closure.func, "conic-gradient")) { + return .{ .result = .{ .conic = switch (ConicGradient.parse(input_)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + } } }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(closure.func, "repeating-conic-gradient")) { + return .{ .result = .{ .repeating_conic = switch (ConicGradient.parse(input_)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + } } }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(closure.func, "-webkit-linear-gradient")) { + return .{ .result = .{ .linear = switch (LinearGradient.parse(input_, css.VendorPrefix{ .webkit = true })) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + } } }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(closure.func, "-webkit-repeating-linear-gradient")) { + return .{ .result = .{ .repeating_linear = switch (LinearGradient.parse(input_, css.VendorPrefix{ .webkit = true })) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + } } }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(closure.func, "-webkit-radial-gradient")) { + return .{ .result = .{ .radial = switch (RadialGradient.parse(input_, css.VendorPrefix{ .webkit = true })) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + } } }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(closure.func, "-webkit-repeating-radial-gradient")) { + return .{ .result = .{ .repeating_radial = switch (RadialGradient.parse(input_, css.VendorPrefix{ .webkit = true })) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + } } }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(closure.func, "-moz-linear-gradient")) { + return .{ .result = .{ .linear = switch (LinearGradient.parse(input_, css.VendorPrefix{ .moz = true })) { + 
.result => |vv| vv, + .err => |e| return .{ .err = e }, + } } }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(closure.func, "-moz-repeating-linear-gradient")) { + return .{ .result = .{ .repeating_linear = switch (LinearGradient.parse(input_, css.VendorPrefix{ .moz = true })) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + } } }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(closure.func, "-moz-radial-gradient")) { + return .{ .result = .{ .radial = switch (RadialGradient.parse(input_, css.VendorPrefix{ .moz = true })) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + } } }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(closure.func, "-moz-repeating-radial-gradient")) { + return .{ .result = .{ .repeating_radial = switch (RadialGradient.parse(input_, css.VendorPrefix{ .moz = true })) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + } } }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(closure.func, "-o-linear-gradient")) { + return .{ .result = .{ .linear = switch (LinearGradient.parse(input_, css.VendorPrefix{ .o = true })) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + } } }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(closure.func, "-o-repeating-linear-gradient")) { + return .{ .result = .{ .repeating_linear = switch (LinearGradient.parse(input_, css.VendorPrefix{ .o = true })) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + } } }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(closure.func, "-o-radial-gradient")) { + return .{ .result = .{ .radial = switch (RadialGradient.parse(input_, css.VendorPrefix{ .o = true })) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + } } }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(closure.func, "-o-repeating-radial-gradient")) { + return .{ .result = .{ .repeating_radial = switch (RadialGradient.parse(input_, css.VendorPrefix{ .o = true })) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + } } }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(closure.func, "-webkit-gradient")) { + return .{ .result = .{ .@"webkit-gradient" = switch (WebKitGradient.parse(input_)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + } } }; + } else { + return .{ .err = closure.location.newUnexpectedTokenError(.{ .ident = closure.func }) }; + } + } + }.parse); + } + + pub fn toCss(this: *const Gradient, comptime W: type, dest: *Printer(W)) PrintErr!void { + const f: []const u8, const prefix: ?css.VendorPrefix = switch (this.*) { + .linear => |g| .{ "linear-gradient(", g.vendor_prefix }, + .repeating_linear => |g| .{ "repeating-linear-gradient(", g.vendor_prefix }, + .radial => |g| .{ "radial-gradient(", g.vendor_prefix }, + .repeating_radial => |g| .{ "repeating-linear-gradient(", g.vendor_prefix }, + .conic => .{ "conic-gradient(", null }, + .repeating_conic => .{ "repeating-conic-gradient(", null }, + .@"webkit-gradient" => .{ "-webkit-gradient(", null }, + }; + + if (prefix) |p| { + try p.toCss(W, dest); + } + + try dest.writeStr(f); + + switch (this.*) { + .linear, .repeating_linear => |*linear| { + try linear.toCss(W, dest, linear.vendor_prefix.eq(css.VendorPrefix{ .none = true })); + }, + .radial, .repeating_radial => |*radial| { + try radial.toCss(W, dest); + }, + .conic, .repeating_conic => |*conic| { + try conic.toCss(W, dest); + }, + .@"webkit-gradient" => |*g| { + try g.toCss(W, dest); + }, + } + + return dest.writeChar(')'); + } + + /// 
Attempts to convert the gradient to the legacy `-webkit-gradient()` syntax. + /// + /// Returns an error in case the conversion is not possible. + pub fn getLegacyWebkit(this: *const @This(), allocator: Allocator) ?Gradient { + return Gradient{ .@"webkit-gradient" = WebKitGradient.fromStandard(this, allocator) orelse return null }; + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(this: *const Gradient, other: *const Gradient) bool { + return css.implementEql(Gradient, this, other); + // if (this.* == .linear and other.* == .linear) { + // return this.linear.eql(&other.linear); + // } else if (this.* == .repeating_linear and other.* == .repeating_linear) { + // return this.repeating_linear.eql(&other.repeating_linear); + // } else if (this.* == .radial and other.* == .radial) { + // return this.radial.eql(&other.radial); + // } else if (this.* == .repeating_radial and other.* == .repeating_radial) { + // return this.repeating_radial.eql(&other.repeating_radial); + // } else if (this.* == .conic and other.* == .conic) { + // return this.conic.eql(&other.conic); + // } else if (this.* == .repeating_conic and other.* == .repeating_conic) { + // return this.repeating_conic.eql(&other.repeating_conic); + // } else if (this.* == .@"webkit-gradient" and other.* == .@"webkit-gradient") { + // return this.@"webkit-gradient".eql(&other.@"webkit-gradient"); + // } + // ret + } + + /// Returns the vendor prefix of the gradient. + pub fn getVendorPrefix(this: *const @This()) VendorPrefix { + return switch (this.*) { + .linear => |linear| linear.vendor_prefix, + .repeating_linear => |linear| linear.vendor_prefix, + .radial => |radial| radial.vendor_prefix, + .repeating_radial => |radial| radial.vendor_prefix, + .@"webkit-gradient" => VendorPrefix{ .webkit = true }, + else => VendorPrefix{ .none = true }, + }; + } + + /// Returns the vendor prefixes needed for the given browser targets. + pub fn getNecessaryPrefixes(this: *const @This(), targets: css.targets.Targets) css.VendorPrefix { + const getPrefixes = struct { + fn call(tgts: css.targets.Targets, feature: css.prefixes.Feature, prefix: VendorPrefix) VendorPrefix { + return tgts.prefixes(prefix, feature); + } + }.call; + + return switch (this.*) { + .linear => |linear| getPrefixes(targets, .linear_gradient, linear.vendor_prefix), + .repeating_linear => |linear| getPrefixes(targets, .repeating_linear_gradient, linear.vendor_prefix), + .radial => |radial| getPrefixes(targets, .radial_gradient, radial.vendor_prefix), + .repeating_radial => |radial| getPrefixes(targets, .repeating_radial_gradient, radial.vendor_prefix), + else => VendorPrefix{ .none = true }, + }; + } + + /// Returns a copy of the gradient with the given vendor prefix. 
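+ /// Hypothetical call site (variable names assumed):
+ /// `const prefixed = gradient.getPrefixed(allocator, css.VendorPrefix{ .webkit = true });`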
+ pub fn getPrefixed(this: *const @This(), allocator: Allocator, prefix: css.VendorPrefix) Gradient { + return switch (this.*) { + .linear => |*linear| .{ .linear = brk: { + var x = linear.deepClone(allocator); + x.vendor_prefix = prefix; + break :brk x; + } }, + .repeating_linear => |*linear| .{ .repeating_linear = brk: { + var x = linear.deepClone(allocator); + x.vendor_prefix = prefix; + break :brk x; + } }, + .radial => |*radial| .{ .radial = brk: { + var x = radial.deepClone(allocator); + x.vendor_prefix = prefix; + break :brk x; + } }, + .repeating_radial => |*radial| .{ .repeating_radial = brk: { + var x = radial.deepClone(allocator); + x.vendor_prefix = prefix; + break :brk x; + } }, + else => this.deepClone(allocator), + }; + } + + /// Returns a fallback gradient for the given color fallback type. + pub fn getFallback(this: *const @This(), allocator: Allocator, kind: css.ColorFallbackKind) Gradient { + return switch (this.*) { + .linear => |g| .{ .linear = g.getFallback(allocator, kind) }, + .repeating_linear => |g| .{ .repeating_linear = g.getFallback(allocator, kind) }, + .radial => |g| .{ .radial = g.getFallback(allocator, kind) }, + .repeating_radial => |g| .{ .repeating_radial = g.getFallback(allocator, kind) }, + .conic => |g| .{ .conic = g.getFallback(allocator, kind) }, + .repeating_conic => |g| .{ .repeating_conic = g.getFallback(allocator, kind) }, + .@"webkit-gradient" => |g| .{ .@"webkit-gradient" = g.getFallback(allocator, kind) }, + }; + } + + /// Returns the color fallback types needed for the given browser targets. + pub fn getNecessaryFallbacks(this: *const @This(), targets: css.targets.Targets) css.ColorFallbackKind { + var fallbacks = css.ColorFallbackKind.empty(); + switch (this.*) { + .linear, .repeating_linear => |*linear| { + for (linear.items.items) |*item| { + fallbacks = fallbacks.bitwiseOr(item.getNecessaryFallbacks(targets)); + } + }, + .radial, .repeating_radial => |*radial| { + for (radial.items.items) |*item| { + fallbacks = fallbacks.bitwiseOr(item.getNecessaryFallbacks(targets)); + } + }, + .conic, .repeating_conic => |*conic| { + for (conic.items.items) |*item| { + fallbacks = fallbacks.bitwiseOr(item.getNecessaryFallbacks(targets)); + } + }, + .@"webkit-gradient" => {}, + } + return fallbacks; + } +}; + +/// A CSS [`linear-gradient()`](https://www.w3.org/TR/css-images-3/#linear-gradients) or `repeating-linear-gradient()`. +pub const LinearGradient = struct { + /// The vendor prefixes for the gradient. + vendor_prefix: VendorPrefix, + /// The direction of the gradient. + direction: LineDirection, + /// The color stops and transition hints for the gradient. 
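+ /// For example, `linear-gradient(to right, red, 30%, blue)` yields two color stops
+ /// (`red`, `blue`) and one interpolation hint (`30%`).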
+ items: ArrayList(GradientItem(LengthPercentage)), + + pub fn parse(input: *css.Parser, vendor_prefix: VendorPrefix) Result(LinearGradient) { + const direction: LineDirection = if (input.tryParse(LineDirection.parse, .{vendor_prefix.neq(VendorPrefix{ .none = true })}).asValue()) |dir| direction: { + if (input.expectComma().asErr()) |e| return .{ .err = e }; + break :direction dir; + } else LineDirection{ .vertical = .bottom }; + const items = switch (parseItems(LengthPercentage, input)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + return .{ .result = LinearGradient{ .direction = direction, .items = items, .vendor_prefix = vendor_prefix } }; + } + + pub fn toCss(this: *const LinearGradient, comptime W: type, dest: *Printer(W), is_prefixed: bool) PrintErr!void { + const angle: f32 = switch (this.direction) { + .vertical => |v| switch (v) { + .bottom => 180.0, + .top => 0.0, + }, + .angle => |a| a.toDegrees(), + else => -1.0, + }; + + // We can omit `to bottom` or `180deg` because it is the default. + if (angle == 180.0) { + // todo_stuff.depth + try serializeItems(LengthPercentage, &this.items, W, dest); + } + // If we have `to top` or `0deg`, and all of the positions and hints are percentages, + // we can flip the gradient the other direction and omit the direction. + else if (angle == 0.0 and dest.minify and brk: { + for (this.items.items) |*item| { + if (item.* == .hint and item.hint != .percentage) break :brk false; + if (item.* == .color_stop and item.color_stop.position != null and item.color_stop.position.? != .percentage) break :brk false; + } + break :brk true; + }) { + var flipped_items = ArrayList(GradientItem(LengthPercentage)).initCapacity( + dest.allocator, + this.items.items.len, + ) catch bun.outOfMemory(); + defer flipped_items.deinit(dest.allocator); + + var i: usize = this.items.items.len; + while (i > 0) { + i -= 1; + const item = &this.items.items[i]; + switch (item.*) { + .hint => |*h| switch (h.*) { + .percentage => |p| flipped_items.append(dest.allocator, .{ .hint = .{ .percentage = .{ .v = 1.0 - p.v } } }) catch bun.outOfMemory(), + else => unreachable, + }, + .color_stop => |*cs| flipped_items.append(dest.allocator, .{ + .color_stop = .{ + .color = cs.color, + .position = if (cs.position) |*p| switch (p.*) { + .percentage => |perc| .{ .percentage = .{ .v = 1.0 - perc.v } }, + else => unreachable, + } else null, + }, + }) catch bun.outOfMemory(), + } + } + + serializeItems(LengthPercentage, &flipped_items, W, dest) catch return dest.addFmtError(); + } else { + if ((this.direction != .vertical or this.direction.vertical != .bottom) and + (this.direction != .angle or this.direction.angle.deg != 180.0)) + { + try this.direction.toCss(W, dest, is_prefixed); + try dest.delim(',', false); + } + + serializeItems(LengthPercentage, &this.items, W, dest) catch return dest.addFmtError(); + } + } + + pub fn isCompatible(this: *const @This(), browsers: css.targets.Browsers) bool { + for (this.items.items) |*item| { + if (!item.isCompatible(browsers)) return false; + } + return true; + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(this: *const LinearGradient, other: *const LinearGradient) bool { + return this.vendor_prefix.eql(other.vendor_prefix) and this.direction.eql(&other.direction) and css.generic.eqlList(GradientItem(LengthPercentage), &this.items, &other.items); + } + + pub fn getFallback(this: *const @This(), allocator: 
std.mem.Allocator, kind: css.ColorFallbackKind) LinearGradient { + var fallback_items = ArrayList(GradientItem(LengthPercentage)).initCapacity(allocator, this.items.items.len) catch bun.outOfMemory(); + fallback_items.items.len = this.items.items.len; + for (fallback_items.items, this.items.items) |*out, *in| { + out.* = in.getFallback(allocator, kind); + } + + return LinearGradient{ + .direction = this.direction.deepClone(allocator), + .items = fallback_items, + .vendor_prefix = this.vendor_prefix, + }; + } +}; + +/// A CSS [`radial-gradient()`](https://www.w3.org/TR/css-images-3/#radial-gradients) or `repeating-radial-gradient()`. +pub const RadialGradient = struct { + /// The vendor prefixes for the gradient. + vendor_prefix: VendorPrefix, + /// The shape of the gradient. + shape: EndingShape, + /// The position of the gradient. + position: Position, + /// The color stops and transition hints for the gradient. + items: ArrayList(GradientItem(LengthPercentage)), + + pub fn parse(input: *css.Parser, vendor_prefix: VendorPrefix) Result(RadialGradient) { + // todo_stuff.depth + const shape = switch (input.tryParse(EndingShape.parse, .{})) { + .result => |vv| vv, + .err => null, + }; + const position = switch (input.tryParse(struct { + fn parse(input_: *css.Parser) Result(Position) { + if (input_.expectIdentMatching("at").asErr()) |e| return .{ .err = e }; + return Position.parse(input_); + } + }.parse, .{})) { + .result => |v| v, + .err => null, + }; + + if (shape != null or position != null) { + if (input.expectComma().asErr()) |e| return .{ .err = e }; + } + + const items = switch (parseItems(LengthPercentage, input)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + return .{ + .result = RadialGradient{ + // todo_stuff.depth + .shape = shape orelse EndingShape.default(), + // todo_stuff.depth + .position = position orelse Position.center(), + .items = items, + .vendor_prefix = vendor_prefix, + }, + }; + } + + pub fn toCss(this: *const RadialGradient, comptime W: type, dest: *Printer(W)) PrintErr!void { + if (std.meta.eql(this.shape, EndingShape.default())) { + try this.shape.toCss(W, dest); + if (this.position.isCenter()) { + try dest.delim(',', false); + } else { + try dest.writeChar(' '); + } + } + + if (!this.position.isCenter()) { + try dest.writeStr("at "); + try this.position.toCss(W, dest); + try dest.delim(',', false); + } + + try serializeItems(LengthPercentage, &this.items, W, dest); + } + + pub fn isCompatible(this: *const @This(), browsers: css.targets.Browsers) bool { + for (this.items.items) |*item| { + if (!item.isCompatible(browsers)) return false; + } + return true; + } + + pub fn getFallback(this: *const RadialGradient, allocator: Allocator, kind: css.ColorFallbackKind) RadialGradient { + var items = ArrayList(GradientItem(LengthPercentage)).initCapacity(allocator, this.items.items.len) catch bun.outOfMemory(); + items.items.len = this.items.items.len; + for (items.items, this.items.items) |*out, *in| { + out.* = in.getFallback(allocator, kind); + } + + return RadialGradient{ + .shape = this.shape.deepClone(allocator), + .position = this.position.deepClone(allocator), + .items = items, + .vendor_prefix = this.vendor_prefix, + }; + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(this: *const RadialGradient, other: *const RadialGradient) bool { + return this.vendor_prefix.eql(other.vendor_prefix) and + this.shape.eql(&other.shape) and + 
this.position.eql(&other.position) and + css.generic.eqlList(GradientItem(LengthPercentage), &this.items, &other.items); + } +}; + +/// A CSS [`conic-gradient()`](https://www.w3.org/TR/css-images-4/#conic-gradients) or `repeating-conic-gradient()`. +pub const ConicGradient = struct { + /// The angle of the gradient. + angle: Angle, + /// The position of the gradient. + position: Position, + /// The color stops and transition hints for the gradient. + items: ArrayList(GradientItem(AnglePercentage)), + + pub fn parse(input: *css.Parser) Result(ConicGradient) { + const angle = input.tryParse(struct { + inline fn parse(i: *css.Parser) Result(Angle) { + if (i.expectIdentMatching("from").asErr()) |e| return .{ .err = e }; + // Spec allows unitless zero angles for gradients. + // https://w3c.github.io/csswg-drafts/css-images-4/#valdef-conic-gradient-angle + return Angle.parseWithUnitlessZero(i); + } + }.parse, .{}).unwrapOr(Angle{ .deg = 0.0 }); + + const position = input.tryParse(struct { + inline fn parse(i: *css.Parser) Result(Position) { + if (i.expectIdentMatching("at").asErr()) |e| return .{ .err = e }; + return Position.parse(i); + } + }.parse, .{}).unwrapOr(Position.center()); + + if (!angle.eql(&Angle{ .deg = 0.0 }) or !std.meta.eql(position, Position.center())) { + if (input.expectComma().asErr()) |e| return .{ .err = e }; + } + + const items = switch (parseItems(AnglePercentage, input)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + return .{ .result = ConicGradient{ + .angle = angle, + .position = position, + .items = items, + } }; + } + + pub fn toCss(this: *const ConicGradient, comptime W: type, dest: *Printer(W)) PrintErr!void { + if (!this.angle.isZero()) { + try dest.writeStr("from "); + try this.angle.toCss(W, dest); + + if (this.position.isCenter()) { + try dest.delim(',', false); + } else { + try dest.writeChar(' '); + } + } + + if (!this.position.isCenter()) { + try dest.writeStr("at "); + try this.position.toCss(W, dest); + try dest.delim(',', false); + } + + return try serializeItems(AnglePercentage, &this.items, W, dest); + } + + pub fn isCompatible(this: *const @This(), browsers: css.targets.Browsers) bool { + for (this.items.items) |*item| { + if (!item.isCompatible(browsers)) return false; + } + return true; + } + + pub fn getFallback(this: *const @This(), allocator: Allocator, kind: css.ColorFallbackKind) ConicGradient { + var items = ArrayList(GradientItem(AnglePercentage)).initCapacity(allocator, this.items.items.len) catch bun.outOfMemory(); + items.items.len = this.items.items.len; + for (items.items, this.items.items) |*out, *in| { + out.* = in.getFallback(allocator, kind); + } + + return ConicGradient{ + .angle = this.angle.deepClone(allocator), + .position = this.position.deepClone(allocator), + .items = items, + }; + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(this: *const ConicGradient, other: *const ConicGradient) bool { + return this.angle.eql(&other.angle) and + this.position.eql(&other.position) and + css.generic.eqlList(GradientItem(AnglePercentage), &this.items, &other.items); + } +}; + +/// A legacy `-webkit-gradient()`. +pub const WebKitGradient = union(enum) { + /// A linear `-webkit-gradient()`. + linear: struct { + /// The starting point of the gradient. + from: WebKitGradientPoint, + /// The ending point of the gradient. + to: WebKitGradientPoint, + /// The color stops in the gradient. 
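+ /// e.g. the `from(red)` and `to(blue)` stops in
+ /// `-webkit-gradient(linear, 0 0, 0 100%, from(red), to(blue))`.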
+ stops: ArrayList(WebKitColorStop), + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + }, + /// A radial `-webkit-gradient()`. + radial: struct { + /// The starting point of the gradient. + from: WebKitGradientPoint, + /// The starting radius of the gradient. + r0: CSSNumber, + /// The ending point of the gradient. + to: WebKitGradientPoint, + /// The ending radius of the gradient. + r1: CSSNumber, + /// The color stops in the gradient. + stops: ArrayList(WebKitColorStop), + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + }, + + pub fn parse(input: *css.Parser) Result(WebKitGradient) { + const location = input.currentSourceLocation(); + const ident = switch (input.expectIdent()) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + if (input.expectComma().asErr()) |e| return .{ .err = e }; + + // todo_stuff.match_ignore_ascii_case + if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(ident, "linear")) { + // todo_stuff.depth + const from = switch (WebKitGradientPoint.parse(input)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + if (input.expectComma().asErr()) |e| return .{ .err = e }; + const to = switch (WebKitGradientPoint.parse(input)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + if (input.expectComma().asErr()) |e| return .{ .err = e }; + const stops = switch (input.parseCommaSeparated(WebKitColorStop, WebKitColorStop.parse)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + return .{ .result = WebKitGradient{ .linear = .{ + .from = from, + .to = to, + .stops = stops, + } } }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(ident, "radial")) { + const from = switch (WebKitGradientPoint.parse(input)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + if (input.expectComma().asErr()) |e| return .{ .err = e }; + const r0 = switch (CSSNumberFns.parse(input)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + if (input.expectComma().asErr()) |e| return .{ .err = e }; + const to = switch (WebKitGradientPoint.parse(input)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + if (input.expectComma().asErr()) |e| return .{ .err = e }; + const r1 = switch (CSSNumberFns.parse(input)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + if (input.expectComma().asErr()) |e| return .{ .err = e }; + // todo_stuff.depth + const stops = switch (input.parseCommaSeparated(WebKitColorStop, WebKitColorStop.parse)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + return .{ .result = WebKitGradient{ + .radial = .{ + .from = from, + .r0 = r0, + .to = to, + .r1 = r1, + .stops = stops, + }, + } }; + } else { + return .{ .err = location.newUnexpectedTokenError(.{ .ident = ident }) }; + } + } + + pub fn toCss(this: *const WebKitGradient, comptime W: type, dest: *Printer(W)) PrintErr!void { + switch (this.*) { + .linear => |*linear| { + try dest.writeStr("linear"); + try dest.delim(',', false); + try linear.from.toCss(W, dest); + try dest.delim(',', false); + try linear.to.toCss(W, dest); + for (linear.stops.items) |*stop| { + try dest.delim(',', false); + try stop.toCss(W, dest); + } + }, + .radial => |*radial| { + try dest.writeStr("radial"); + try dest.delim(',', false); + try radial.from.toCss(W, dest); + try dest.delim(',', false); + try 
CSSNumberFns.toCss(&radial.r0, W, dest); + try dest.delim(',', false); + try radial.to.toCss(W, dest); + try dest.delim(',', false); + try CSSNumberFns.toCss(&radial.r1, W, dest); + for (radial.stops.items) |*stop| { + try dest.delim(',', false); + try stop.toCss(W, dest); + } + }, + } + } + + pub fn getFallback(this: *const @This(), allocator: Allocator, kind: css.ColorFallbackKind) WebKitGradient { + var stops: ArrayList(WebKitColorStop) = .{}; + switch (this.*) { + .linear => |linear| { + stops = ArrayList(WebKitColorStop).initCapacity(allocator, linear.stops.items.len) catch bun.outOfMemory(); + stops.items.len = linear.stops.items.len; + for (stops.items, linear.stops.items) |*out, *in| { + out.* = in.getFallback(allocator, kind); + } + return WebKitGradient{ + .linear = .{ + .from = linear.from.deepClone(allocator), + .to = linear.to.deepClone(allocator), + .stops = stops, + }, + }; + }, + .radial => |radial| { + stops = ArrayList(WebKitColorStop).initCapacity(allocator, radial.stops.items.len) catch bun.outOfMemory(); + stops.items.len = radial.stops.items.len; + for (stops.items, radial.stops.items) |*out, *in| { + out.* = in.getFallback(allocator, kind); + } + return WebKitGradient{ + .radial = .{ + .from = radial.from.deepClone(allocator), + .r0 = radial.r0, + .to = radial.to.deepClone(allocator), + .r1 = radial.r1, + .stops = stops, + }, + }; + }, + } + } + + pub fn fromStandard(gradient: *const Gradient, allocator: Allocator) ?WebKitGradient { + switch (gradient.*) { + .linear => |*linear| { + // Convert from line direction to a from and to point, if possible. + const from: struct { f32, f32 }, const to: struct { f32, f32 } = switch (linear.direction) { + .horizontal => |horizontal| switch (horizontal) { + .left => .{ .{ 1.0, 0.0 }, .{ 0.0, 0.0 } }, + .right => .{ .{ 0.0, 0.0 }, .{ 1.0, 0.0 } }, + }, + .vertical => |vertical| switch (vertical) { + .top => .{ .{ 0.0, 1.0 }, .{ 0.0, 0.0 } }, + .bottom => .{ .{ 0.0, 0.0 }, .{ 0.0, 1.0 } }, + }, + .corner => |corner| switch (corner.horizontal) { + .left => switch (corner.vertical) { + .top => .{ .{ 1.0, 1.0 }, .{ 0.0, 0.0 } }, + .bottom => .{ .{ 1.0, 0.0 }, .{ 0.0, 1.0 } }, + }, + .right => switch (corner.vertical) { + .top => .{ .{ 0.0, 1.0 }, .{ 1.0, 0.0 } }, + .bottom => .{ .{ 0.0, 0.0 }, .{ 1.0, 1.0 } }, + }, + }, + .angle => |angle| brk: { + const degrees = angle.toDegrees(); + if (degrees == 0.0) { + break :brk .{ .{ 0.0, 1.0 }, .{ 0.0, 0.0 } }; + } else if (degrees == 90.0) { + break :brk .{ .{ 0.0, 0.0 }, .{ 1.0, 0.0 } }; + } else if (degrees == 180.0) { + break :brk .{ .{ 0.0, 0.0 }, .{ 0.0, 1.0 } }; + } else if (degrees == 270.0) { + break :brk .{ .{ 1.0, 0.0 }, .{ 0.0, 0.0 } }; + } else { + return null; + } + }, + }; + + return WebKitGradient{ + .linear = .{ + .from = .{ + .x = .{ .number = .{ .percentage = .{ .v = from[0] } } }, + .y = .{ .number = .{ .percentage = .{ .v = from[1] } } }, + }, + .to = .{ + .x = .{ .number = .{ .percentage = .{ .v = to[0] } } }, + .y = .{ .number = .{ .percentage = .{ .v = to[1] } } }, + }, + .stops = convertStopsToWebkit(allocator, &linear.items) orelse return null, + }, + }; + }, + .radial => |*radial| { + // Webkit radial gradients are always circles, not ellipses, and must be specified in pixels. 
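+ // For example, `radial-gradient(circle 20px, red, blue)` can be converted, while an
+ // ellipse or a radius that cannot be resolved to pixels cannot, so those cases return null.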
+ const radius = switch (radial.shape) { + .circle => |*circle| switch (circle.*) { + .radius => |r| if (r.toPx()) |px| px else return null, + else => return null, + }, + else => return null, + }; + + const x = WebKitGradientPointComponent(HorizontalPositionKeyword).fromPosition(&radial.position.x, allocator) orelse return null; + const y = WebKitGradientPointComponent(VerticalPositionKeyword).fromPosition(&radial.position.y, allocator) orelse return null; + const point = WebKitGradientPoint{ .x = x, .y = y }; + return WebKitGradient{ + .radial = .{ + .from = point.deepClone(allocator), + .r0 = 0.0, + .to = point, + .r1 = radius, + .stops = convertStopsToWebkit(allocator, &radial.items) orelse return null, + }, + }; + }, + else => return null, + } + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(this: *const WebKitGradient, other: *const WebKitGradient) bool { + return switch (this.*) { + .linear => |*a| switch (other.*) { + .linear => a.from.eql(&other.linear.from) and a.to.eql(&other.linear.to) and css.generic.eqlList(WebKitColorStop, &a.stops, &other.linear.stops), + else => false, + }, + .radial => |*a| switch (other.*) { + .radial => a.from.eql(&other.radial.from) and a.to.eql(&other.radial.to) and a.r0 == other.radial.r0 and a.r1 == other.radial.r1 and css.generic.eqlList(WebKitColorStop, &a.stops, &other.radial.stops), + else => false, + }, + }; + } +}; + +/// The direction of a CSS `linear-gradient()`. +/// +/// See [LinearGradient](LinearGradient). +pub const LineDirection = union(enum) { + /// An angle. + angle: Angle, + /// A horizontal position keyword, e.g. `left` or `right`. + horizontal: HorizontalPositionKeyword, + /// A vertical position keyword, e.g. `top` or `bottom`. + vertical: VerticalPositionKeyword, + /// A corner, e.g. `bottom left` or `top right`. + corner: struct { + /// A horizontal position keyword, e.g. `left` or `right`. + horizontal: HorizontalPositionKeyword, + /// A vertical position keyword, e.g. `top` or `bottom`. + vertical: VerticalPositionKeyword, + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + }, + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(this: *const LineDirection, other: *const LineDirection) bool { + return switch (this.*) { + .angle => |*a| switch (other.*) { + .angle => a.eql(&other.angle), + else => false, + }, + .horizontal => |*v| switch (other.*) { + .horizontal => v.* == other.horizontal, + else => false, + }, + .vertical => |*v| switch (other.*) { + .vertical => v.* == other.vertical, + else => false, + }, + .corner => |*c| switch (other.*) { + .corner => c.horizontal == other.corner.horizontal and c.vertical == other.corner.vertical, + else => false, + }, + }; + } + + pub fn parse(input: *css.Parser, is_prefixed: bool) Result(LineDirection) { + // Spec allows unitless zero angles for gradients. 
+ // https://w3c.github.io/csswg-drafts/css-images-3/#linear-gradient-syntax + if (input.tryParse(Angle.parseWithUnitlessZero, .{}).asValue()) |angle| { + return .{ .result = LineDirection{ .angle = angle } }; + } + + if (!is_prefixed) { + if (input.expectIdentMatching("to").asErr()) |e| return .{ .err = e }; + } + + if (input.tryParse(HorizontalPositionKeyword.parse, .{}).asValue()) |x| { + if (input.tryParse(VerticalPositionKeyword.parse, .{}).asValue()) |y| { + return .{ .result = LineDirection{ .corner = .{ + .horizontal = x, + .vertical = y, + } } }; + } + return .{ .result = LineDirection{ .horizontal = x } }; + } + + const y = switch (VerticalPositionKeyword.parse(input)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + if (input.tryParse(HorizontalPositionKeyword.parse, .{}).asValue()) |x| { + return .{ .result = LineDirection{ .corner = .{ + .horizontal = x, + .vertical = y, + } } }; + } + return .{ .result = LineDirection{ .vertical = y } }; + } + + pub fn toCss(this: *const LineDirection, comptime W: type, dest: *Printer(W), is_prefixed: bool) PrintErr!void { + switch (this.*) { + .angle => |*angle| try angle.toCss(W, dest), + .horizontal => |*k| { + if (dest.minify) { + try dest.writeStr(switch (k.*) { + .left => "270deg", + .right => "90deg", + }); + } else { + if (!is_prefixed) { + try dest.writeStr("to "); + } + try k.toCss(W, dest); + } + }, + .vertical => |*k| { + if (dest.minify) { + try dest.writeStr(switch (k.*) { + .top => "0deg", + .bottom => "180deg", + }); + } else { + if (!is_prefixed) { + try dest.writeStr("to "); + } + try k.toCss(W, dest); + } + }, + .corner => |*c| { + if (!is_prefixed) { + try dest.writeStr("to "); + } + try c.vertical.toCss(W, dest); + try dest.writeChar(' '); + try c.horizontal.toCss(W, dest); + }, + } + } +}; + +/// Either a color stop or interpolation hint within a gradient. +/// +/// This type is generic, and items may be either a [LengthPercentage](super::length::LengthPercentage) +/// or [Angle](super::angle::Angle) depending on what type of gradient it is within. +pub fn GradientItem(comptime D: type) type { + return union(enum) { + /// A color stop. + color_stop: ColorStop(D), + /// A color interpolation hint. + hint: D, + + pub fn toCss(this: *const @This(), comptime W: type, dest: *Printer(W)) PrintErr!void { + return switch (this.*) { + .color_stop => |*c| try c.toCss(W, dest), + .hint => |*h| try css.generic.toCss(D, h, W, dest), + }; + } + + pub fn eql(this: *const GradientItem(D), other: *const GradientItem(D)) bool { + return switch (this.*) { + .color_stop => |*a| switch (other.*) { + .color_stop => a.eql(&other.color_stop), + else => false, + }, + .hint => |*a| switch (other.*) { + .hint => css.generic.eql(D, a, &other.hint), + else => false, + }, + }; + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn isCompatible(this: *const @This(), browsers: css.targets.Browsers) bool { + return switch (this.*) { + .color_stop => |*c| c.color.isCompatible(browsers), + .hint => css.compat.Feature.isCompatible(.gradient_interpolation_hints, browsers), + }; + } + + /// Returns a fallback gradient item for the given color fallback type. 
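+ /// For a `.color_stop`, this produces a copy whose color uses the requested fallback
+ /// (e.g. a `lab()` color lowered to `rgb()`); `.hint` values are deep-cloned unchanged.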
+ pub fn getFallback(this: *const @This(), allocator: Allocator, kind: css.ColorFallbackKind) GradientItem(D) { + return switch (this.*) { + .color_stop => |*stop| .{ + .color_stop = .{ + .color = stop.color.getFallback(allocator, kind), + .position = if (stop.position) |*p| p.deepClone(allocator) else null, + }, + }, + .hint => this.deepClone(allocator), + }; + } + + /// Returns the color fallback types needed for the given browser targets. + pub fn getNecessaryFallbacks(this: *const @This(), targets: css.targets.Targets) css.ColorFallbackKind { + return switch (this.*) { + .color_stop => |*stop| stop.color.getNecessaryFallbacks(targets), + .hint => css.ColorFallbackKind.empty(), + }; + } + }; +} + +/// A `radial-gradient()` [ending shape](https://www.w3.org/TR/css-images-3/#valdef-radial-gradient-ending-shape). +/// +/// See [RadialGradient](RadialGradient). +pub const EndingShape = union(enum) { + /// An ellipse. + ellipse: Ellipse, + /// A circle. + circle: Circle, + + pub usingnamespace css.DeriveParse(@This()); + pub usingnamespace css.DeriveToCss(@This()); + + pub fn default() EndingShape { + return .{ .ellipse = .{ .extent = .@"farthest-corner" } }; + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(this: *const EndingShape, other: *const EndingShape) bool { + return switch (this.*) { + .ellipse => |*a| switch (other.*) { + .ellipse => a.eql(&other.ellipse), + else => false, + }, + .circle => |*a| switch (other.*) { + .circle => a.eql(&other.circle), + else => false, + }, + }; + } +}; + +/// An x/y position within a legacy `-webkit-gradient()`. +pub const WebKitGradientPoint = struct { + /// The x-position. + x: WebKitGradientPointComponent(HorizontalPositionKeyword), + /// The y-position. + y: WebKitGradientPointComponent(VerticalPositionKeyword), + + pub fn parse(input: *css.Parser) Result(WebKitGradientPoint) { + const x = switch (WebKitGradientPointComponent(HorizontalPositionKeyword).parse(input)) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + const y = switch (WebKitGradientPointComponent(VerticalPositionKeyword).parse(input)) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + return .{ .result = .{ .x = x, .y = y } }; + } + + pub fn toCss(this: *const WebKitGradientPoint, comptime W: type, dest: *Printer(W)) PrintErr!void { + try this.x.toCss(W, dest); + try dest.writeChar(' '); + return try this.y.toCss(W, dest); + } + + pub fn eql(this: *const WebKitGradientPoint, other: *const WebKitGradientPoint) bool { + return this.x.eql(&other.x) and this.y.eql(&other.y); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } +}; + +/// A keyword or number within a [WebKitGradientPoint](WebKitGradientPoint). +pub fn WebKitGradientPointComponent(comptime S: type) type { + return union(enum) { + /// The `center` keyword. + center, + /// A number or percentage. + number: NumberOrPercentage, + /// A side keyword. 
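+ /// e.g. `left`/`right` when `S` is `HorizontalPositionKeyword`, or `top`/`bottom`
+ /// when `S` is `VerticalPositionKeyword`.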
+ side: S, + + const This = @This(); + + pub fn parse(input: *css.Parser) Result(This) { + if (input.tryParse(css.Parser.expectIdentMatching, .{"center"}).isOk()) { + return .{ .result = .center }; + } + + if (input.tryParse(NumberOrPercentage.parse, .{}).asValue()) |number| { + return .{ .result = .{ .number = number } }; + } + + const keyword = switch (css.generic.parse(S, input)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + return .{ .result = .{ .side = keyword } }; + } + + pub fn toCss(this: *const This, comptime W: type, dest: *Printer(W)) PrintErr!void { + switch (this.*) { + .center => { + if (dest.minify) { + try dest.writeStr("50%"); + } else { + try dest.writeStr("center"); + } + }, + .number => |*lp| { + if (lp.* == .percentage and lp.percentage.v == 0.0) { + try dest.writeChar('0'); + } else { + try lp.toCss(W, dest); + } + }, + .side => |*s| { + if (dest.minify) { + const lp: LengthPercentage = s.intoLengthPercentage(); + try lp.toCss(W, dest); + } else { + try s.toCss(W, dest); + } + }, + } + } + + /// Attempts to convert a standard position to a webkit gradient point. + pub fn fromPosition(this: *const css.css_values.position.PositionComponent(S), allocator: Allocator) ?WebKitGradientPointComponent(S) { + return switch (this.*) { + .center => .center, + .length => |len| .{ + .number = switch (len) { + .percentage => |p| .{ .percentage = p }, + // Webkit gradient points can only be specified in pixels. + .dimension => |*d| if (d.toPx()) |px| .{ .number = px } else return null, + else => return null, + }, + }, + .side => |s| if (s.offset != null) + return null + else + .{ + .side = s.side.deepClone(allocator), + }, + }; + } + + pub fn eql(this: *const This, other: *const This) bool { + return switch (this.*) { + .center => switch (other.*) { + .center => true, + else => false, + }, + .number => |*a| switch (other.*) { + .number => a.eql(&other.number), + else => false, + }, + .side => |*a| switch (other.*) { + .side => |*b| a.eql(&b.*), + else => false, + }, + }; + } + }; +} + +/// A color stop within a legacy `-webkit-gradient()`. +pub const WebKitColorStop = struct { + /// The color of the color stop. + color: CssColor, + /// The position of the color stop. 
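+ /// A number between 0 and 1; 0 serializes as `from()`, 1 as `to()`, and anything else as `color-stop()`.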
+ position: CSSNumber, + + pub fn parse(input: *css.Parser) Result(WebKitColorStop) { + const location = input.currentSourceLocation(); + const function = switch (input.expectFunction()) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + const Closure = struct { loc: css.SourceLocation, function: []const u8 }; + return input.parseNestedBlock( + WebKitColorStop, + Closure{ .loc = location, .function = function }, + struct { + fn parse( + closure: Closure, + i: *css.Parser, + ) Result(WebKitColorStop) { + // todo_stuff.match_ignore_ascii_case + const position: f32 = if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(closure.function, "color-stop")) position: { + const p: NumberOrPercentage = switch (@call(.auto, @field(NumberOrPercentage, "parse"), .{i})) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + if (i.expectComma().asErr()) |e| return .{ .err = e }; + break :position p.intoF32(); + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(closure.function, "from")) position: { + break :position 0.0; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(closure.function, "to")) position: { + break :position 1.0; + } else { + return .{ .err = closure.loc.newUnexpectedTokenError(.{ .ident = closure.function }) }; + }; + const color = switch (CssColor.parse(i)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + return .{ .result = WebKitColorStop{ .color = color, .position = position } }; + } + }.parse, + ); + } + + pub fn toCss(this: *const WebKitColorStop, comptime W: type, dest: *Printer(W)) PrintErr!void { + if (this.position == 0.0) { + try dest.writeStr("from("); + try this.color.toCss(W, dest); + } else if (this.position == 1.0) { + try dest.writeStr("to("); + try this.color.toCss(W, dest); + } else { + try dest.writeStr("color-stop("); + try css.generic.toCss(CSSNumber, &this.position, W, dest); + try dest.delim(',', false); + try this.color.toCss(W, dest); + } + try dest.writeChar(')'); + } + + pub fn getFallback(this: *const @This(), allocator: Allocator, kind: css.ColorFallbackKind) WebKitColorStop { + return WebKitColorStop{ + .color = this.color.getFallback(allocator, kind), + .position = this.position, + }; + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(this: *const WebKitColorStop, other: *const WebKitColorStop) bool { + return css.implementEql(WebKitColorStop, this, other); + } +}; + +/// A [``](https://www.w3.org/TR/css-images-4/#color-stop-syntax) within a gradient. +/// +/// This type is generic, and may be either a [LengthPercentage](super::length::LengthPercentage) +/// or [Angle](super::angle::Angle) depending on what type of gradient it is within. +pub fn ColorStop(comptime D: type) type { + return struct { + /// The color of the color stop. + color: CssColor, + /// The position of the color stop. 
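+ /// A `LengthPercentage` in linear and radial gradients, or an `Angle` in conic gradients.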
+ position: ?D, + + const This = @This(); + + pub fn parse(input: *css.Parser) Result(ColorStop(D)) { + const color = switch (CssColor.parse(input)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + const position = switch (input.tryParse(css.generic.parseFor(D), .{})) { + .result => |v| v, + .err => null, + }; + return .{ .result = .{ .color = color, .position = position } }; + } + + pub fn toCss(this: *const This, comptime W: type, dest: *Printer(W)) PrintErr!void { + try this.color.toCss(W, dest); + if (this.position) |*position| { + try dest.delim(',', false); + try css.generic.toCss(D, position, W, dest); + } + return; + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(this: *const This, other: *const This) bool { + return this.color.eql(&other.color) and css.generic.eql(?D, &this.position, &other.position); + } + }; +} + +/// An ellipse ending shape for a `radial-gradient()`. +/// +/// See [RadialGradient](RadialGradient). +pub const Ellipse = union(enum) { + /// An ellipse with a specified horizontal and vertical radius. + size: struct { + /// The x-radius of the ellipse. + x: LengthPercentage, + /// The y-radius of the ellipse. + y: LengthPercentage, + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + }, + /// A shape extent keyword. + extent: ShapeExtent, + + pub fn parse(input: *css.Parser) Result(Ellipse) { + if (input.tryParse(ShapeExtent.parse, .{}).asValue()) |extent| { + // The `ellipse` keyword is optional, but only if the `circle` keyword is not present. + // If it is, then we'll re-parse as a circle. + if (input.tryParse(css.Parser.expectIdentMatching, .{"circle"}).isOk()) { + return .{ .err = input.newErrorForNextToken() }; + } + _ = input.tryParse(css.Parser.expectIdentMatching, .{"ellipse"}); + return .{ .result = Ellipse{ .extent = extent } }; + } + + if (input.tryParse(LengthPercentage.parse, .{}).asValue()) |x| { + const y = switch (LengthPercentage.parse(input)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + // The `ellipse` keyword is optional if there are two lengths. + _ = input.tryParse(css.Parser.expectIdentMatching, .{"ellipse"}); + return .{ .result = Ellipse{ .size = .{ .x = x, .y = y } } }; + } + + if (input.tryParse(css.Parser.expectIdentMatching, .{"ellipse"}).isOk()) { + if (input.tryParse(ShapeExtent.parse, .{}).asValue()) |extent| { + return .{ .result = Ellipse{ .extent = extent } }; + } + + if (input.tryParse(LengthPercentage.parse, .{}).asValue()) |x| { + const y = switch (LengthPercentage.parse(input)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + return .{ .result = Ellipse{ .size = .{ .x = x, .y = y } } }; + } + + // Assume `farthest-corner` if only the `ellipse` keyword is present. + return .{ .result = Ellipse{ .extent = .@"farthest-corner" } }; + } + + return .{ .err = input.newErrorForNextToken() }; + } + + pub fn toCss(this: *const Ellipse, comptime W: type, dest: *Printer(W)) PrintErr!void { + // The `ellipse` keyword is optional, so we don't emit it. 
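+ // e.g. `radial-gradient(20px 40px, ...)` rather than `radial-gradient(ellipse 20px 40px, ...)`.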
+ return switch (this.*) { + .size => |*s| { + try s.x.toCss(W, dest); + try dest.writeChar(' '); + return try s.y.toCss(W, dest); + }, + .extent => |*e| try e.toCss(W, dest), + }; + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(this: *const Ellipse, other: *const Ellipse) bool { + return this.size.x.eql(&other.size.x) and this.size.y.eql(&other.size.y) and this.extent.eql(&other.extent); + } +}; + +pub const ShapeExtent = enum { + /// The closest side of the box to the gradient's center. + @"closest-side", + /// The farthest side of the box from the gradient's center. + @"farthest-side", + /// The closest corner of the box to the gradient's center. + @"closest-corner", + /// The farthest corner of the box from the gradient's center. + @"farthest-corner", + + pub fn eql(this: *const ShapeExtent, other: *const ShapeExtent) bool { + return this.* == other.*; + } + + pub fn asStr(this: *const @This()) []const u8 { + return css.enum_property_util.asStr(@This(), this); + } + + pub fn parse(input: *css.Parser) Result(@This()) { + return css.enum_property_util.parse(@This(), input); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn toCss(this: *const @This(), comptime W: type, dest: *Printer(W)) PrintErr!void { + return css.enum_property_util.toCss(@This(), this, W, dest); + } +}; + +/// A circle ending shape for a `radial-gradient()`. +/// +/// See [RadialGradient](RadialGradient). +pub const Circle = union(enum) { + /// A circle with a specified radius. + radius: Length, + /// A shape extent keyword. + extent: ShapeExtent, + + pub fn parse(input: *css.Parser) Result(Circle) { + if (input.tryParse(ShapeExtent.parse, .{}).asValue()) |extent| { + // The `circle` keyword is required. If it's not there, then it's an ellipse. + if (input.expectIdentMatching("circle").asErr()) |e| return .{ .err = e }; + return .{ .result = Circle{ .extent = extent } }; + } + + if (input.tryParse(Length.parse, .{}).asValue()) |length| { + // The `circle` keyword is optional if there is only a single length. + // We are assuming here that Ellipse.parse ran first. + _ = input.tryParse(css.Parser.expectIdentMatching, .{"circle"}); + return .{ .result = Circle{ .radius = length } }; + } + + if (input.tryParse(css.Parser.expectIdentMatching, .{"circle"}).isOk()) { + if (input.tryParse(ShapeExtent.parse, .{}).asValue()) |extent| { + return .{ .result = Circle{ .extent = extent } }; + } + + if (input.tryParse(Length.parse, .{}).asValue()) |length| { + return .{ .result = Circle{ .radius = length } }; + } + + // If only the `circle` keyword was given, default to `farthest-corner`. 
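+ // (`farthest-corner` is also the spec default; see `EndingShape.default`.)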
+ return .{ .result = Circle{ .extent = .@"farthest-corner" } }; + } + + return .{ .err = input.newErrorForNextToken() }; + } + + pub fn toCss(this: *const Circle, comptime W: type, dest: *Printer(W)) PrintErr!void { + return switch (this.*) { + .radius => |r| try r.toCss(W, dest), + .extent => |extent| { + try dest.writeStr("circle"); + if (extent != .@"farthest-corner") { + try dest.writeChar(' '); + try extent.toCss(W, dest); + } + }, + }; + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(this: *const Circle, other: *const Circle) bool { + return this.radius.eql(&other.radius) and this.extent.eql(&other.extent); + } +}; + +pub fn parseItems(comptime D: type, input: *css.Parser) Result(ArrayList(GradientItem(D))) { + var items = ArrayList(GradientItem(D)){}; + var seen_stop = false; + + while (true) { + const Closure = struct { items: *ArrayList(GradientItem(D)), seen_stop: *bool }; + if (input.parseUntilBefore( + css.Delimiters{ .comma = true }, + void, + Closure{ .items = &items, .seen_stop = &seen_stop }, + struct { + fn parse(closure: Closure, i: *css.Parser) Result(void) { + if (closure.seen_stop.*) { + if (i.tryParse(comptime css.generic.parseFor(D), .{}).asValue()) |hint| { + closure.seen_stop.* = false; + closure.items.append(i.allocator(), .{ .hint = hint }) catch bun.outOfMemory(); + return Result(void).success; + } + } + + const stop = switch (ColorStop(D).parse(i)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + + if (i.tryParse(comptime css.generic.parseFor(D), .{}).asValue()) |position| { + const color = stop.color.deepClone(i.allocator()); + closure.items.append(i.allocator(), .{ .color_stop = stop }) catch bun.outOfMemory(); + closure.items.append(i.allocator(), .{ .color_stop = .{ + .color = color, + .position = position, + } }) catch bun.outOfMemory(); + } else { + closure.items.append(i.allocator(), .{ .color_stop = stop }) catch bun.outOfMemory(); + } + + closure.seen_stop.* = true; + return Result(void).success; + } + }.parse, + ).asErr()) |e| return .{ .err = e }; + + if (input.next().asValue()) |tok| { + if (tok.* == .comma) continue; + bun.unreachablePanic("expected a comma after parsing a gradient", .{}); + } else { + break; + } + } + + return .{ .result = items }; +} + +pub fn serializeItems( + comptime D: type, + items: *const ArrayList(GradientItem(D)), + comptime W: type, + dest: *Printer(W), +) PrintErr!void { + var first = true; + var last: ?*const GradientItem(D) = null; + for (items.items) |*item| { + // Skip useless hints + if (item.* == .hint and item.hint == .percentage and item.hint.percentage.v == 0.5) { + continue; + } + + // Use double position stop if the last stop is the same color and all targets support it. 
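+ // e.g. `red 20%, red 40%` collapses to the double position stop `red 20% 40%`.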
+ if (last) |prev| { + if (!dest.targets.shouldCompile(.double_position_gradients, .{ .double_position_gradients = true })) { + if (prev.* == .color_stop and prev.color_stop.position != null and + item.* == .color_stop and item.color_stop.position != null and + prev.color_stop.color.eql(&item.color_stop.color)) + { + try dest.writeChar(' '); + try item.color_stop.position.?.toCss(W, dest); + last = null; + continue; + } + } + } + + if (first) { + first = false; + } else { + try dest.delim(',', false); + } + try item.toCss(W, dest); + last = item; + } +} + +pub fn convertStopsToWebkit(allocator: Allocator, items: *const ArrayList(GradientItem(LengthPercentage))) ?ArrayList(WebKitColorStop) { + var stops: ArrayList(WebKitColorStop) = ArrayList(WebKitColorStop).initCapacity(allocator, items.items.len) catch bun.outOfMemory(); + for (items.items, 0..) |*item, i| { + switch (item.*) { + .color_stop => |*stop| { + // webkit stops must always be percentage based, not length based. + const position: f32 = if (stop.position) |pos| brk: { + break :brk switch (pos) { + .percentage => |percentage| percentage.v, + else => { + stops.deinit(allocator); + return null; + }, + }; + } else if (i == 0) brk: { + break :brk 0.0; + } else if (i == items.items.len - 1) brk: { + break :brk 1.0; + } else { + stops.deinit(allocator); + return null; + }; + + stops.append(allocator, .{ + .color = stop.color.deepClone(allocator), + .position = position, + }) catch return null; + }, + else => return null, + } + } + + return stops; +} diff --git a/src/css/values/ident.zig b/src/css/values/ident.zig new file mode 100644 index 0000000000..ee861540c9 --- /dev/null +++ b/src/css/values/ident.zig @@ -0,0 +1,197 @@ +const std = @import("std"); +const Allocator = std.mem.Allocator; +const bun = @import("root").bun; +const logger = bun.logger; +const Log = logger.Log; + +pub const css = @import("../css_parser.zig"); +pub const Result = css.Result; +pub const Printer = css.Printer; +pub const PrintErr = css.PrintErr; + +pub const Specifier = css.css_properties.css_modules.Specifier; + +/// A CSS [``](https://www.w3.org/TR/css-values-4/#dashed-idents) reference. +/// +/// Dashed idents are used in cases where an identifier can be either author defined _or_ CSS-defined. +/// Author defined idents must start with two dash characters ("--") or parsing will fail. +/// +/// In CSS modules, when the `dashed_idents` option is enabled, the identifier may be followed by the +/// `from` keyword and an argument indicating where the referenced identifier is declared (e.g. a filename). +pub const DashedIdentReference = struct { + /// The referenced identifier. + ident: DashedIdent, + /// CSS modules extension: the filename where the variable is defined. + /// Only enabled when the CSS modules `dashed_idents` option is turned on. 
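+ /// e.g. `--foo from "./tokens.css"` resolves `--foo` against the referenced file.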
+ from: ?Specifier, + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub fn parseWithOptions(input: *css.Parser, options: *const css.ParserOptions) Result(DashedIdentReference) { + const ident = switch (DashedIdentFns.parse(input)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + + const from = if (options.css_modules != null and options.css_modules.?.dashed_idents) from: { + if (input.tryParse(css.Parser.expectIdentMatching, .{"from"}).isOk()) break :from switch (Specifier.parse(input)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + break :from null; + } else null; + + return .{ .result = DashedIdentReference{ .ident = ident, .from = from } }; + } + + pub fn toCss(this: *const @This(), comptime W: type, dest: *Printer(W)) PrintErr!void { + if (dest.css_module) |*css_module| { + if (css_module.config.dashed_idents) { + if (css_module.referenceDashed(this.ident.v, &this.from, dest.loc.source_index)) |name| { + try dest.writeStr("--"); + css.serializer.serializeName(name, dest) catch return dest.addFmtError(); + return; + } + } + } + + return dest.writeDashedIdent(&this.ident, false); + } + + pub fn hash(this: *const @This(), hasher: *std.hash.Wyhash) void { + return css.implementHash(@This(), this, hasher); + } +}; + +pub const DashedIdentFns = DashedIdent; +/// A CSS [``](https://www.w3.org/TR/css-values-4/#dashed-idents) declaration. +/// +/// Dashed idents are used in cases where an identifier can be either author defined _or_ CSS-defined. +/// Author defined idents must start with two dash characters ("--") or parsing will fail. +pub const DashedIdent = struct { + v: []const u8, + + pub fn HashMap(comptime V: type) type { + return std.ArrayHashMapUnmanaged( + DashedIdent, + V, + struct { + pub fn hash(_: @This(), s: DashedIdent) u32 { + return std.array_hash_map.hashString(s.v); + } + pub fn eql(_: @This(), a: DashedIdent, b: DashedIdent, _: usize) bool { + return bun.strings.eql(a, b); + } + }, + false, + ); + } + + pub fn parse(input: *css.Parser) Result(DashedIdent) { + const location = input.currentSourceLocation(); + const ident = switch (input.expectIdent()) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + if (!bun.strings.startsWith(ident, "--")) return .{ .err = location.newUnexpectedTokenError(.{ .ident = ident }) }; + + return .{ .result = .{ .v = ident } }; + } + + const This = @This(); + + pub fn toCss(this: *const DashedIdent, comptime W: type, dest: *Printer(W)) PrintErr!void { + return dest.writeDashedIdent(this, true); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn hash(this: *const @This(), hasher: *std.hash.Wyhash) void { + return css.implementHash(@This(), this, hasher); + } +}; + +/// A CSS [``](https://www.w3.org/TR/css-values-4/#css-css-identifier). 
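+/// Unlike `DashedIdent`, a plain identifier is not required to start with "--".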
+pub const IdentFns = Ident; +pub const Ident = struct { + v: []const u8, + + pub fn parse(input: *css.Parser) Result(Ident) { + const ident = switch (input.expectIdent()) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + return .{ .result = .{ .v = ident } }; + } + + pub fn toCss(this: *const Ident, comptime W: type, dest: *Printer(W)) PrintErr!void { + return css.serializer.serializeIdentifier(this.v, dest) catch return dest.addFmtError(); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn hash(this: *const @This(), hasher: *std.hash.Wyhash) void { + return css.implementHash(@This(), this, hasher); + } +}; + +pub const CustomIdentFns = CustomIdent; +pub const CustomIdent = struct { + v: []const u8, + + pub fn parse(input: *css.Parser) Result(CustomIdent) { + const location = input.currentSourceLocation(); + const ident = switch (input.expectIdent()) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + // css.todo_stuff.match_ignore_ascii_case + const valid = !(bun.strings.eqlCaseInsensitiveASCIIICheckLength(ident, "initial") or + bun.strings.eqlCaseInsensitiveASCIIICheckLength(ident, "inherit") or + bun.strings.eqlCaseInsensitiveASCIIICheckLength(ident, "unset") or + bun.strings.eqlCaseInsensitiveASCIIICheckLength(ident, "default") or + bun.strings.eqlCaseInsensitiveASCIIICheckLength(ident, "revert") or + bun.strings.eqlCaseInsensitiveASCIIICheckLength(ident, "revert-layer")); + + if (!valid) return .{ .err = location.newUnexpectedTokenError(.{ .ident = ident }) }; + return .{ .result = .{ .v = ident } }; + } + + const This = @This(); + + pub fn toCss(this: *const CustomIdent, comptime W: type, dest: *Printer(W)) PrintErr!void { + return @This().toCssWithOptions(this, W, dest, true); + } + + /// Write the custom ident to CSS. + pub fn toCssWithOptions( + this: *const CustomIdent, + comptime W: type, + dest: *Printer(W), + enabled_css_modules: bool, + ) PrintErr!void { + const css_module_custom_idents_enabled = enabled_css_modules and + if (dest.css_module) |*css_module| + css_module.config.custom_idents + else + false; + return dest.writeIdent(this.v, css_module_custom_idents_enabled); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn hash(this: *const @This(), hasher: *std.hash.Wyhash) void { + return css.implementHash(@This(), this, hasher); + } +}; + +/// A list of CSS [``](https://www.w3.org/TR/css-values-4/#custom-idents) values. 
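+/// Backed by a `SmallList` with an inline capacity of one ident.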
+pub const CustomIdentList = css.SmallList(CustomIdent, 1); diff --git a/src/css/values/image.zig b/src/css/values/image.zig new file mode 100644 index 0000000000..b19fe98efe --- /dev/null +++ b/src/css/values/image.zig @@ -0,0 +1,367 @@ +const std = @import("std"); +const Allocator = std.mem.Allocator; +const bun = @import("root").bun; +pub const css = @import("../css_parser.zig"); +const Result = css.Result; +const ArrayList = std.ArrayListUnmanaged; +const Printer = css.Printer; +const PrintErr = css.PrintErr; +const CSSNumber = css.css_values.number.CSSNumber; +const CSSNumberFns = css.css_values.number.CSSNumberFns; +const Url = css.css_values.url.Url; +const Gradient = css.css_values.gradient.Gradient; +const Resolution = css.css_values.resolution.Resolution; +const VendorPrefix = css.VendorPrefix; +const UrlDependency = css.dependencies.UrlDependency; + +/// A CSS [``](https://www.w3.org/TR/css-images-3/#image-values) value. +pub const Image = union(enum) { + /// The `none` keyword. + none, + /// A `url()`. + url: Url, + /// A gradient. + gradient: *Gradient, + /// An `image-set()`. + image_set: ImageSet, + + pub usingnamespace css.DeriveParse(@This()); + pub usingnamespace css.DeriveToCss(@This()); + + pub fn deinit(_: *@This(), _: std.mem.Allocator) void { + // TODO: implement this + // Right now not implementing this. + // It is not a bug to implement this since all memory allocated in CSS parser is allocated into arena. + } + + pub fn isCompatible(this: *const @This(), browsers: css.targets.Browsers) bool { + return switch (this.*) { + .gradient => |g| switch (g.*) { + .linear => |linear| css.Feature.isCompatible(.linear_gradient, browsers) and linear.isCompatible(browsers), + .repeating_linear => |repeating_linear| css.Feature.isCompatible(.repeating_linear_gradient, browsers) and repeating_linear.isCompatible(browsers), + .radial => |radial| css.Feature.isCompatible(.radial_gradient, browsers) and radial.isCompatible(browsers), + .repeating_radial => |repeating_radial| css.Feature.isCompatible(.repeating_radial_gradient, browsers) and repeating_radial.isCompatible(browsers), + .conic => |conic| css.Feature.isCompatible(.conic_gradient, browsers) and conic.isCompatible(browsers), + .repeating_conic => |repeating_conic| css.Feature.isCompatible(.repeating_conic_gradient, browsers) and repeating_conic.isCompatible(browsers), + .@"webkit-gradient" => css.prefixes.Feature.isWebkitGradient(browsers), + }, + .image_set => |image_set| image_set.isCompatible(browsers), + .url, .none => true, + }; + } + + pub fn getPrefixed(this: *const @This(), allocator: Allocator, prefix: css.VendorPrefix) Image { + return switch (this.*) { + .gradient => |grad| .{ .gradient = bun.create(allocator, Gradient, grad.getPrefixed(allocator, prefix)) }, + .image_set => |image_set| .{ .image_set = image_set.getPrefixed(allocator, prefix) }, + else => this.deepClone(allocator), + }; + } + + pub fn getNecessaryPrefixes(this: *const @This(), targets: css.targets.Targets) css.VendorPrefix { + return switch (this.*) { + .gradient => |grad| grad.getNecessaryPrefixes(targets), + .image_set => |*image_set| image_set.getNecessaryPrefixes(targets), + else => css.VendorPrefix{ .none = true }, + }; + } + + pub fn hasVendorPrefix(this: *const @This()) bool { + const prefix = this.getVendorPrefix(); + return !prefix.isEmpty() and !prefix.eq(VendorPrefix{ .none = true }); + } + + /// Returns the vendor prefix used in the image value. 
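+ /// `none` and `url()` images never carry a prefix, so an empty set is returned for them.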
+ pub fn getVendorPrefix(this: *const @This()) VendorPrefix { + return switch (this.*) { + .gradient => |a| a.getVendorPrefix(), + .image_set => |a| a.getVendorPrefix(), + else => VendorPrefix.empty(), + }; + } + + /// Needed to satisfy ImageFallback interface + pub fn getImage(this: *const @This()) *const Image { + return this; + } + + /// Needed to satisfy ImageFallback interface + pub fn withImage(_: *const @This(), _: Allocator, image: Image) @This() { + return image; + } + + pub fn default() Image { + return .none; + } + + pub inline fn eql(this: *const Image, other: *const Image) bool { + return switch (this.*) { + .none => switch (other.*) { + .none => true, + else => false, + }, + .url => |*a| switch (other.*) { + .url => a.eql(&other.url), + else => false, + }, + .image_set => |*a| switch (other.*) { + .image_set => a.eql(&other.image_set), + else => false, + }, + .gradient => |a| switch (other.*) { + .gradient => a.eql(other.gradient), + else => false, + }, + }; + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + /// Returns a legacy `-webkit-gradient()` value for the image. + /// + /// May return an error in case the gradient cannot be converted. + pub fn getLegacyWebkit(this: *const @This(), allocator: Allocator) ?Image { + return switch (this.*) { + .gradient => |gradient| Image{ .gradient = bun.create(allocator, Gradient, gradient.getLegacyWebkit(allocator) orelse return null) }, + else => this.deepClone(allocator), + }; + } + + pub fn getFallback(this: *const @This(), allocator: Allocator, kind: css.ColorFallbackKind) Image { + return switch (this.*) { + .gradient => |grad| .{ .gradient = bun.create(allocator, Gradient, grad.getFallback(allocator, kind)) }, + else => this.deepClone(allocator), + }; + } + + pub fn getNecessaryFallbacks(this: *const @This(), targets: css.targets.Targets) css.ColorFallbackKind { + return switch (this.*) { + .gradient => |grad| grad.getNecessaryFallbacks(targets), + else => css.ColorFallbackKind.empty(), + }; + } + + // pub fn parse(input: *css.Parser) Result(Image) { + // _ = input; // autofix + // @panic(css.todo_stuff.depth); + // } + + // pub fn toCss(this: *const Image, comptime W: type, dest: *css.Printer(W)) css.PrintErr!void { + // _ = this; // autofix + // _ = dest; // autofix + // @panic(css.todo_stuff.depth); + // } +}; + +/// A CSS [`image-set()`](https://drafts.csswg.org/css-images-4/#image-set-notation) value. +/// +/// `image-set()` allows the user agent to choose between multiple versions of an image to +/// display the most appropriate resolution or file type that it supports. +pub const ImageSet = struct { + /// The image options to choose from. + options: ArrayList(ImageSetOption), + + /// The vendor prefix for the `image-set()` function. 
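+ /// Either unprefixed (`image-set()`) or webkit-prefixed (`-webkit-image-set()`).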
+ vendor_prefix: VendorPrefix, + + pub fn parse(input: *css.Parser) Result(ImageSet) { + const location = input.currentSourceLocation(); + const f = switch (input.expectFunction()) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + const vendor_prefix = vendor_prefix: { + // todo_stuff.match_ignore_ascii_case + if (bun.strings.eqlCaseInsensitiveASCIIICheckLength("image-set", f)) { + break :vendor_prefix VendorPrefix{ .none = true }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength("-webkit-image-set", f)) { + break :vendor_prefix VendorPrefix{ .webkit = true }; + } else return .{ .err = location.newUnexpectedTokenError(.{ .ident = f }) }; + }; + + const Fn = struct { + pub fn parseNestedBlockFn(_: void, i: *css.Parser) Result(ArrayList(ImageSetOption)) { + return i.parseCommaSeparated(ImageSetOption, ImageSetOption.parse); + } + }; + + const options = switch (input.parseNestedBlock(ArrayList(ImageSetOption), {}, Fn.parseNestedBlockFn)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + + return .{ .result = ImageSet{ + .options = options, + .vendor_prefix = vendor_prefix, + } }; + } + + pub fn toCss(this: *const ImageSet, comptime W: type, dest: *css.Printer(W)) PrintErr!void { + try this.vendor_prefix.toCss(W, dest); + try dest.writeStr("image-set("); + var first = true; + for (this.options.items) |*option| { + if (first) { + first = false; + } else { + try dest.delim(',', false); + } + try option.toCss(W, dest, this.vendor_prefix.neq(VendorPrefix{ .none = true })); + } + return dest.writeChar(')'); + } + + pub fn isCompatible(this: *const @This(), browsers: css.targets.Browsers) bool { + return css.Feature.isCompatible(.image_set, browsers) and + for (this.options.items) |opt| + { + if (!opt.image.isCompatible(browsers)) break false; + } else true; + } + + /// Returns the `image-set()` value with the given vendor prefix. + pub fn getPrefixed(this: *const @This(), allocator: Allocator, prefix: css.VendorPrefix) ImageSet { + return ImageSet{ + .options = css.deepClone(ImageSetOption, allocator, &this.options), + .vendor_prefix = prefix, + }; + } + + pub fn eql(this: *const ImageSet, other: *const ImageSet) bool { + return this.vendor_prefix.eql(other.vendor_prefix) and css.generic.eqlList(ImageSetOption, &this.options, &other.options); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn getVendorPrefix(this: *const @This()) VendorPrefix { + return this.vendor_prefix; + } + + /// Returns the vendor prefixes needed for the given browser targets. + pub fn getNecessaryPrefixes(this: *const @This(), targets: css.targets.Targets) css.VendorPrefix { + return targets.prefixes(this.vendor_prefix, css.prefixes.Feature.image_set); + } +}; + +/// An image option within the `image-set()` function. See [ImageSet](ImageSet). +pub const ImageSetOption = struct { + /// The image for this option. + image: Image, + /// The resolution of the image. + resolution: Resolution, + /// The mime type of the image. 
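+ /// Taken from the optional `type(...)` function, e.g. `type("image/avif")`; null when omitted.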
+ file_type: ?[]const u8, + + pub fn parse(input: *css.Parser) Result(ImageSetOption) { + const start_position = input.input.tokenizer.getPosition(); + const loc = input.currentSourceLocation(); + const image = if (input.tryParse(css.Parser.expectUrlOrString, .{}).asValue()) |url| brk: { + const record_idx = switch (input.addImportRecordForUrl( + url, + start_position, + )) { + .result => |idx| idx, + .err => |e| return .{ .err = e }, + }; + break :brk Image{ .url = Url{ + .import_record_idx = record_idx, + .loc = css.dependencies.Location.fromSourceLocation(loc), + } }; + } else switch (@call(.auto, @field(Image, "parse"), .{input})) { // For some reason, `Image.parse` makes zls crash, using this syntax until that's fixed + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + + const resolution: Resolution, const file_type: ?[]const u8 = if (input.tryParse(Resolution.parse, .{}).asValue()) |res| brk: { + const file_type = input.tryParse(parseFileType, .{}).asValue(); + break :brk .{ res, file_type }; + } else brk: { + const file_type = input.tryParse(parseFileType, .{}).asValue(); + const resolution = input.tryParse(Resolution.parse, .{}).unwrapOr(Resolution{ .dppx = 1.0 }); + break :brk .{ resolution, file_type }; + }; + + return .{ .result = ImageSetOption{ + .image = image, + .resolution = resolution, + .file_type = if (file_type) |x| x else null, + } }; + } + + pub fn toCss( + this: *const ImageSetOption, + comptime W: type, + dest: *css.Printer(W), + is_prefixed: bool, + ) PrintErr!void { + if (this.image == .url and !is_prefixed) { + const _dep: ?UrlDependency = if (dest.dependencies != null) + UrlDependency.new(dest.allocator, &this.image.url, dest.filename(), try dest.getImportRecords()) + else + null; + + if (_dep) |dep| { + css.serializer.serializeString(dep.placeholder, dest) catch return dest.addFmtError(); + if (dest.dependencies) |*dependencies| { + dependencies.append( + dest.allocator, + .{ .url = dep }, + ) catch bun.outOfMemory(); + } + } else { + css.serializer.serializeString(try dest.getImportRecordUrl(this.image.url.import_record_idx), dest) catch return dest.addFmtError(); + } + } else { + try this.image.toCss(W, dest); + } + + // TODO: Throwing an error when `self.resolution = Resolution::Dppx(0.0)` + // TODO: -webkit-image-set() does not support ` | | + // | | ` and `type()`. + try dest.writeChar(' '); + + // Safari only supports the x resolution unit in image-set(). + // In other places, x was added as an alias later. + // Temporarily ignore the targets while printing here. 
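+ // Save the current targets, print the resolution with an empty target set, then restore them below.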
+ const targets = targets: { + const targets = dest.targets; + dest.targets = .{}; + break :targets targets; + }; + try this.resolution.toCss(W, dest); + dest.targets = targets; + + if (this.file_type) |file_type| { + try dest.writeStr(" type("); + css.serializer.serializeString(file_type, dest) catch return dest.addFmtError(); + try dest.writeChar(')'); + } + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(lhs: *const ImageSetOption, rhs: *const ImageSetOption) bool { + return lhs.image.eql(&rhs.image) and lhs.resolution.eql(&rhs.resolution) and (brk: { + if (lhs.file_type != null and rhs.file_type != null) { + break :brk bun.strings.eql(lhs.file_type.?, rhs.file_type.?); + } + break :brk false; + }); + } +}; + +fn parseFileType(input: *css.Parser) Result([]const u8) { + if (input.expectFunctionMatching("type").asErr()) |e| return .{ .err = e }; + const Fn = struct { + pub fn parseNestedBlockFn(_: void, i: *css.Parser) Result([]const u8) { + return i.expectString(); + } + }; + return input.parseNestedBlock([]const u8, {}, Fn.parseNestedBlockFn); +} diff --git a/src/css/values/length.zig b/src/css/values/length.zig new file mode 100644 index 0000000000..3cd1fc1b44 --- /dev/null +++ b/src/css/values/length.zig @@ -0,0 +1,800 @@ +const std = @import("std"); +const Allocator = std.mem.Allocator; +const bun = @import("root").bun; +pub const css = @import("../css_parser.zig"); +const Result = css.Result; +const ArrayList = std.ArrayListUnmanaged; +const Printer = css.Printer; +const PrintErr = css.PrintErr; +const CSSNumber = css.css_values.number.CSSNumber; +const CSSNumberFns = css.css_values.number.CSSNumberFns; +const Calc = css.css_values.calc.Calc; +const MathFunction = css.css_values.calc.MathFunction; +const DimensionPercentage = css.css_values.percentage.DimensionPercentage; + +/// Either a [``](https://www.w3.org/TR/css-values-4/#lengths) or a [``](https://www.w3.org/TR/css-values-4/#numbers). +pub const LengthOrNumber = union(enum) { + /// A number. + number: CSSNumber, + /// A length. + length: Length, + + pub usingnamespace css.DeriveParse(@This()); + pub usingnamespace css.DeriveToCss(@This()); + + pub fn default() LengthOrNumber { + return .{ .number = 0.0 }; + } + + pub fn eql(this: *const @This(), other: *const @This()) bool { + return switch (this.*) { + .number => |*n| n.* == other.number, + .length => |*l| l.eql(&other.length), + }; + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } +}; + +pub const LengthPercentage = DimensionPercentage(LengthValue); +/// Either a [``](https://www.w3.org/TR/css-values-4/#typedef-length-percentage), or the `auto` keyword. +pub const LengthPercentageOrAuto = union(enum) { + /// The `auto` keyword. + auto, + /// A [``](https://www.w3.org/TR/css-values-4/#typedef-length-percentage). 
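+ /// i.e. a length, a percentage, or a `calc()` combining the two.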
+ length: LengthPercentage, + + pub usingnamespace css.DeriveParse(@This()); + pub usingnamespace css.DeriveToCss(@This()); + + pub fn isCompatible(this: *const @This(), browsers: css.targets.Browsers) bool { + return switch (this.*) { + .length => this.length.isCompatible(browsers), + else => true, + }; + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } + + pub inline fn deepClone(this: *const @This(), allocator: Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } +}; + +const PX_PER_IN: f32 = 96.0; +const PX_PER_CM: f32 = PX_PER_IN / 2.54; +const PX_PER_MM: f32 = PX_PER_CM / 10.0; +const PX_PER_Q: f32 = PX_PER_CM / 40.0; +const PX_PER_PT: f32 = PX_PER_IN / 72.0; +const PX_PER_PC: f32 = PX_PER_IN / 6.0; + +pub const LengthValue = union(enum) { + // https://www.w3.org/TR/css-values-4/#absolute-lengths + /// A length in pixels. + px: CSSNumber, + /// A length in inches. 1in = 96px. + in: CSSNumber, + /// A length in centimeters. 1cm = 96px / 2.54. + cm: CSSNumber, + /// A length in millimeters. 1mm = 1/10th of 1cm. + mm: CSSNumber, + /// A length in quarter-millimeters. 1Q = 1/40th of 1cm. + q: CSSNumber, + /// A length in points. 1pt = 1/72nd of 1in. + pt: CSSNumber, + /// A length in picas. 1pc = 1/6th of 1in. + pc: CSSNumber, + + // https://www.w3.org/TR/css-values-4/#font-relative-lengths + /// A length in the `em` unit. An `em` is equal to the computed value of the + /// font-size property of the element on which it is used. + em: CSSNumber, + /// A length in the `rem` unit. A `rem` is equal to the computed value of the + /// `em` unit on the root element. + rem: CSSNumber, + /// A length in `ex` unit. An `ex` is equal to the x-height of the font. + ex: CSSNumber, + /// A length in the `rex` unit. A `rex` is equal to the value of the `ex` unit on the root element. + rex: CSSNumber, + /// A length in the `ch` unit. A `ch` is equal to the width of the zero ("0") character in the current font. + ch: CSSNumber, + /// A length in the `rch` unit. An `rch` is equal to the value of the `ch` unit on the root element. + rch: CSSNumber, + /// A length in the `cap` unit. A `cap` is equal to the cap-height of the font. + cap: CSSNumber, + /// A length in the `rcap` unit. An `rcap` is equal to the value of the `cap` unit on the root element. + rcap: CSSNumber, + /// A length in the `ic` unit. An `ic` is equal to the width of the “水” (CJK water ideograph) character in the current font. + ic: CSSNumber, + /// A length in the `ric` unit. An `ric` is equal to the value of the `ic` unit on the root element. + ric: CSSNumber, + /// A length in the `lh` unit. An `lh` is equal to the computed value of the `line-height` property. + lh: CSSNumber, + /// A length in the `rlh` unit. An `rlh` is equal to the value of the `lh` unit on the root element. + rlh: CSSNumber, + + // https://www.w3.org/TR/css-values-4/#viewport-relative-units + /// A length in the `vw` unit. A `vw` is equal to 1% of the [viewport width](https://www.w3.org/TR/css-values-4/#ua-default-viewport-size). + vw: CSSNumber, + /// A length in the `lvw` unit. An `lvw` is equal to 1% of the [large viewport width](https://www.w3.org/TR/css-values-4/#large-viewport-size). + lvw: CSSNumber, + /// A length in the `svw` unit. An `svw` is equal to 1% of the [small viewport width](https://www.w3.org/TR/css-values-4/#small-viewport-size). + svw: CSSNumber, + /// A length in the `dvw` unit. 
An `dvw` is equal to 1% of the [dynamic viewport width](https://www.w3.org/TR/css-values-4/#dynamic-viewport-size). + dvw: CSSNumber, + /// A length in the `cqw` unit. An `cqw` is equal to 1% of the [query container](https://drafts.csswg.org/css-contain-3/#query-container) width. + cqw: CSSNumber, + + /// A length in the `vh` unit. A `vh` is equal to 1% of the [viewport height](https://www.w3.org/TR/css-values-4/#ua-default-viewport-size). + vh: CSSNumber, + /// A length in the `lvh` unit. An `lvh` is equal to 1% of the [large viewport height](https://www.w3.org/TR/css-values-4/#large-viewport-size). + lvh: CSSNumber, + /// A length in the `svh` unit. An `svh` is equal to 1% of the [small viewport height](https://www.w3.org/TR/css-values-4/#small-viewport-size). + svh: CSSNumber, + /// A length in the `dvh` unit. An `dvh` is equal to 1% of the [dynamic viewport height](https://www.w3.org/TR/css-values-4/#dynamic-viewport-size). + dvh: CSSNumber, + /// A length in the `cqh` unit. An `cqh` is equal to 1% of the [query container](https://drafts.csswg.org/css-contain-3/#query-container) height. + cqh: CSSNumber, + + /// A length in the `vi` unit. A `vi` is equal to 1% of the [viewport size](https://www.w3.org/TR/css-values-4/#ua-default-viewport-size) + /// in the box's [inline axis](https://www.w3.org/TR/css-writing-modes-4/#inline-axis). + vi: CSSNumber, + /// A length in the `svi` unit. A `svi` is equal to 1% of the [small viewport size](https://www.w3.org/TR/css-values-4/#small-viewport-size) + /// in the box's [inline axis](https://www.w3.org/TR/css-writing-modes-4/#inline-axis). + svi: CSSNumber, + /// A length in the `lvi` unit. A `lvi` is equal to 1% of the [large viewport size](https://www.w3.org/TR/css-values-4/#large-viewport-size) + /// in the box's [inline axis](https://www.w3.org/TR/css-writing-modes-4/#inline-axis). + lvi: CSSNumber, + /// A length in the `dvi` unit. A `dvi` is equal to 1% of the [dynamic viewport size](https://www.w3.org/TR/css-values-4/#dynamic-viewport-size) + /// in the box's [inline axis](https://www.w3.org/TR/css-writing-modes-4/#inline-axis). + dvi: CSSNumber, + /// A length in the `cqi` unit. An `cqi` is equal to 1% of the [query container](https://drafts.csswg.org/css-contain-3/#query-container) inline size. + cqi: CSSNumber, + + /// A length in the `vb` unit. A `vb` is equal to 1% of the [viewport size](https://www.w3.org/TR/css-values-4/#ua-default-viewport-size) + /// in the box's [block axis](https://www.w3.org/TR/css-writing-modes-4/#block-axis). + vb: CSSNumber, + /// A length in the `svb` unit. A `svb` is equal to 1% of the [small viewport size](https://www.w3.org/TR/css-values-4/#small-viewport-size) + /// in the box's [block axis](https://www.w3.org/TR/css-writing-modes-4/#block-axis). + svb: CSSNumber, + /// A length in the `lvb` unit. A `lvb` is equal to 1% of the [large viewport size](https://www.w3.org/TR/css-values-4/#large-viewport-size) + /// in the box's [block axis](https://www.w3.org/TR/css-writing-modes-4/#block-axis). + lvb: CSSNumber, + /// A length in the `dvb` unit. A `dvb` is equal to 1% of the [dynamic viewport size](https://www.w3.org/TR/css-values-4/#dynamic-viewport-size) + /// in the box's [block axis](https://www.w3.org/TR/css-writing-modes-4/#block-axis). + dvb: CSSNumber, + /// A length in the `cqb` unit. An `cqb` is equal to 1% of the [query container](https://drafts.csswg.org/css-contain-3/#query-container) block size. + cqb: CSSNumber, + + /// A length in the `vmin` unit. 
A `vmin` is equal to the smaller of `vw` and `vh`. + vmin: CSSNumber, + /// A length in the `svmin` unit. An `svmin` is equal to the smaller of `svw` and `svh`. + svmin: CSSNumber, + /// A length in the `lvmin` unit. An `lvmin` is equal to the smaller of `lvw` and `lvh`. + lvmin: CSSNumber, + /// A length in the `dvmin` unit. An `dvmin` is equal to the smaller of `dvw` and `dvh`. + dvmin: CSSNumber, + /// A length in the `cqmin` unit. An `cqmin` is equal to the smaller of `cqi` and `cqb`. + cqmin: CSSNumber, + + /// A length in the `vmax` unit. A `vmax` is equal to the larger of `vw` and `vh`. + vmax: CSSNumber, + /// A length in the `svmax` unit. An `svmax` is equal to the larger of `svw` and `svh`. + svmax: CSSNumber, + /// A length in the `lvmax` unit. An `lvmax` is equal to the larger of `lvw` and `lvh`. + lvmax: CSSNumber, + /// A length in the `dvmax` unit. An `dvmax` is equal to the larger of `dvw` and `dvh`. + dvmax: CSSNumber, + /// A length in the `cqmax` unit. An `cqmin` is equal to the larger of `cqi` and `cqb`. + cqmax: CSSNumber, + + const FeatureMap = .{ + .px = null, + .in = null, + .cm = null, + .mm = null, + .q = css.Feature.q_unit, + .pt = null, + .pc = null, + .em = null, + .rem = css.Feature.rem_unit, + .ex = css.Feature.ex_unit, + .rex = null, + .ch = css.Feature.ch_unit, + .rch = null, + .cap = css.Feature.cap_unit, + .rcap = null, + .ic = css.Feature.ic_unit, + .ric = null, + .lh = css.Feature.lh_unit, + .rlh = css.Feature.rlh_unit, + .vw = css.Feature.vw_unit, + .lvw = css.Feature.viewport_percentage_units_large, + .svw = css.Feature.viewport_percentage_units_small, + .dvw = css.Feature.viewport_percentage_units_dynamic, + .cqw = css.Feature.container_query_length_units, + .vh = css.Feature.vh_unit, + .lvh = css.Feature.viewport_percentage_units_large, + .svh = css.Feature.viewport_percentage_units_small, + .dvh = css.Feature.viewport_percentage_units_dynamic, + .cqh = css.Feature.container_query_length_units, + .vi = css.Feature.vi_unit, + .svi = css.Feature.viewport_percentage_units_small, + .lvi = css.Feature.viewport_percentage_units_large, + .dvi = css.Feature.viewport_percentage_units_dynamic, + .cqi = css.Feature.container_query_length_units, + .vb = css.Feature.vb_unit, + .svb = css.Feature.viewport_percentage_units_small, + .lvb = css.Feature.viewport_percentage_units_large, + .dvb = css.Feature.viewport_percentage_units_dynamic, + .cqb = css.Feature.container_query_length_units, + .vmin = css.Feature.vmin_unit, + .svmin = css.Feature.viewport_percentage_units_small, + .lvmin = css.Feature.viewport_percentage_units_large, + .dvmin = css.Feature.viewport_percentage_units_dynamic, + .cqmin = css.Feature.container_query_length_units, + .vmax = css.Feature.vmax_unit, + .svmax = css.Feature.viewport_percentage_units_small, + .lvmax = css.Feature.viewport_percentage_units_large, + .dvmax = css.Feature.viewport_percentage_units_dynamic, + .cqmax = css.Feature.container_query_length_units, + }; + + comptime { + const struct_fields = std.meta.fields(LengthValue); + const feature_fields = std.meta.fields(@TypeOf(FeatureMap)); + if (struct_fields.len != feature_fields.len) { + @compileError("LengthValue and FeatureMap must have the same number of fields"); + } + for (struct_fields) |field| { + _ = @field(FeatureMap, field.name); + } + } + + pub fn parse(input: *css.Parser) Result(@This()) { + const location = input.currentSourceLocation(); + const token = switch (input.next()) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + switch (token.*) { + 
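+ // Match the dimension's unit (e.g. `px`, `rem`, `vw`) against this union's field names.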
.dimension => |*dim| { + // todo_stuff.match_ignore_ascii_case + inline for (std.meta.fields(@This())) |field| { + if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(field.name, dim.unit)) { + return .{ .result = @unionInit(LengthValue, field.name, dim.num.value) }; + } + } + }, + .number => |*num| return .{ .result = .{ .px = num.value } }, + else => {}, + } + return .{ .err = location.newUnexpectedTokenError(token.*) }; + } + + pub fn toCss(this: *const @This(), comptime W: type, dest: *css.Printer(W)) css.PrintErr!void { + const value, const unit = this.toUnitValue(); + + // The unit can be omitted if the value is zero, except inside calc() + // expressions, where unitless numbers won't be parsed as dimensions. + if (!dest.in_calc and value == 0.0) { + return dest.writeChar('0'); + } + + return css.serializer.serializeDimension(value, unit, W, dest); + } + + pub fn isZero(this: *const LengthValue) bool { + inline for (bun.meta.EnumFields(@This())) |field| { + if (@intFromEnum(this.*) == field.value) { + return @field(this, field.name) == 0.0; + } + } + unreachable; + } + + pub fn zero() LengthValue { + return .{ .px = 0.0 }; + } + + /// Attempts to convert the value to pixels. + /// Returns `None` if the conversion is not possible. + pub fn toPx(this: *const @This()) ?CSSNumber { + return switch (this.*) { + .px => |v| v, + .in => |v| v * PX_PER_IN, + .cm => |v| v * PX_PER_CM, + .mm => |v| v * PX_PER_MM, + .q => |v| v * PX_PER_Q, + .pt => |v| v * PX_PER_PT, + .pc => |v| v * PX_PER_PC, + else => null, + }; + } + + pub inline fn eql(this: *const @This(), other: *const @This()) bool { + inline for (bun.meta.EnumFields(@This())) |field| { + if (field.value == @intFromEnum(this.*) and field.value == @intFromEnum(other.*)) { + return @field(this, field.name) == @field(other, field.name); + } + } + return false; + } + + pub fn isSignNegative(this: *const @This()) bool { + const s = this.trySign() orelse return false; + return css.signfns.isSignNegative(s); + } + + pub fn isSignPositive(this: *const @This()) bool { + const s = this.trySign() orelse return false; + return css.signfns.isSignPositive(s); + } + + pub fn trySign(this: *const @This()) ?f32 { + return sign(this); + } + + pub fn sign(this: *const @This()) f32 { + const enum_fields = @typeInfo(@typeInfo(@This()).Union.tag_type.?).Enum.fields; + inline for (std.meta.fields(@This()), 0..) |field, i| { + if (enum_fields[i].value == @intFromEnum(this.*)) { + return css.signfns.signF32(@field(this, field.name)); + } + } + unreachable; + } + + pub fn tryFromToken(token: *const css.Token) css.Maybe(@This(), void) { + switch (token.*) { + .dimension => |*dim| { + inline for (std.meta.fields(@This())) |field| { + if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(field.name, dim.unit)) { + return .{ .result = @unionInit(LengthValue, field.name, dim.num.value) }; + } + } + }, + else => {}, + } + return .{ .err = {} }; + } + + pub fn toUnitValue(this: *const @This()) struct { CSSNumber, []const u8 } { + const enum_fields = @typeInfo(@typeInfo(@This()).Union.tag_type.?).Enum.fields; + inline for (std.meta.fields(@This()), 0..) 
|field, i| { + if (enum_fields[i].value == @intFromEnum(this.*)) { + return .{ @field(this, field.name), field.name }; + } + } + unreachable; + } + + pub fn map(this: *const @This(), comptime map_fn: *const fn (f32) f32) LengthValue { + inline for (comptime bun.meta.EnumFields(@This())) |field| { + if (field.value == @intFromEnum(this.*)) { + return @unionInit(LengthValue, field.name, map_fn(@field(this, field.name))); + } + } + unreachable; + } + + pub fn mulF32(this: @This(), _: Allocator, other: f32) LengthValue { + const fields = comptime bun.meta.EnumFields(@This()); + inline for (fields) |field| { + if (field.value == @intFromEnum(this)) { + return @unionInit(LengthValue, field.name, @field(this, field.name) * other); + } + } + unreachable; + } + + pub fn tryFromAngle(_: css.css_values.angle.Angle) ?@This() { + return null; + } + + pub fn partialCmp(this: *const LengthValue, other: *const LengthValue) ?std.math.Order { + if (@intFromEnum(this.*) == @intFromEnum(other.*)) { + inline for (bun.meta.EnumFields(LengthValue)) |field| { + if (field.value == @intFromEnum(this.*)) { + const a = @field(this, field.name); + const b = @field(other, field.name); + return css.generic.partialCmpF32(&a, &b); + } + } + unreachable; + } + + const a = this.toPx(); + const b = other.toPx(); + if (a != null and b != null) { + return css.generic.partialCmpF32(&a.?, &b.?); + } + return null; + } + + pub fn tryOp( + this: *const LengthValue, + other: *const LengthValue, + ctx: anytype, + comptime op_fn: *const fn (@TypeOf(ctx), a: f32, b: f32) f32, + ) ?LengthValue { + if (@intFromEnum(this.*) == @intFromEnum(other.*)) { + inline for (bun.meta.EnumFields(LengthValue)) |field| { + if (field.value == @intFromEnum(this.*)) { + const a = @field(this, field.name); + const b = @field(other, field.name); + return @unionInit(LengthValue, field.name, op_fn(ctx, a, b)); + } + } + unreachable; + } + + const a = this.toPx(); + const b = this.toPx(); + if (a != null and b != null) { + return .{ .px = op_fn(ctx, a.?, b.?) 
};
+ return null;
+ }
+
+ pub fn tryOpTo(
+ this: *const LengthValue,
+ other: *const LengthValue,
+ comptime R: type,
+ ctx: anytype,
+ comptime op_fn: *const fn (@TypeOf(ctx), a: f32, b: f32) R,
+ ) ?R {
+ if (@intFromEnum(this.*) == @intFromEnum(other.*)) {
+ inline for (bun.meta.EnumFields(LengthValue)) |field| {
+ if (field.value == @intFromEnum(this.*)) {
+ const a = @field(this, field.name);
+ const b = @field(other, field.name);
+ return op_fn(ctx, a, b);
+ }
+ }
+ unreachable;
+ }
+
+ const a = this.toPx();
+ const b = other.toPx();
+ if (a != null and b != null) {
+ return op_fn(ctx, a.?, b.?);
+ }
+ return null;
+ }
+
+ pub fn hash(this: *const @This(), hasher: *std.hash.Wyhash) void {
+ return css.implementHash(@This(), this, hasher);
+ }
+
+ pub fn tryAdd(this: *const LengthValue, _: std.mem.Allocator, rhs: *const LengthValue) ?LengthValue {
+ if (@intFromEnum(this.*) == @intFromEnum(rhs.*)) {
+ inline for (bun.meta.EnumFields(LengthValue)) |field| {
+ if (field.value == @intFromEnum(this.*)) {
+ return @unionInit(LengthValue, field.name, @field(this, field.name) + @field(rhs, field.name));
+ }
+ }
+ unreachable;
+ }
+ if (this.toPx()) |a| {
+ if (rhs.toPx()) |b| {
+ return .{ .px = a + b };
+ }
+ }
+ return null;
+ }
+
+ pub fn isCompatible(this: *const @This(), browsers: css.targets.Browsers) bool {
+ inline for (bun.meta.EnumFields(LengthValue)) |field| {
+ if (field.value == @intFromEnum(this.*)) {
+ if (comptime @TypeOf(@field(FeatureMap, field.name)) == css.compat.Feature) {
+ const feature = @field(FeatureMap, field.name);
+ return css.compat.Feature.isCompatible(feature, browsers);
+ }
+ return true;
+ }
+ }
+ unreachable;
+ }
+};
+
+/// A CSS [`<length>`](https://www.w3.org/TR/css-values-4/#lengths) value, with support for `calc()`.
+pub const Length = union(enum) {
+ /// An explicitly specified length value.
+ value: LengthValue,
+ /// A computed length value using `calc()`.
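+ /// e.g. `calc(1rem + 2px)`; stored behind a pointer because `Calc(Length)` recursively refers back to `Length`.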
+ calc: *Calc(Length), + + pub fn deepClone(this: *const Length, allocator: Allocator) Length { + return switch (this.*) { + .value => |v| .{ .value = v }, + .calc => |calc| .{ .calc = bun.create(allocator, Calc(Length), Calc(Length).deepClone(calc, allocator)) }, + }; + } + + pub fn deinit(this: *const Length, allocator: Allocator) void { + return switch (this.*) { + .calc => |calc| calc.deinit(allocator), + .value => {}, + }; + } + + pub fn parse(input: *css.Parser) Result(Length) { + if (input.tryParse(Calc(Length).parse, .{}).asValue()) |calc_value| { + // PERF: I don't like this redundant allocation + if (calc_value == .value) { + const ret = calc_value.value.*; + input.allocator().destroy(calc_value.value); + return .{ .result = ret }; + } + return .{ .result = .{ + .calc = bun.create( + input.allocator(), + Calc(Length), + calc_value, + ), + } }; + } + + const len = switch (LengthValue.parse(input)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + return .{ .result = .{ .value = len } }; + } + + pub fn toCss(this: *const @This(), comptime W: type, dest: *Printer(W)) PrintErr!void { + return switch (this.*) { + .value => |a| a.toCss(W, dest), + .calc => |c| c.toCss(W, dest), + }; + } + + pub fn eql(this: *const @This(), other: *const @This()) bool { + return switch (this.*) { + .value => |a| other.* == .value and a.eql(&other.value), + .calc => |a| other.* == .calc and a.eql(other.calc), + }; + } + + pub fn px(p: CSSNumber) Length { + return .{ .value = .{ .px = p } }; + } + + pub fn toPx(this: *const Length) ?CSSNumber { + return switch (this.*) { + .value => |a| a.toPx(), + else => null, + }; + } + + pub fn mulF32(this: Length, allocator: Allocator, other: f32) Length { + return switch (this) { + .value => Length{ .value = this.value.mulF32(allocator, other) }, + .calc => Length{ + .calc = bun.create( + allocator, + Calc(Length), + this.calc.mulF32(allocator, other), + ), + }, + }; + } + + pub fn add(this: Length, allocator: Allocator, other: Length) Length { + // Unwrap calc(...) functions so we can add inside. + // Then wrap the result in a calc(...) again if necessary. 
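+ // e.g. adding `2px` to `calc(1rem + 2px)` can fold into `calc(1rem + 4px)` instead of nesting another calc().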
+ const a = unwrapCalc(allocator, this); + const b = unwrapCalc(allocator, other); + const res: Length = Length.addInternal(a, allocator, b); + if (res == .calc) { + if (res.calc.* == .value) return res.calc.value.*; + if (res.calc.* == .function and res.calc.function.* != .calc) return Length{ .calc = bun.create(allocator, Calc(Length), Calc(Length){ .function = res.calc.function }) }; + return Length{ .calc = bun.create(allocator, Calc(Length), Calc(Length){ + .function = bun.create(allocator, MathFunction(Length), MathFunction(Length){ .calc = res.calc.* }), + }) }; + } + return res; + } + + fn addInternal(this: Length, allocator: Allocator, other: Length) Length { + if (this.tryAdd(allocator, &other)) |r| return r; + return this.add__(allocator, other); + } + + fn intoCalc(this: Length, allocator: Allocator) Calc(Length) { + return switch (this) { + .calc => |c| c.*, + else => |v| Calc(Length){ .value = bun.create(allocator, Length, v) }, + }; + } + + fn add__(this: Length, allocator: Allocator, other: Length) Length { + var a = this; + var b = other; + + if (a.isZero()) return b; + + if (b.isZero()) return a; + + if (a.isSignNegative() and b.isSignPositive()) { + std.mem.swap(Length, &a, &b); + } + + if (a == .calc and b == .calc) { + return Length{ .calc = bun.create(allocator, Calc(Length), a.calc.add(allocator, b.calc.*)) }; + } else if (a == .calc) { + switch (a.calc.*) { + .value => |v| return v.add__(allocator, b), + else => return Length{ .calc = bun.create(allocator, Calc(Length), Calc(Length){ + .sum = .{ + .left = bun.create(allocator, Calc(Length), a.calc.*), + .right = bun.create(allocator, Calc(Length), b.intoCalc(allocator)), + }, + }) }, + } + } else if (b == .calc) { + switch (b.calc.*) { + .value => |v| return a.add__(allocator, v.*), + else => return Length{ .calc = bun.create(allocator, Calc(Length), Calc(Length){ + .sum = .{ + .left = bun.create(allocator, Calc(Length), a.intoCalc(allocator)), + .right = bun.create(allocator, Calc(Length), b.calc.*), + }, + }) }, + } + } else { + return Length{ .calc = bun.create(allocator, Calc(Length), Calc(Length){ + .sum = .{ + .left = bun.create(allocator, Calc(Length), a.intoCalc(allocator)), + .right = bun.create(allocator, Calc(Length), b.intoCalc(allocator)), + }, + }) }; + } + } + + fn tryAdd(this: *const Length, allocator: Allocator, other: *const Length) ?Length { + if (this.* == .value and other.* == .value) { + if (this.value.tryAdd(allocator, &other.value)) |res| { + return Length{ .value = res }; + } + return null; + } + + if (this.* == .calc) { + switch (this.calc.*) { + .value => |v| return v.tryAdd(allocator, other), + .sum => |s| { + const a = Length{ .calc = s.left }; + if (a.tryAdd(allocator, other)) |res| { + return res.add__(allocator, Length{ .calc = s.right }); + } + + const b = Length{ .calc = s.right }; + if (b.tryAdd(allocator, other)) |res| { + return (Length{ .calc = s.left }).add__(allocator, res); + } + + return null; + }, + else => return null, + } + } + + if (other.* == .calc) { + switch (other.calc.*) { + .value => |v| return v.tryAdd(allocator, this), + .sum => |s| { + const a = Length{ .calc = s.left }; + if (this.tryAdd(allocator, &a)) |res| { + return res.add__(allocator, Length{ .calc = s.right }); + } + + const b = Length{ .calc = s.right }; + if (this.tryAdd(allocator, &b)) |res| { + return (Length{ .calc = s.left }).add__(allocator, res); + } + + return null; + }, + else => return null, + } + } + + return null; + } + + fn unwrapCalc(allocator: Allocator, length: Length) Length { + return 
switch (length) { + .calc => |c| switch (c.*) { + .function => |f| switch (f.*) { + .calc => |c2| .{ .calc = bun.create(allocator, Calc(Length), c2) }, + else => |c2| .{ .calc = bun.create( + allocator, + Calc(Length), + Calc(Length){ .function = bun.create(allocator, css.css_values.calc.MathFunction(Length), c2) }, + ) }, + }, + else => .{ .calc = c }, + }, + else => length, + }; + } + + pub fn trySign(this: *const Length) ?f32 { + return switch (this.*) { + .value => |v| v.sign(), + .calc => |v| v.trySign(), + }; + } + + pub fn isSignNegative(this: *const @This()) bool { + const s = this.trySign() orelse return false; + return css.signfns.isSignNegative(s); + } + + pub fn isSignPositive(this: *const @This()) bool { + const s = this.trySign() orelse return false; + return css.signfns.isSignPositive(s); + } + + pub fn partialCmp(this: *const Length, other: *const Length) ?std.math.Order { + if (this.* == .value and other.* == .value) return css.generic.partialCmp(LengthValue, &this.value, &other.value); + return null; + } + + pub fn tryFromAngle(_: css.css_values.angle.Angle) ?@This() { + return null; + } + + pub fn tryMap(this: *const Length, comptime map_fn: *const fn (f32) f32) ?Length { + return switch (this.*) { + .value => |v| .{ .value = v.map(map_fn) }, + else => null, + }; + } + + pub fn tryOp( + this: *const Length, + other: *const Length, + ctx: anytype, + comptime op_fn: *const fn (@TypeOf(ctx), a: f32, b: f32) f32, + ) ?Length { + if (this.* == .value and other.* == .value) { + if (this.value.tryOp(&other.value, ctx, op_fn)) |val| return .{ .value = val }; + return null; + } + return null; + } + + pub fn tryOpTo( + this: *const Length, + other: *const Length, + comptime R: type, + ctx: anytype, + comptime op_fn: *const fn (@TypeOf(ctx), a: f32, b: f32) R, + ) ?R { + if (this.* == .value and other.* == .value) { + return this.value.tryOpTo(&other.value, R, ctx, op_fn); + } + return null; + } + + pub fn isZero(this: *const Length) bool { + return switch (this.*) { + .value => |v| v.isZero(), + else => false, + }; + } + + pub fn isCompatible(this: *const @This(), browsers: css.targets.Browsers) bool { + return switch (this.*) { + .value => |*v| v.isCompatible(browsers), + .calc => |c| c.isCompatible(browsers), + }; + } +}; diff --git a/src/css/values/number.zig b/src/css/values/number.zig new file mode 100644 index 0000000000..ddb701a7c0 --- /dev/null +++ b/src/css/values/number.zig @@ -0,0 +1,69 @@ +const std = @import("std"); +const bun = @import("root").bun; +pub const css = @import("../css_parser.zig"); +const Result = css.Result; +const ArrayList = std.ArrayListUnmanaged; +const Printer = css.Printer; +const PrintErr = css.PrintErr; +const Calc = css.css_values.calc.Calc; + +pub const CSSNumber = f32; +pub const CSSNumberFns = struct { + pub fn parse(input: *css.Parser) Result(CSSNumber) { + if (input.tryParse(Calc(f32).parse, .{}).asValue()) |calc_value| { + switch (calc_value) { + .value => |v| return .{ .result = v.* }, + .number => |n| return .{ .result = n }, + // Numbers are always compatible, so they will always compute to a value. + else => return .{ .err = input.newCustomError(css.ParserError.invalid_value) }, + } + } + + return input.expectNumber(); + } + + pub fn toCss(this: *const CSSNumber, comptime W: type, dest: *Printer(W)) PrintErr!void { + const number: f32 = this.*; + if (number != 0.0 and @abs(number) < 1.0) { + // PERF(alloc): Use stack fallback here? + // why the extra allocation anyway? isn't max amount of digits to stringify an f32 small? 
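+ // Render into a temporary buffer so the leading zero can be stripped, e.g. 0.5 -> `.5` and -0.5 -> `-.5`.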
+ var s = ArrayList(u8){}; + defer s.deinit(dest.allocator); + const writer = s.writer(dest.allocator); + css.to_css.float32(number, writer) catch { + return dest.addFmtError(); + }; + if (number < 0.0) { + try dest.writeChar('-'); + try dest.writeStr(bun.strings.trimLeadingPattern2(s.items, '-', '0')); + } else { + try dest.writeStr(bun.strings.trimLeadingChar(s.items, '0')); + } + } else { + return css.to_css.float32(number, dest) catch { + return dest.addFmtError(); + }; + } + } + + pub fn tryFromAngle(_: css.css_values.angle.Angle) ?CSSNumber { + return null; + } + + pub fn sign(this: *const CSSNumber) f32 { + if (this.* == 0.0) return if (css.signfns.isSignPositive(this.*)) 0.0 else 0.0; + return css.signfns.signum(this.*); + } +}; + +/// A CSS [``](https://www.w3.org/TR/css-values-4/#integers) value. +pub const CSSInteger = i32; +pub const CSSIntegerFns = struct { + pub fn parse(input: *css.Parser) Result(CSSInteger) { + // TODO: calc?? + return input.expectInteger(); + } + pub inline fn toCss(this: *const CSSInteger, comptime W: type, dest: *Printer(W)) PrintErr!void { + try css.to_css.integer(i32, this.*, W, dest); + } +}; diff --git a/src/css/values/percentage.zig b/src/css/values/percentage.zig new file mode 100644 index 0000000000..bdafae93e6 --- /dev/null +++ b/src/css/values/percentage.zig @@ -0,0 +1,501 @@ +const std = @import("std"); +const bun = @import("root").bun; +pub const css = @import("../css_parser.zig"); +const Result = css.Result; +const ArrayList = std.ArrayListUnmanaged; +const Printer = css.Printer; +const PrintErr = css.PrintErr; +const CSSNumber = css.css_values.number.CSSNumber; +const CSSNumberFns = css.css_values.number.CSSNumberFns; +const Calc = css.css_values.calc.Calc; + +pub const Percentage = struct { + v: CSSNumber, + + pub fn parse(input: *css.Parser) Result(Percentage) { + if (input.tryParse(Calc(Percentage).parse, .{}).asValue()) |calc_value| { + if (calc_value == .value) return .{ .result = calc_value.value.* }; + // Percentages are always compatible, so they will always compute to a value. + bun.unreachablePanic("Percentages are always compatible, so they will always compute to a value.", .{}); + } + + const percent = switch (input.expectPercentage()) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + + return .{ .result = Percentage{ .v = percent } }; + } + + pub fn toCss(this: *const @This(), comptime W: type, dest: *Printer(W)) PrintErr!void { + const x = this.v * 100.0; + const int_value: ?i32 = if ((x - @trunc(x)) == 0.0) + @intFromFloat(this.v) + else + null; + + const percent = css.Token{ .percentage = .{ + .has_sign = this.v < 0.0, + .unit_value = this.v, + .int_value = int_value, + } }; + + if (this.v != 0.0 and @abs(this.v) < 0.01) { + // TODO: is this the max length? 
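+ // Percentages under 1% go through a scratch buffer so the leading zero can be trimmed, e.g. 0.005 serializes as `.5%`.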
+ var buf: [32]u8 = undefined; + var fba = std.heap.FixedBufferAllocator.init(&buf); + var string = std.ArrayList(u8).init(fba.allocator()); + const writer = string.writer(); + percent.toCssGeneric(writer) catch return dest.addFmtError(); + if (this.v < 0.0) { + try dest.writeChar('-'); + try dest.writeStr(bun.strings.trimLeadingPattern2(string.items, '-', '0')); + } else { + try dest.writeStr(bun.strings.trimLeadingChar(string.items, '0')); + } + } else { + try percent.toCss(W, dest); + } + } + + pub inline fn eql(this: *const Percentage, other: *const Percentage) bool { + return this.v == other.v; + } + + pub fn add(lhs: Percentage, _: std.mem.Allocator, rhs: Percentage) Percentage { + return Percentage{ .v = lhs.v + rhs.v }; + } + + pub fn mulF32(this: Percentage, _: std.mem.Allocator, other: f32) Percentage { + return Percentage{ .v = this.v * other }; + } + + pub fn isZero(this: *const Percentage) bool { + return this.v == 0.0; + } + + pub fn sign(this: *const Percentage) f32 { + return css.signfns.signF32(this.v); + } + + pub fn trySign(this: *const Percentage) ?f32 { + return this.sign(); + } + + pub fn partialCmp(this: *const Percentage, other: *const Percentage) ?std.math.Order { + return css.generic.partialCmp(f32, &this.v, &other.v); + } + + pub fn tryFromAngle(_: css.css_values.angle.Angle) ?Percentage { + return null; + } + + pub fn tryMap(_: *const Percentage, comptime _: *const fn (f32) f32) ?Percentage { + // Percentages cannot be mapped because we don't know what they will resolve to. + // For example, they might be positive or negative depending on what they are a + // percentage of, which we don't know. + return null; + } + + pub fn op( + this: *const Percentage, + other: *const Percentage, + ctx: anytype, + comptime op_fn: *const fn (@TypeOf(ctx), a: f32, b: f32) f32, + ) Percentage { + return Percentage{ .v = op_fn(ctx, this.v, other.v) }; + } + + pub fn opTo( + this: *const Percentage, + other: *const Percentage, + comptime R: type, + ctx: anytype, + comptime op_fn: *const fn (@TypeOf(ctx), a: f32, b: f32) R, + ) R { + return op_fn(ctx, this.v, other.v); + } + + pub fn tryOp( + this: *const Percentage, + other: *const Percentage, + ctx: anytype, + comptime op_fn: *const fn (@TypeOf(ctx), a: f32, b: f32) f32, + ) ?Percentage { + return Percentage{ .v = op_fn(ctx, this.v, other.v) }; + } +}; + +fn needsDeepclone(comptime D: type) bool { + return switch (D) { + css.css_values.angle.Angle => false, + css.css_values.length.LengthValue => false, + else => @compileError("Can't tell if " ++ @typeName(D) ++ " needs deepclone, please add it to this switch statement."), + }; +} + +pub fn DimensionPercentage(comptime D: type) type { + const needs_deepclone = needsDeepclone(D); + return union(enum) { + dimension: D, + percentage: Percentage, + calc: *Calc(DimensionPercentage(D)), + + const This = @This(); + + pub fn parse(input: *css.Parser) Result(@This()) { + if (input.tryParse(Calc(This).parse, .{}).asValue()) |calc_value| { + if (calc_value == .value) return .{ .result = calc_value.value.* }; + return .{ .result = .{ + .calc = bun.create(input.allocator(), Calc(DimensionPercentage(D)), calc_value), + } }; + } + + if (input.tryParse(D.parse, .{}).asValue()) |length| { + return .{ .result = .{ .dimension = length } }; + } + + if (input.tryParse(Percentage.parse, .{}).asValue()) |percentage| { + return .{ .result = .{ .percentage = percentage } }; + } + + return .{ .err = input.newErrorForNextToken() }; + } + + pub fn toCss(this: *const @This(), comptime W: type, dest: 
*css.Printer(W)) css.PrintErr!void { + return switch (this.*) { + .dimension => |*length| length.toCss(W, dest), + .percentage => |*per| per.toCss(W, dest), + .calc => |calc| calc.toCss(W, dest), + }; + } + + pub fn isCompatible(this: *const @This(), browsers: css.targets.Browsers) bool { + return switch (this.*) { + .dimension => |*d| d.isCompatible(browsers), + .calc => |c| c.isCompatible(browsers), + .percentage => true, + }; + } + + pub fn eql(this: *const This, other: *const This) bool { + return css.implementEql(@This(), this, other); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) This { + return switch (this.*) { + .dimension => |d| if (comptime needs_deepclone) .{ .dimension = d.deepClone(allocator) } else this.*, + .percentage => return this.*, + .calc => |calc| .{ .calc = bun.create(allocator, Calc(DimensionPercentage(D)), calc.deepClone(allocator)) }, + }; + } + + pub fn deinit(this: *const @This(), allocator: std.mem.Allocator) void { + return switch (this.*) { + .dimension => |d| if (comptime @hasDecl(D, "deinit")) d.deinit(allocator), + .percentage => {}, + .calc => |calc| calc.deinit(allocator), + }; + } + + pub fn zero() This { + return This{ .dimension = switch (D) { + f32 => 0.0, + else => D.zero(), + } }; + } + + pub fn isZero(this: *const This) bool { + return switch (this.*) { + .dimension => |*d| switch (D) { + f32 => d == 0.0, + else => d.isZero(), + }, + .percentage => |*p| p.isZero(), + else => false, + }; + } + + fn mulValueF32(lhs: D, allocator: std.mem.Allocator, rhs: f32) D { + return switch (D) { + f32 => lhs * rhs, + else => lhs.mulF32(allocator, rhs), + }; + } + + pub fn mulF32(this: This, allocator: std.mem.Allocator, other: f32) This { + return switch (this) { + .dimension => |d| .{ .dimension = mulValueF32(d, allocator, other) }, + .percentage => |p| .{ .percentage = p.mulF32(allocator, other) }, + .calc => |c| .{ .calc = bun.create(allocator, Calc(DimensionPercentage(D)), c.mulF32(allocator, other)) }, + }; + } + + pub fn add(this: This, allocator: std.mem.Allocator, other: This) This { + // Unwrap calc(...) functions so we can add inside. + // Then wrap the result in a calc(...) again if necessary. 
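+ // Mirrors `Length.add`: merge compatible components where possible, otherwise build a `calc()` sum.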
+ const a = unwrapCalc(this, allocator); + const b = unwrapCalc(other, allocator); + const res = a.addInternal(allocator, b); + return switch (res) { + .calc => |c| switch (c.*) { + .value => |l| l.*, + .function => |f| if (f.* != .calc) .{ + .calc = bun.create(allocator, Calc(DimensionPercentage(D)), .{ + .function = f, + }), + } else .{ + .calc = bun.create(allocator, Calc(DimensionPercentage(D)), .{ + .function = bun.create( + allocator, + css.css_values.calc.MathFunction(DimensionPercentage(D)), + .{ .calc = c.* }, + ), + }), + }, + else => .{ + .calc = bun.create(allocator, Calc(DimensionPercentage(D)), .{ + .function = bun.create( + allocator, + css.css_values.calc.MathFunction(DimensionPercentage(D)), + .{ .calc = c.* }, + ), + }), + }, + }, + else => res, + }; + } + + fn addInternal(this: This, allocator: std.mem.Allocator, other: This) This { + if (this.addRecursive(allocator, &other)) |res| return res; + return this.addImpl(allocator, other); + } + + fn addRecursive(this: *const This, allocator: std.mem.Allocator, other: *const This) ?This { + if (this.* == .dimension and other.* == .dimension) { + if (this.dimension.tryAdd(allocator, &other.dimension)) |res| { + return .{ .dimension = res }; + } + } else if (this.* == .percentage and other.* == .percentage) { + return .{ .percentage = .{ .v = this.percentage.v + other.percentage.v } }; + } else if (this.* == .calc) { + switch (this.calc.*) { + .value => |v| return v.addRecursive(allocator, other), + .sum => |sum| { + const left_calc = This{ .calc = sum.left }; + if (left_calc.addRecursive(allocator, other)) |res| { + return res.add(allocator, This{ .calc = sum.right }); + } + + const right_calc = This{ .calc = sum.right }; + if (right_calc.addRecursive(allocator, other)) |res| { + return (This{ .calc = sum.left }).add(allocator, res); + } + }, + else => {}, + } + } else if (other.* == .calc) { + switch (other.calc.*) { + .value => |v| return this.addRecursive(allocator, v), + .sum => |sum| { + const left_calc = This{ .calc = sum.left }; + if (this.addRecursive(allocator, &left_calc)) |res| { + return res.add(allocator, This{ .calc = sum.right }); + } + + const right_calc = This{ .calc = sum.right }; + if (this.addRecursive(allocator, &right_calc)) |res| { + return (This{ .calc = sum.left }).add(allocator, res); + } + }, + else => {}, + } + } + + return null; + } + + fn addImpl(this: This, allocator: std.mem.Allocator, other: This) This { + var a = this; + var b = other; + + if (a.isZero()) return b; + if (b.isZero()) return a; + + if (a.isSignNegative() and b.isSignPositive()) { + std.mem.swap(This, &a, &b); + } + + if (a == .calc and b == .calc) { + return .{ .calc = bun.create(allocator, Calc(DimensionPercentage(D)), a.calc.add(allocator, b.calc.*)) }; + } else if (a == .calc) { + if (a.calc.* == .value) { + return a.calc.value.add(allocator, b); + } else { + return .{ + .calc = bun.create( + allocator, + Calc(DimensionPercentage(D)), + .{ .sum = .{ + .left = bun.create(allocator, Calc(DimensionPercentage(D)), a.calc.*), + .right = bun.create(allocator, Calc(DimensionPercentage(D)), b.intoCalc(allocator)), + } }, + ), + }; + } + } else if (b == .calc) { + if (b.calc.* == .value) { + return a.add(allocator, b.calc.value.*); + } else { + return .{ + .calc = bun.create( + allocator, + Calc(DimensionPercentage(D)), + .{ .sum = .{ + .left = bun.create(allocator, Calc(DimensionPercentage(D)), a.intoCalc(allocator)), + .right = bun.create(allocator, Calc(DimensionPercentage(D)), b.calc.*), + } }, + ), + }; + } + } else { + return .{ 
+ .calc = bun.create( + allocator, + Calc(DimensionPercentage(D)), + .{ .sum = .{ + .left = bun.create(allocator, Calc(DimensionPercentage(D)), a.intoCalc(allocator)), + .right = bun.create(allocator, Calc(DimensionPercentage(D)), b.intoCalc(allocator)), + } }, + ), + }; + } + } + + inline fn isSignPositive(this: This) bool { + const sign = this.trySign() orelse return false; + return css.signfns.isSignPositive(sign); + } + + inline fn isSignNegative(this: This) bool { + const sign = this.trySign() orelse return false; + return css.signfns.isSignNegative(sign); + } + + fn unwrapCalc(this: This, allocator: std.mem.Allocator) This { + return switch (this) { + .calc => |calc| switch (calc.*) { + .function => |f| switch (f.*) { + .calc => |c2| .{ .calc = bun.create(allocator, Calc(DimensionPercentage(D)), c2) }, + else => .{ .calc = bun.create( + allocator, + Calc(DimensionPercentage(D)), + .{ + .function = bun.create( + allocator, + css.css_values.calc.MathFunction(DimensionPercentage(D)), + f.*, + ), + }, + ) }, + }, + else => .{ .calc = calc }, + }, + else => this, + }; + } + + pub fn partialCmp(this: *const This, other: *const This) ?std.math.Order { + if (this.* == .dimension and other.* == .dimension) { + return this.dimension.partialCmp(&other.dimension); + } else if (this.* == .percentage and other.* == .percentage) { + return this.percentage.partialCmp(&other.percentage); + } else { + return null; + } + } + + pub fn trySign(this: *const This) ?f32 { + return switch (this.*) { + .dimension => |*d| css.generic.trySign(@TypeOf(d.*), d), + .percentage => |p| p.trySign(), + .calc => |c| c.trySign(), + }; + } + + pub fn tryFromAngle(angle: css.css_values.angle.Angle) ?This { + return DimensionPercentage(D){ + .dimension = D.tryFromAngle(angle) orelse return null, + }; + } + + pub fn tryMap(this: *const This, comptime mapfn: *const fn (f32) f32) ?This { + return switch (this.*) { + .dimension => |vv| if (css.generic.tryMap(D, &vv, mapfn)) |v| .{ .dimension = v } else null, + else => null, + }; + } + + pub fn tryOp( + this: *const This, + other: *const This, + ctx: anytype, + comptime op_fn: *const fn (@TypeOf(ctx), a: f32, b: f32) f32, + ) ?This { + if (this.* == .dimension and other.* == .dimension) return .{ .dimension = css.generic.tryOp(D, &this.dimension, &other.dimension, ctx, op_fn) orelse return null }; + if (this.* == .percentage and other.* == .percentage) return .{ .percentage = Percentage{ .v = op_fn(ctx, this.percentage.v, other.percentage.v) } }; + return null; + } + + pub fn intoCalc(this: This, allocator: std.mem.Allocator) Calc(DimensionPercentage(D)) { + return switch (this) { + .calc => |calc| calc.*, + else => .{ .value = bun.create(allocator, This, this) }, + }; + } + }; +} + +/// Either a `` or ``. +pub const NumberOrPercentage = union(enum) { + /// A number. + number: CSSNumber, + /// A percentage. 
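+ /// Stored as a unit value, e.g. 50% is represented as 0.5.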
+ percentage: Percentage, + + // TODO: implement this + pub usingnamespace css.DeriveParse(@This()); + pub usingnamespace css.DeriveToCss(@This()); + + // pub fn parse(input: *css.Parser) Result(NumberOrPercentage) { + // _ = input; // autofix + // @panic(css.todo_stuff.depth); + // } + + // pub fn toCss(this: *const NumberOrPercentage, comptime W: type, dest: *css.Printer(W)) css.PrintErr!void { + // _ = this; // autofix + // _ = dest; // autofix + // @panic(css.todo_stuff.depth); + // } + + pub fn eql(this: *const NumberOrPercentage, other: *const NumberOrPercentage) bool { + return switch (this.*) { + .number => |*a| switch (other.*) { + .number => a.* == other.number, + .percentage => false, + }, + .percentage => |*a| switch (other.*) { + .number => false, + .percentage => a.eql(&other.percentage), + }, + }; + } + + pub fn intoF32(this: *const @This()) f32 { + return switch (this.*) { + .number => this.number, + .percentage => this.percentage.v, + }; + } +}; diff --git a/src/css/values/position.zig b/src/css/values/position.zig new file mode 100644 index 0000000000..83c00b9692 --- /dev/null +++ b/src/css/values/position.zig @@ -0,0 +1,443 @@ +const std = @import("std"); +const bun = @import("root").bun; +pub const css = @import("../css_parser.zig"); +const Result = css.Result; +const ArrayList = std.ArrayListUnmanaged; +const Printer = css.Printer; +const PrintErr = css.PrintErr; +const CSSNumber = css.css_values.number.CSSNumber; +const CSSNumberFns = css.css_values.number.CSSNumberFns; +const Calc = css.css_values.calc.Calc; +const DimensionPercentage = css.css_values.percentage.DimensionPercentage; +const LengthPercentage = css.css_values.length.LengthPercentage; +const Percentage = css.css_values.percentage.Percentage; + +/// A CSS `` value, +/// as used in the `background-position` property, gradients, masks, etc. +pub const Position = struct { + /// The x-position. + x: HorizontalPosition, + /// The y-position. + y: VerticalPosition, + + pub fn parse(input: *css.Parser) Result(Position) { + // Try parsing a horizontal position first + if (input.tryParse(HorizontalPosition.parse, .{}).asValue()) |horizontal_pos| { + switch (horizontal_pos) { + .center => { + // Try parsing a vertical position next + if (input.tryParse(VerticalPosition.parse, .{}).asValue()) |y| { + return .{ .result = Position{ + .x = .center, + .y = y, + } }; + } + + // If it didn't work, assume the first actually represents a y position, + // and the next is an x position. e.g. `center left` rather than `left center`. + const x = input.tryParse(HorizontalPosition.parse, .{}).unwrapOr(HorizontalPosition.center); + const y = VerticalPosition.center; + return .{ .result = Position{ .x = x, .y = y } }; + }, + .length => |*x| { + // If we got a length as the first component, then the second must + // be a keyword or length (not a side offset). 
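+ // e.g. `20px top` or `20px 30px`.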
+ if (input.tryParse(VerticalPositionKeyword.parse, .{}).asValue()) |y_keyword| { + const y = VerticalPosition{ .side = .{ + .side = y_keyword, + .offset = null, + } }; + return .{ .result = Position{ .x = .{ .length = x.* }, .y = y } }; + } + if (input.tryParse(LengthPercentage.parse, .{}).asValue()) |y_lp| { + const y = VerticalPosition{ .length = y_lp }; + return .{ .result = Position{ .x = .{ .length = x.* }, .y = y } }; + } + const y = VerticalPosition.center; + _ = input.tryParse(css.Parser.expectIdentMatching, .{"center"}); + return .{ .result = Position{ .x = .{ .length = x.* }, .y = y } }; + }, + .side => |*side| { + const x_keyword = side.side; + const lp = side.offset; + + // If we got a horizontal side keyword (and optional offset), expect another for the vertical side. + // e.g. `left center` or `left 20px center` + if (input.tryParse(css.Parser.expectIdentMatching, .{"center"}).isOk()) { + const x = HorizontalPosition{ .side = .{ + .side = x_keyword, + .offset = lp, + } }; + const y = VerticalPosition.center; + return .{ .result = Position{ .x = x, .y = y } }; + } + + // e.g. `left top`, `left top 20px`, `left 20px top`, or `left 20px top 20px` + if (input.tryParse(VerticalPositionKeyword.parse, .{}).asValue()) |y_keyword| { + const y_lp = switch (input.tryParse(LengthPercentage.parse, .{})) { + .result => |vv| vv, + .err => null, + }; + const x = HorizontalPosition{ .side = .{ + .side = x_keyword, + .offset = lp, + } }; + const y = VerticalPosition{ .side = .{ + .side = y_keyword, + .offset = y_lp, + } }; + return .{ .result = Position{ .x = x, .y = y } }; + } + + // If we didn't get a vertical side keyword (e.g. `left 20px`), then apply the offset to the vertical side. + const x = HorizontalPosition{ .side = .{ + .side = x_keyword, + .offset = null, + } }; + const y = if (lp) |lp_val| + VerticalPosition{ .length = lp_val } + else + VerticalPosition.center; + return .{ .result = Position{ .x = x, .y = y } }; + }, + } + } + + // If the horizontal position didn't parse, then it must be out of order. Try vertical position keyword. 
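+ // e.g. `top left` or `bottom 10px right 20px`.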
+ const y_keyword = switch (VerticalPositionKeyword.parse(input)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + const lp_and_x_pos = input.tryParse(struct { + fn parse(i: *css.Parser) Result(struct { ?LengthPercentage, HorizontalPosition }) { + const y_lp = i.tryParse(LengthPercentage.parse, .{}).asValue(); + if (i.tryParse(HorizontalPositionKeyword.parse, .{}).asValue()) |x_keyword| { + const x_lp = i.tryParse(LengthPercentage.parse, .{}).asValue(); + const x_pos = HorizontalPosition{ .side = .{ + .side = x_keyword, + .offset = x_lp, + } }; + return .{ .result = .{ y_lp, x_pos } }; + } + if (i.expectIdentMatching("center").asErr()) |e| return .{ .err = e }; + const x_pos = HorizontalPosition.center; + return .{ .result = .{ y_lp, x_pos } }; + } + }.parse, .{}); + + if (lp_and_x_pos.asValue()) |tuple| { + const y_lp = tuple[0]; + const x = tuple[1]; + const y = VerticalPosition{ .side = .{ + .side = y_keyword, + .offset = y_lp, + } }; + return .{ .result = Position{ .x = x, .y = y } }; + } + + const x = HorizontalPosition.center; + const y = VerticalPosition{ .side = .{ + .side = y_keyword, + .offset = null, + } }; + return .{ .result = Position{ .x = x, .y = y } }; + } + + pub fn toCss(this: *const Position, comptime W: type, dest: *css.Printer(W)) css.PrintErr!void { + if (this.x == .side and this.y == .length and this.x.side.side != .left) { + try this.x.toCss(W, dest); + try dest.writeStr(" top "); + try this.y.length.toCss(W, dest); + } else if (this.x == .side and this.x.side.side != .left and this.y.isCenter()) { + // If there is a side keyword with an offset, "center" must be a keyword not a percentage. + try this.x.toCss(W, dest); + try dest.writeStr(" center"); + } else if (this.x == .length and this.y == .side and this.y.side.side != .top) { + try dest.writeStr("left "); + try this.x.length.toCss(W, dest); + try dest.writeStr(" "); + try this.y.toCss(W, dest); + } else if (this.x.isCenter() and this.y.isCenter()) { + // `center center` => 50% + try this.x.toCss(W, dest); + } else if (this.x == .length and this.y.isCenter()) { + // `center` is assumed if omitted. 
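+ // e.g. `20px center` serializes as just `20px`.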
+ try this.x.length.toCss(W, dest); + } else if (this.x == .side and this.x.side.offset == null and this.y.isCenter()) { + const p: LengthPercentage = this.x.side.side.intoLengthPercentage(); + try p.toCss(W, dest); + } else if (this.y == .side and this.y.side.offset == null and this.x.isCenter()) { + try this.y.toCss(W, dest); + } else if (this.x == .side and this.x.side.offset == null and this.y == .side and this.y.side.offset == null) { + const x: LengthPercentage = this.x.side.side.intoLengthPercentage(); + const y: LengthPercentage = this.y.side.side.intoLengthPercentage(); + try x.toCss(W, dest); + try dest.writeStr(" "); + try y.toCss(W, dest); + } else { + const zero = LengthPercentage.zero(); + const fifty = LengthPercentage{ .percentage = .{ .v = 0.5 } }; + const x_len: ?*const LengthPercentage = x_len: { + switch (this.x) { + .side => |side| { + if (side.side == .left) { + if (side.offset) |*offset| { + if (offset.isZero()) { + break :x_len &zero; + } else { + break :x_len offset; + } + } else { + break :x_len &zero; + } + } + }, + .length => |len| { + if (len.isZero()) { + break :x_len &zero; + } + }, + .center => break :x_len &fifty, + } + break :x_len null; + }; + + const y_len: ?*const LengthPercentage = y_len: { + switch (this.y) { + .side => |side| { + if (side.side == .top) { + if (side.offset) |*offset| { + if (offset.isZero()) { + break :y_len &zero; + } else { + break :y_len offset; + } + } else { + break :y_len &zero; + } + } + }, + .length => |len| { + if (len.isZero()) { + break :y_len &zero; + } + }, + .center => break :y_len &fifty, + } + break :y_len null; + }; + + if (x_len != null and y_len != null) { + try x_len.?.toCss(W, dest); + try dest.writeStr(" "); + try y_len.?.toCss(W, dest); + } else { + try this.x.toCss(W, dest); + try dest.writeStr(" "); + try this.y.toCss(W, dest); + } + } + } + + pub fn default() @This() { + return .{ + .x = HorizontalPosition{ .length = LengthPercentage{ .percentage = .{ .v = 0.0 } } }, + .y = VerticalPosition{ .length = LengthPercentage{ .percentage = .{ .v = 0.0 } } }, + }; + } + + /// Returns whether both the x and y positions are centered. + pub fn isCenter(this: *const @This()) bool { + return this.x.isCenter() and this.y.isCenter(); + } + + pub fn center() Position { + return .{ .x = .center, .y = .center }; + } + + pub fn eql(this: *const Position, other: *const Position) bool { + return this.x.eql(&other.x) and this.y.eql(&other.y); + } + + pub fn isZero(this: *const Position) bool { + return this.x.isZero() and this.y.isZero(); + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } +}; + +pub fn PositionComponent(comptime S: type) type { + return union(enum) { + /// The `center` keyword. + center, + /// A length or percentage from the top-left corner of the box. + length: LengthPercentage, + /// A side keyword with an optional offset. + side: struct { + /// A side keyword. + side: S, + /// Offset from the side. 
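+ /// e.g. the `10px` in `right 10px`.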
+ offset: ?LengthPercentage, + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + }, + + const This = @This(); + + pub fn isZero(this: *const This) bool { + if (this.* == .length and this.length.isZero()) return true; + return false; + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(this: *const This, other: *const This) bool { + return switch (this.*) { + .center => switch (other.*) { + .center => true, + else => false, + }, + .length => |*a| switch (other.*) { + .length => a.eql(&other.length), + else => false, + }, + .side => |*a| switch (other.*) { + .side => a.side.eql(&other.side.side) and css.generic.eql(?LengthPercentage, &a.offset, &other.side.offset), + else => false, + }, + }; + } + + pub fn parse(input: *css.Parser) Result(This) { + if (input.tryParse( + struct { + fn parse(i: *css.Parser) Result(void) { + return i.expectIdentMatching("center"); + } + }.parse, + .{}, + ).isOk()) { + return .{ .result = .center }; + } + + if (input.tryParse(LengthPercentage.parse, .{}).asValue()) |lp| { + return .{ .result = .{ .length = lp } }; + } + + const side = switch (S.parse(input)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + const offset = input.tryParse(LengthPercentage.parse, .{}).asValue(); + return .{ .result = .{ .side = .{ .side = side, .offset = offset } } }; + } + + pub fn toCss(this: *const @This(), comptime W: type, dest: *css.Printer(W)) css.PrintErr!void { + switch (this.*) { + .center => { + if (dest.minify) { + try dest.writeStr("50%"); + } else { + try dest.writeStr("center"); + } + }, + .length => |*lp| try lp.toCss(W, dest), + .side => |*s| { + try s.side.toCss(W, dest); + if (s.offset) |lp| { + try dest.writeStr(" "); + try lp.toCss(W, dest); + } + }, + } + } + + pub fn isCenter(this: *const This) bool { + switch (this.*) { + .center => return true, + .length => |*l| { + if (l.* == .percentage) return l.percentage.v == 0.5; + }, + else => {}, + } + return false; + } + }; +} + +pub const HorizontalPositionKeyword = enum { + /// The `left` keyword. + left, + /// The `right` keyword. + right, + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) HorizontalPositionKeyword { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(this: *const HorizontalPositionKeyword, other: *const HorizontalPositionKeyword) bool { + return this.* == other.*; + } + + pub fn asStr(this: *const @This()) []const u8 { + return css.enum_property_util.asStr(@This(), this); + } + + pub fn parse(input: *css.Parser) Result(@This()) { + return css.enum_property_util.parse(@This(), input); + } + + pub fn toCss(this: *const @This(), comptime W: type, dest: *Printer(W)) PrintErr!void { + return css.enum_property_util.toCss(@This(), this, W, dest); + } + + pub fn intoLengthPercentage(this: *const @This()) LengthPercentage { + return switch (this.*) { + .left => LengthPercentage.zero(), + .right => .{ .percentage = .{ .v = 1.0 } }, + }; + } +}; + +pub const VerticalPositionKeyword = enum { + /// The `top` keyword. + top, + /// The `bottom` keyword. 
+ bottom, + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn eql(this: *const VerticalPositionKeyword, other: *const VerticalPositionKeyword) bool { + return this.* == other.*; + } + + pub fn asStr(this: *const @This()) []const u8 { + return css.enum_property_util.asStr(@This(), this); + } + + pub fn parse(input: *css.Parser) Result(@This()) { + return css.enum_property_util.parse(@This(), input); + } + + pub fn toCss(this: *const @This(), comptime W: type, dest: *Printer(W)) PrintErr!void { + return css.enum_property_util.toCss(@This(), this, W, dest); + } + + pub fn intoLengthPercentage(this: *const @This()) LengthPercentage { + return switch (this.*) { + .top => LengthPercentage.zero(), + .bottom => LengthPercentage{ .percentage = Percentage{ .v = 1.0 } }, + }; + } +}; + +pub const HorizontalPosition = PositionComponent(HorizontalPositionKeyword); +pub const VerticalPosition = PositionComponent(VerticalPositionKeyword); diff --git a/src/css/values/ratio.zig b/src/css/values/ratio.zig new file mode 100644 index 0000000000..8784f898fd --- /dev/null +++ b/src/css/values/ratio.zig @@ -0,0 +1,75 @@ +const std = @import("std"); +const bun = @import("root").bun; +pub const css = @import("../css_parser.zig"); +const Result = css.Result; +const ArrayList = std.ArrayListUnmanaged; +const Printer = css.Printer; +const PrintErr = css.PrintErr; +const CSSNumber = css.css_values.number.CSSNumber; +const CSSNumberFns = css.css_values.number.CSSNumberFns; +const Calc = css.css_values.calc.Calc; +const DimensionPercentage = css.css_values.percentage.DimensionPercentage; +const LengthPercentage = css.css_values.length.LengthPercentage; +const Length = css.css_values.length.Length; +const Percentage = css.css_values.percentage.Percentage; +const CssColor = css.css_values.color.CssColor; +const Image = css.css_values.image.Image; +const Url = css.css_values.url.Url; +const CSSInteger = css.css_values.number.CSSInteger; +const CSSIntegerFns = css.css_values.number.CSSIntegerFns; +const Angle = css.css_values.angle.Angle; +const Time = css.css_values.time.Time; +const Resolution = css.css_values.resolution.Resolution; +const CustomIdent = css.css_values.ident.CustomIdent; +const CustomIdentFns = css.css_values.ident.CustomIdentFns; +const Ident = css.css_values.ident.Ident; + +/// A CSS [``](https://www.w3.org/TR/css-values-4/#ratios) value, +/// representing the ratio of two numeric values. +pub const Ratio = struct { + numerator: CSSNumber, + denominator: CSSNumber, + + pub fn parse(input: *css.Parser) Result(Ratio) { + const first = switch (CSSNumberFns.parse(input)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + const second = if (input.tryParse(css.Parser.expectDelim, .{'/'}).isOk()) switch (CSSNumberFns.parse(input)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + } else 1.0; + + return .{ .result = Ratio{ .numerator = first, .denominator = second } }; + } + + /// Parses a ratio where both operands are required. 
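+ /// e.g. `16 / 9`; unlike `parse`, a bare `16` is rejected because the `/` and denominator must be present.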
+ pub fn parseRequired(input: *css.Parser) Result(Ratio) { + const first = switch (CSSNumberFns.parse(input)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + if (input.expectDelim('/').asErr()) |e| return .{ .err = e }; + const second = switch (CSSNumberFns.parse(input)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + return .{ .result = Ratio{ .numerator = first, .denominator = second } }; + } + + pub fn toCss(this: *const @This(), comptime W: type, dest: *Printer(W)) PrintErr!void { + try CSSNumberFns.toCss(&this.numerator, W, dest); + if (this.denominator != 1.0) { + try dest.delim('/', true); + try CSSNumberFns.toCss(&this.denominator, W, dest); + } + } + + pub fn addF32(this: Ratio, _: std.mem.Allocator, other: f32) Ratio { + return .{ .numerator = this.numerator + other, .denominator = this.denominator }; + } + + pub fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return css.implementEql(@This(), lhs, rhs); + } +}; diff --git a/src/css/values/rect.zig b/src/css/values/rect.zig new file mode 100644 index 0000000000..28b281c6d7 --- /dev/null +++ b/src/css/values/rect.zig @@ -0,0 +1,155 @@ +const std = @import("std"); +const bun = @import("root").bun; +pub const css = @import("../css_parser.zig"); +const Result = css.Result; +const ArrayList = std.ArrayListUnmanaged; +const Printer = css.Printer; +const PrintErr = css.PrintErr; +const CSSNumber = css.css_values.number.CSSNumber; +const CSSNumberFns = css.css_values.number.CSSNumberFns; +const Calc = css.css_values.calc.Calc; +const DimensionPercentage = css.css_values.percentage.DimensionPercentage; +const LengthPercentage = css.css_values.length.LengthPercentage; +const Length = css.css_values.length.Length; +const LengthOrNumber = css.css_values.length.LengthOrNumber; +const Percentage = css.css_values.percentage.Percentage; +const CssColor = css.css_values.color.CssColor; +const Image = css.css_values.image.Image; +const Url = css.css_values.url.Url; +const CSSInteger = css.css_values.number.CSSInteger; +const CSSIntegerFns = css.css_values.number.CSSIntegerFns; +const Angle = css.css_values.angle.Angle; +const Time = css.css_values.time.Time; +const Resolution = css.css_values.resolution.Resolution; +const CustomIdent = css.css_values.ident.CustomIdent; +const CustomIdentFns = css.css_values.ident.CustomIdentFns; +const Ident = css.css_values.ident.Ident; + +fn needsDeinit(comptime T: type) bool { + return switch (T) { + f32, i32, u32, []const u8 => false, + LengthPercentage => true, + LengthOrNumber => true, + css.css_values.percentage.NumberOrPercentage => false, + css.css_properties.border_image.BorderImageSideWidth => true, + *const css.css_values.percentage.DimensionPercentage(css.css_values.length.LengthValue) => true, + CssColor => true, + css.css_properties.border.LineStyle => false, + css.css_properties.border.BorderSideWidth => true, + css.css_values.length.LengthPercentageOrAuto => true, + else => @compileError("Don't know if " ++ @typeName(T) ++ " needs deinit. Please add it to this switch statement."), + }; +} + +/// A generic value that represents a value for four sides of a box, +/// e.g. border-width, margin, padding, etc. +/// +/// When serialized, as few components as possible are written when +/// there are duplicate values. +pub fn Rect(comptime T: type) type { + const needs_deinit = needsDeinit(T); + return struct { + /// The top component. + top: T, + /// The right component. + right: T, + /// The bottom component. + bottom: T, + /// The left component. 
+ left: T, + + const This = @This(); + + pub fn eql(this: *const This, other: *const This) bool { + return css.generic.eql(T, &this.top, &other.top) and css.generic.eql(T, &this.right, &other.right) and css.generic.eql(T, &this.bottom, &other.bottom) and css.generic.eql(T, &this.left, &other.left); + } + + pub fn deepClone(this: *const This, allocator: std.mem.Allocator) This { + if (comptime needs_deinit or T == *const css.css_values.percentage.DimensionPercentage(css.css_values.length.LengthValue)) { + return This{ + .top = this.top.deepClone(allocator), + .right = this.right.deepClone(allocator), + .bottom = this.bottom.deepClone(allocator), + .left = this.left.deepClone(allocator), + }; + } + return This{ + .top = this.top, + .right = this.right, + .bottom = this.bottom, + .left = this.left, + }; + } + + pub fn all(val: T) This { + return This{ + .top = val, + .right = val, + .bottom = val, + .left = val, + }; + } + + pub fn deinit(this: *const This, allocator: std.mem.Allocator) void { + if (comptime needs_deinit) { + this.top.deinit(allocator); + this.right.deinit(allocator); + this.bottom.deinit(allocator); + this.left.deinit(allocator); + } + } + + pub fn parse(input: *css.Parser) Result(This) { + return This.parseWith(input, valParse); + } + + pub fn parseWith(input: *css.Parser, comptime parse_fn: anytype) Result(This) { + const first = switch (parse_fn(input)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + const second = switch (input.tryParse(parse_fn, .{})) { + .result => |v| v, + // + .err => return .{ .result = This{ .top = first, .right = first, .bottom = first, .left = first } }, + }; + const third = switch (input.tryParse(parse_fn, .{})) { + .result => |v| v, + // + .err => return .{ .result = This{ .top = first, .right = second, .bottom = first, .left = second } }, + }; + const fourth = switch (input.tryParse(parse_fn, .{})) { + .result => |v| v, + // + .err => return .{ .result = This{ .top = first, .right = second, .bottom = third, .left = second } }, + }; + // + return .{ .result = This{ .top = first, .right = second, .bottom = third, .left = fourth } }; + } + + pub fn toCss(this: *const This, comptime W: type, dest: *Printer(W)) PrintErr!void { + try css.generic.toCss(T, &this.top, W, dest); + const same_vertical = css.generic.eql(T, &this.top, &this.bottom); + const same_horizontal = css.generic.eql(T, &this.right, &this.left); + if (same_vertical and same_horizontal and css.generic.eql(T, &this.top, &this.right)) { + return; + } + try dest.writeStr(" "); + try css.generic.toCss(T, &this.right, W, dest); + if (same_vertical and same_horizontal) { + return; + } + try dest.writeStr(" "); + try css.generic.toCss(T, &this.bottom, W, dest); + if (same_horizontal) { + return; + } + try dest.writeStr(" "); + try css.generic.toCss(T, &this.left, W, dest); + } + + pub fn valParse(i: *css.Parser) Result(T) { + return css.generic.parse(T, i); + } + }; +} diff --git a/src/css/values/resolution.zig b/src/css/values/resolution.zig new file mode 100644 index 0000000000..951b809b21 --- /dev/null +++ b/src/css/values/resolution.zig @@ -0,0 +1,119 @@ +const std = @import("std"); +const bun = @import("root").bun; +pub const css = @import("../css_parser.zig"); +const Result = css.Result; +const ArrayList = std.ArrayListUnmanaged; +const Printer = css.Printer; +const PrintErr = css.PrintErr; +const CSSNumber = css.css_values.number.CSSNumber; +const CSSNumberFns = css.css_values.number.CSSNumberFns; +const Calc = css.css_values.calc.Calc; +const DimensionPercentage = 
css.css_values.percentage.DimensionPercentage; +const LengthPercentage = css.css_values.length.LengthPercentage; +const Length = css.css_values.length.Length; +const Percentage = css.css_values.percentage.Percentage; +const CssColor = css.css_values.color.CssColor; +const Image = css.css_values.image.Image; +const CSSInteger = css.css_values.number.CSSInteger; +const CSSIntegerFns = css.css_values.number.CSSIntegerFns; +const Angle = css.css_values.angle.Angle; +const Time = css.css_values.time.Time; +const CustomIdent = css.css_values.ident.CustomIdent; +const CustomIdentFns = css.css_values.ident.CustomIdentFns; +const Ident = css.css_values.ident.Ident; + +/// A CSS `` value. +pub const Resolution = union(enum) { + /// A resolution in dots per inch. + dpi: CSSNumber, + /// A resolution in dots per centimeter. + dpcm: CSSNumber, + /// A resolution in dots per px. + dppx: CSSNumber, + + // ~toCssImpl + const This = @This(); + + pub fn hash(this: *const @This(), hasher: *std.hash.Wyhash) void { + return css.implementHash(@This(), this, hasher); + } + + pub fn eql(this: *const Resolution, other: *const Resolution) bool { + return switch (this.*) { + .dpi => |*a| switch (other.*) { + .dpi => a.* == other.dpi, + else => false, + }, + .dpcm => |*a| switch (other.*) { + .dpcm => a.* == other.dpcm, + else => false, + }, + .dppx => |*a| switch (other.*) { + .dppx => a.* == other.dppx, + else => false, + }, + }; + } + + pub fn parse(input: *css.Parser) Result(Resolution) { + // TODO: calc? + const location = input.currentSourceLocation(); + const tok = switch (input.next()) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + if (tok.* == .dimension) { + const value = tok.dimension.num.value; + const unit = tok.dimension.unit; + // css.todo_stuff.match_ignore_ascii_case + if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(unit, "dpi")) return .{ .result = .{ .dpi = value } }; + if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(unit, "dpcm")) return .{ .result = .{ .dpcm = value } }; + if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(unit, "dppx") or bun.strings.eqlCaseInsensitiveASCIIICheckLength(unit, "x")) return .{ .result = .{ .dppx = value } }; + return .{ .err = location.newUnexpectedTokenError(.{ .ident = unit }) }; + } + return .{ .err = location.newUnexpectedTokenError(tok.*) }; + } + + pub fn tryFromToken(token: *const css.Token) css.Maybe(Resolution, void) { + switch (token.*) { + .dimension => |dim| { + const value = dim.num.value; + const unit = dim.unit; + // todo_stuff.match_ignore_ascii_case + if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(unit, "dpi")) { + return .{ .result = .{ .dpi = value } }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(unit, "dpcm")) { + return .{ .result = .{ .dpcm = value } }; + } else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(unit, "dppx") or + bun.strings.eqlCaseInsensitiveASCIIICheckLength(unit, "x")) + { + return .{ .result = .{ .dppx = value } }; + } else { + return .{ .err = {} }; + } + }, + else => return .{ .err = {} }, + } + } + + pub fn toCss(this: *const This, comptime W: type, dest: *Printer(W)) PrintErr!void { + const value, const unit = switch (this.*) { + .dpi => |dpi| .{ dpi, "dpi" }, + .dpcm => |dpcm| .{ dpcm, "dpcm" }, + .dppx => |dppx| if (dest.targets.isCompatible(.x_resolution_unit)) + .{ dppx, "x" } + else + .{ dppx, "dppx" }, + }; + + return try css.serializer.serializeDimension(value, unit, W, dest); + } + + pub fn addF32(this: This, _: std.mem.Allocator, other: f32) Resolution { + 
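+ // Adds the scalar to the stored value while preserving the unit (dpi, dpcm, or dppx).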
return switch (this) { + .dpi => |dpi| .{ .dpi = dpi + other }, + .dpcm => |dpcm| .{ .dpcm = dpcm + other }, + .dppx => |dppx| .{ .dppx = dppx + other }, + }; + } +}; diff --git a/src/css/values/size.zig b/src/css/values/size.zig new file mode 100644 index 0000000000..07aceaa9aa --- /dev/null +++ b/src/css/values/size.zig @@ -0,0 +1,88 @@ +const std = @import("std"); +const bun = @import("root").bun; +pub const css = @import("../css_parser.zig"); +const Result = css.Result; +const ArrayList = std.ArrayListUnmanaged; +const Printer = css.Printer; +const PrintErr = css.PrintErr; +const CSSNumber = css.css_values.number.CSSNumber; +const CSSNumberFns = css.css_values.number.CSSNumberFns; +const Calc = css.css_values.calc.Calc; +const DimensionPercentage = css.css_values.percentage.DimensionPercentage; +const LengthPercentage = css.css_values.length.LengthPercentage; +const Length = css.css_values.length.Length; +const Percentage = css.css_values.percentage.Percentage; +const CssColor = css.css_values.color.CssColor; +const Image = css.css_values.image.Image; +const Url = css.css_values.url.Url; +const CSSInteger = css.css_values.number.CSSInteger; +const CSSIntegerFns = css.css_values.number.CSSIntegerFns; +const Angle = css.css_values.angle.Angle; +const Time = css.css_values.time.Time; +const Resolution = css.css_values.resolution.Resolution; +const CustomIdent = css.css_values.ident.CustomIdent; +const CustomIdentFns = css.css_values.ident.CustomIdentFns; +const Ident = css.css_values.ident.Ident; + +/// A generic value that represents a value with two components, e.g. a border radius. +/// +/// When serialized, only a single component will be written if both are equal. +pub fn Size2D(comptime T: type) type { + return struct { + a: T, + b: T, + + fn parseVal(input: *css.Parser) Result(T) { + return switch (T) { + f32 => return CSSNumberFns.parse(input), + LengthPercentage => return LengthPercentage.parse(input), + else => T.parse(input), + }; + } + + pub fn parse(input: *css.Parser) Result(Size2D(T)) { + const first = switch (parseVal(input)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + const second = input.tryParse(parseVal, .{}).unwrapOrNoOptmizations(first); + return .{ .result = Size2D(T){ + .a = first, + .b = second, + } }; + } + + pub fn toCss(this: *const Size2D(T), comptime W: type, dest: *css.Printer(W)) css.PrintErr!void { + try valToCss(&this.a, W, dest); + if (!valEql(&this.b, &this.a)) { + try dest.writeStr(" "); + try valToCss(&this.b, W, dest); + } + } + + pub fn valToCss(val: *const T, comptime W: type, dest: *css.Printer(W)) css.PrintErr!void { + return switch (T) { + f32 => CSSNumberFns.toCss(val, W, dest), + else => val.toCss(W, dest), + }; + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub inline fn valEql(lhs: *const T, rhs: *const T) bool { + return switch (T) { + f32 => lhs.* == rhs.*, + else => lhs.eql(rhs), + }; + } + + pub inline fn eql(lhs: *const @This(), rhs: *const @This()) bool { + return switch (T) { + f32 => lhs.a == rhs.b, + else => lhs.a.eql(&rhs.b), + }; + } + }; +} diff --git a/src/css/values/syntax.zig b/src/css/values/syntax.zig new file mode 100644 index 0000000000..5f8a743367 --- /dev/null +++ b/src/css/values/syntax.zig @@ -0,0 +1,525 @@ +const std = @import("std"); +const bun = @import("root").bun; +pub const css = @import("../css_parser.zig"); +const Result = css.Result; +const ArrayList = std.ArrayListUnmanaged; +const 
Printer = css.Printer; +const PrintErr = css.PrintErr; +const CSSNumber = css.css_values.number.CSSNumber; +const CSSNumberFns = css.css_values.number.CSSNumberFns; +const Calc = css.css_values.calc.Calc; +const DimensionPercentage = css.css_values.percentage.DimensionPercentage; +const LengthPercentage = css.css_values.length.LengthPercentage; +const Length = css.css_values.length.Length; +const Percentage = css.css_values.percentage.Percentage; +const CssColor = css.css_values.color.CssColor; +const Image = css.css_values.image.Image; +const Url = css.css_values.url.Url; +const CSSInteger = css.css_values.number.CSSInteger; +const CSSIntegerFns = css.css_values.number.CSSIntegerFns; +const Angle = css.css_values.angle.Angle; +const Time = css.css_values.time.Time; +const Resolution = css.css_values.resolution.Resolution; +const CustomIdent = css.css_values.ident.CustomIdent; +const CustomIdentFns = css.css_values.ident.CustomIdentFns; +const Ident = css.css_values.ident.Ident; + +// https://drafts.csswg.org/css-syntax-3/#whitespace +const SPACE_CHARACTERS: []const u8 = &.{ 0x20, 0x09 }; + +/// A CSS [syntax string](https://drafts.css-houdini.org/css-properties-values-api/#syntax-strings) +/// used to define the grammar for a registered custom property. +pub const SyntaxString = union(enum) { + /// A list of syntax components. + components: ArrayList(SyntaxComponent), + /// The universal syntax definition. + universal, + + const This = @This(); + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } + + pub fn toCss(this: *const This, comptime W: type, dest: *Printer(W)) PrintErr!void { + try dest.writeChar('"'); + switch (this.*) { + .universal => try dest.writeChar('*'), + .components => |*components| { + var first = true; + for (components.items) |*component| { + if (first) { + first = false; + } else { + try dest.delim('|', true); + } + + try component.toCss(W, dest); + } + }, + } + + return dest.writeChar('"'); + } + + pub fn parse(input: *css.Parser) Result(SyntaxString) { + const string = switch (input.expectString()) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + const result = SyntaxString.parseString(input.allocator(), string); + if (result.isErr()) return .{ .err = input.newCustomError(css.ParserError.invalid_value) }; + return .{ .result = result.result }; + } + + /// Parses a syntax string. + pub fn parseString(allocator: std.mem.Allocator, input: []const u8) css.Maybe(SyntaxString, void) { + // https://drafts.css-houdini.org/css-properties-values-api/#parsing-syntax + var trimmed_input = std.mem.trimLeft(u8, input, SPACE_CHARACTERS); + if (trimmed_input.len == 0) { + return .{ .err = {} }; + } + + if (bun.strings.eqlComptime(trimmed_input, "*")) { + return .{ .result = SyntaxString.universal }; + } + + var components = ArrayList(SyntaxComponent){}; + + // PERF(alloc): count first? 
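+ // Components are separated by `|`, e.g. the syntax string `<length> | <percentage>#`.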
+ while (true) { + const component = switch (SyntaxComponent.parseString(trimmed_input)) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + components.append( + allocator, + component, + ) catch bun.outOfMemory(); + + trimmed_input = std.mem.trimLeft(u8, trimmed_input, SPACE_CHARACTERS); + if (trimmed_input.len == 0) { + break; + } + + if (bun.strings.startsWithChar(trimmed_input, '|')) { + trimmed_input = trimmed_input[1..]; + continue; + } + + return .{ .err = {} }; + } + + return .{ .result = SyntaxString{ .components = components } }; + } + + /// Parses a value according to the syntax grammar. + pub fn parseValue(this: *const SyntaxString, input: *css.Parser) Result(ParsedComponent) { + switch (this.*) { + .universal => return .{ .result = ParsedComponent{ + .token_list = switch (css.css_properties.custom.TokenList.parse( + input, + &css.ParserOptions.default(input.allocator(), null), + 0, + )) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }, + } }, + .components => |components| { + // Loop through each component, and return the first one that parses successfully. + for (components.items) |component| { + const state = input.state(); + // PERF: deinit this on error + var parsed = ArrayList(ParsedComponent){}; + + while (true) { + const value_result = input.tryParse(struct { + fn parse( + i: *css.Parser, + comp: SyntaxComponent, + ) Result(ParsedComponent) { + const value = switch (comp.kind) { + .length => ParsedComponent{ .length = switch (Length.parse(i)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + } }, + .number => ParsedComponent{ .number = switch (CSSNumberFns.parse(i)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + } }, + .percentage => ParsedComponent{ .percentage = switch (Percentage.parse(i)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + } }, + .length_percentage => ParsedComponent{ .length_percentage = switch (LengthPercentage.parse(i)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + } }, + .color => ParsedComponent{ .color = switch (CssColor.parse(i)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + } }, + .image => ParsedComponent{ .image = switch (Image.parse(i)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + } }, + .url => ParsedComponent{ .url = switch (Url.parse(i)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + } }, + .integer => ParsedComponent{ .integer = switch (CSSIntegerFns.parse(i)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + } }, + .angle => ParsedComponent{ .angle = switch (Angle.parse(i)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + } }, + .time => ParsedComponent{ .time = switch (Time.parse(i)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + } }, + .resolution => ParsedComponent{ .resolution = switch (Resolution.parse(i)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + } }, + .transform_function => ParsedComponent{ .transform_function = switch (css.css_properties.transform.Transform.parse(i)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + } }, + .transform_list => ParsedComponent{ .transform_list = switch (css.css_properties.transform.TransformList.parse(i)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + } }, + .custom_ident => ParsedComponent{ .custom_ident = switch (CustomIdentFns.parse(i)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + } }, + .literal => |value| blk: { + const location = i.currentSourceLocation(); + 
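+ // A literal component only matches its exact ident, e.g. a keyword like `auto` written directly in the syntax string.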
const ident = switch (i.expectIdent()) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + if (!bun.strings.eql(ident, value)) { + return .{ .err = location.newUnexpectedTokenError(.{ .ident = ident }) }; + } + break :blk ParsedComponent{ .literal = .{ .v = ident } }; + }, + }; + return .{ .result = value }; + } + }.parse, .{component}); + + if (value_result.asValue()) |value| { + switch (component.multiplier) { + .none => return .{ .result = value }, + .space => { + parsed.append(input.allocator(), value) catch bun.outOfMemory(); + if (input.isExhausted()) { + return .{ .result = ParsedComponent{ .repeated = .{ + .components = parsed, + .multiplier = component.multiplier, + } } }; + } + }, + .comma => { + parsed.append(input.allocator(), value) catch bun.outOfMemory(); + if (input.next().asValue()) |token| { + if (token.* == .comma) continue; + break; + } else { + return .{ .result = ParsedComponent{ .repeated = .{ + .components = parsed, + .multiplier = component.multiplier, + } } }; + } + }, + } + } else { + break; + } + } + + input.reset(&state); + } + + return .{ .err = input.newErrorForNextToken() }; + }, + } + } +}; + +/// A [syntax component](https://drafts.css-houdini.org/css-properties-values-api/#syntax-component) +/// within a [SyntaxString](SyntaxString). +/// +/// A syntax component consists of a component kind an a multiplier, which indicates how the component +/// may repeat during parsing. +pub const SyntaxComponent = struct { + kind: SyntaxComponentKind, + multiplier: Multiplier, + + pub fn parseString(input_: []const u8) css.Maybe(SyntaxComponent, void) { + var input = input_; + const kind = switch (SyntaxComponentKind.parseString(input)) { + .result => |vv| vv, + .err => |e| return .{ .err = e }, + }; + + // Pre-multiplied types cannot have multipliers. + if (kind == .transform_list) { + return .{ .result = SyntaxComponent{ + .kind = kind, + .multiplier = .none, + } }; + } + + var multiplier: Multiplier = .none; + if (bun.strings.startsWithChar(input, '+')) { + input = input[1..]; + multiplier = .space; + } else if (bun.strings.startsWithChar(input, '#')) { + input = input[1..]; + multiplier = .comma; + } + + return .{ .result = SyntaxComponent{ .kind = kind, .multiplier = multiplier } }; + } + + pub fn toCss(this: *const SyntaxComponent, comptime W: type, dest: *Printer(W)) PrintErr!void { + try this.kind.toCss(W, dest); + return switch (this.multiplier) { + .none => {}, + .comma => dest.writeChar('#'), + .space => dest.writeChar('+'), + }; + } + + pub fn deepClone(this: *const @This(), allocator: std.mem.Allocator) @This() { + return css.implementDeepClone(@This(), this, allocator); + } +}; + +/// A [syntax component component name](https://drafts.css-houdini.org/css-properties-values-api/#supported-names). +pub const SyntaxComponentKind = union(enum) { + /// A `` component. + length, + /// A `` component. + number, + /// A `` component. + percentage, + /// A `` component. + length_percentage, + /// A `` component. + color, + /// An `` component. + image, + /// A `` component. + url, + /// An `` component. + integer, + /// An `` component. + angle, + /// A `