diff --git a/.github/actions/stage/action.yml b/.github/actions/stage/action.yml new file mode 100644 index 00000000..0275011a --- /dev/null +++ b/.github/actions/stage/action.yml @@ -0,0 +1,59 @@ +name: Stage Release Artefacts +description: Stage release artefacts using a TOML configuration file. + +inputs: + config-file: + description: Path to the project-specific TOML staging configuration file. + required: true + target: + description: The target key from the configuration file to be staged. + required: true + +outputs: + artifact_dir: + description: Absolute path to the directory containing the staged artefacts. + value: ${{ steps.run-stage.outputs.artifact_dir }} + dist_dir: + description: Absolute path to the workspace distribution directory. + value: ${{ steps.run-stage.outputs.dist_dir }} + staged_files: + description: Newline-separated list of staged file names. + value: ${{ steps.run-stage.outputs.staged_files }} + artefact_map: + description: JSON map of named artefact outputs to their absolute paths. + value: ${{ steps.run-stage.outputs.artefact_map }} + checksum_map: + description: JSON map of staged file names to their checksum digests. + value: ${{ steps.run-stage.outputs.checksum_map }} + binary_path: + description: Absolute path to the staged binary artefact, when available. + value: ${{ steps.run-stage.outputs.binary_path }} + man_path: + description: Absolute path to the staged manual page artefact, when available. + value: ${{ steps.run-stage.outputs.man_path }} + license_path: + description: Absolute path to the staged licence artefact, when available. + value: ${{ steps.run-stage.outputs.license_path }} + +runs: + using: composite + steps: + - name: Install uv + uses: astral-sh/setup-uv@8d55fbecc275b1c35dbe060458839f8d30439ccf + with: + python-version: '3.11' + - id: check-uv + name: Verify uv is available + shell: bash + run: | + set -euo pipefail + if ! 
command -v uv >/dev/null 2>&1; then + echo "::error title=Missing dependency::uv not found on PATH. Install it (e.g. with astral-sh/setup-uv) before this action." + exit 1 + fi + - id: run-stage + name: Run staging script + shell: bash + run: | + set -euo pipefail + uv run "${{ github.action_path }}/scripts/stage.py" "${{ inputs.config-file }}" "${{ inputs.target }}" diff --git a/.github/actions/stage/scripts/stage.py b/.github/actions/stage/scripts/stage.py new file mode 100644 index 00000000..64024e5d --- /dev/null +++ b/.github/actions/stage/scripts/stage.py @@ -0,0 +1,61 @@ +# /// script +# requires-python = ">=3.11" +# dependencies = [ +# "cyclopts>=0.14", +# ] +# /// + +"""Command-line entry point for the staging helper. + +Examples +-------- +Run the staging helper locally after exporting the required environment +variables:: + + export GITHUB_WORKSPACE="$(pwd)" + export GITHUB_OUTPUT="$(mktemp)" + uv run .github/actions/stage/scripts/stage.py \ + .github/release-staging.toml linux-x86_64 +""" + +from __future__ import annotations + +import sys +from pathlib import Path + +from stage_common import StageError, load_config, require_env_path, stage_artefacts + +import cyclopts + +app = cyclopts.App(help="Stage release artefacts using a TOML configuration file.") + + +@app.default +def main(config_file: Path, target: str) -> None: + """Stage artefacts for ``target`` using ``config_file``. + + Parameters + ---------- + config_file: + Path to the project-specific TOML configuration file. + target: + Target key in the configuration file (for example ``"linux-x86_64"``). 
+ """ + try: + config_path = Path(config_file) + github_output = require_env_path("GITHUB_OUTPUT") + config = load_config(config_path, target) + result = stage_artefacts(config, github_output) + except (FileNotFoundError, StageError) as exc: + print(f"::error title=Staging Failure::{exc}", file=sys.stderr) + raise SystemExit(1) from exc + + staged_rel = result.staging_dir.relative_to(config.workspace) + print( + f"Staged {len(result.staged_artefacts)} artefact(s) into '{staged_rel}'.", + file=sys.stderr, + ) + + +if __name__ == "__main__": + app() diff --git a/.github/actions/stage/scripts/stage_common/__init__.py b/.github/actions/stage/scripts/stage_common/__init__.py new file mode 100644 index 00000000..96b0d094 --- /dev/null +++ b/.github/actions/stage/scripts/stage_common/__init__.py @@ -0,0 +1,17 @@ +"""Public interface for the staging helper package.""" + +from .config import ArtefactConfig, StagingConfig, load_config +from .environment import require_env_path +from .errors import StageError +from .staging import RESERVED_OUTPUT_KEYS, StageResult, stage_artefacts + +__all__ = [ + "ArtefactConfig", + "RESERVED_OUTPUT_KEYS", + "StageError", + "StageResult", + "StagingConfig", + "load_config", + "require_env_path", + "stage_artefacts", +] diff --git a/.github/actions/stage/scripts/stage_common/checksum_utils.py b/.github/actions/stage/scripts/stage_common/checksum_utils.py new file mode 100644 index 00000000..3c49dead --- /dev/null +++ b/.github/actions/stage/scripts/stage_common/checksum_utils.py @@ -0,0 +1,21 @@ +"""Checksum helpers for staged artefacts.""" + +from __future__ import annotations + +import hashlib +from pathlib import Path + +__all__ = ["write_checksum"] + + +def write_checksum(path: Path, algorithm: str) -> str: + """Write the checksum sidecar for ``path`` using ``algorithm``.""" + + hasher = hashlib.new(algorithm) + with path.open("rb") as handle: + for chunk in iter(lambda: handle.read(8192), b""): + hasher.update(chunk) + digest = 
hasher.hexdigest() + checksum_path = path.with_name(f"{path.name}.{algorithm}") + checksum_path.write_text(f"{digest} {path.name}\n", encoding="utf-8") + return digest diff --git a/.github/actions/stage/scripts/stage_common/config.py b/.github/actions/stage/scripts/stage_common/config.py new file mode 100644 index 00000000..ecf17b0d --- /dev/null +++ b/.github/actions/stage/scripts/stage_common/config.py @@ -0,0 +1,368 @@ +"""Configuration models and loader for the staging helper. + +This module provides dataclasses and a loader function for parsing TOML staging +configurations that describe artefact sources, target platforms, and staging +directory templates. + +Usage +----- +Load a staging configuration for a specific target:: + + from pathlib import Path + from stage_common.config import load_config + + config = load_config(Path(".github/release-staging.toml"), "windows-x86_64") + print(f"Staging directory: {config.staging_dir()}") +""" + +from __future__ import annotations + +import dataclasses +import hashlib +import typing as typ +from pathlib import Path + +import tomllib + +from .environment import require_env_path +from .errors import StageError + +__all__ = [ + "ArtefactConfig", + "StagingConfig", + "load_config", +] + + +@dataclasses.dataclass(slots=True) +class ArtefactConfig: + """Describe a single artefact to be staged. + + Parameters + ---------- + source : str + ``str.format`` template pointing to the primary artefact location. + required : bool, default=True + When ``True`` the staging run fails if the artefact is missing. + output : str | None, optional + Name exported to ``GITHUB_OUTPUT`` for downstream workflow steps. + destination : str | None, optional + Optional ``str.format`` template for the staged filename. + alternatives : list[str], optional + Fallback glob patterns probed when :attr:`source` is absent. + + Attributes + ---------- + source : str + Raw template used to discover the artefact in the workspace. 
+ required : bool + Indicates whether missing artefacts should abort staging. + output : str | None + Output key recorded in ``StageResult.outputs`` when provided. + destination : str | None + Resolved filename relative to the staging directory. + alternatives : list[str] + Additional glob templates evaluated in order of recency. + + Examples + -------- + >>> cfg = ArtefactConfig( # doctest: +SKIP + ... source="target/{target}/release/{bin_name}{bin_ext}", + ... output="binary_path", + ... ) + >>> cfg.output # doctest: +SKIP + 'binary_path' + """ + + source: str + required: bool = True + output: str | None = None + destination: str | None = None + alternatives: list[str] = dataclasses.field(default_factory=list) + + +@dataclasses.dataclass(slots=True) +class StagingConfig: + """Concrete configuration produced by :func:`load_config`. + + Parameters + ---------- + workspace : Path + Repository root checked out by the GitHub Actions runner. + bin_name : str + Base executable name shared across targets. + dist_dir : str + Directory beneath :attr:`workspace` containing staged artefacts. + checksum_algorithm : str + Hashing algorithm used for checksum sidecars. + artefacts : list[ArtefactConfig] + Collection of artefacts that should be copied into staging. + platform : str + Human readable platform identifier (e.g. ``"linux"``). + arch : str + Architecture identifier appended to the staging directory name. + target : str + Full compilation target triple. + bin_ext : str, default="" + Optional suffix appended to the staged executable name. + staging_dir_template : str, default="{bin_name}_{platform}_{arch}" + ``str.format`` template used to build :attr:`staging_dir_name`. + target_key : str | None, optional + Name of the TOML ``[targets.*]`` entry used to build this config. + + Attributes + ---------- + workspace : Path + The root workspace directory as a :class:`pathlib.Path`. + bin_name : str + Executable base name reused across targets. 
+ dist_dir : str + Directory containing staged release artefacts. + checksum_algorithm : str + Normalised checksum algorithm name. + artefacts : list[ArtefactConfig] + Ordered artefact definitions that will be staged. + platform : str + Platform identifier used for logging and templating. + arch : str + Architecture identifier used for templating. + target : str + Compilation target triple. + bin_ext : str + Optional executable extension such as ``".exe"``. + staging_dir_template : str + Template used to derive :attr:`staging_dir_name`. + target_key : str | None + Name of the target section consumed from the TOML file. + + Examples + -------- + >>> from pathlib import Path # doctest: +SKIP + >>> config = StagingConfig( # doctest: +SKIP + ... workspace=Path("/tmp/workspace"), + ... bin_name="netsuke", + ... dist_dir="dist", + ... checksum_algorithm="sha256", + ... artefacts=[ArtefactConfig(source="LICENSE")], + ... platform="linux", + ... arch="amd64", + ... target="x86_64-unknown-linux-gnu", + ... 
) + >>> config.staging_dir_name # doctest: +SKIP + 'netsuke_linux_amd64' + """ + + workspace: Path + bin_name: str + dist_dir: str + checksum_algorithm: str + artefacts: list[ArtefactConfig] + platform: str + arch: str + target: str + bin_ext: str = "" + staging_dir_template: str = "{bin_name}_{platform}_{arch}" + target_key: str | None = None + + def staging_dir(self) -> Path: + """Return the absolute staging directory path.""" + return self.workspace / self.dist_dir / self.staging_dir_name + + @property + def staging_dir_name(self) -> str: + """Directory name rendered from :attr:`staging_dir_template`.""" + return self.as_template_context()["staging_dir_name"] + + def as_template_context(self) -> dict[str, typ.Any]: + """Return a mapping suitable for rendering ``str.format`` templates.""" + base_context: dict[str, typ.Any] = { + "workspace": self.workspace.as_posix(), + "bin_name": self.bin_name, + "dist_dir": self.dist_dir, + "checksum_algorithm": self.checksum_algorithm, + "platform": self.platform, + "arch": self.arch, + "target": self.target, + "bin_ext": self.bin_ext or "", + "target_key": self.target_key or "", + } + template_context = base_context | { + "staging_dir_template": self.staging_dir_template + } + rendered_name = self.staging_dir_template.format(**template_context) + return template_context | {"staging_dir_name": rendered_name} + + +def load_config(config_file: Path, target_key: str) -> StagingConfig: + """Load staging configuration from ``config_file`` for ``target_key``. + + Parameters + ---------- + config_file : Path + Path to the TOML configuration file describing staging inputs. + target_key : str + Key identifying the target-specific configuration section to load. + + Returns + ------- + StagingConfig + Fully realised configuration containing resolved paths and artefacts. + + Raises + ------ + FileNotFoundError + Raised when the configuration file is absent at ``config_file``. 
+ StageError + Raised when required configuration keys are missing or invalid. + """ + config_file = Path(config_file) + if not config_file.is_file(): + message = f"Configuration file not found at {config_file}" + raise FileNotFoundError(message) + + data = _load_toml(config_file) + common, target_cfg = _extract_sections(data, config_file, target_key) + _require_keys(common, {"bin_name"}, "common", config_file) + _require_keys( + target_cfg, + {"platform", "arch", "target"}, + f"targets.{target_key}", + config_file, + ) + workspace = require_env_path("GITHUB_WORKSPACE") + algorithm = _validate_checksum(common.get("checksum_algorithm")) + artefacts = _make_artefacts(common, target_cfg, config_file) + + return StagingConfig( + workspace=workspace, + bin_name=common["bin_name"], + dist_dir=common.get("dist_dir", "dist"), + checksum_algorithm=algorithm, + artefacts=artefacts, + platform=target_cfg["platform"], + arch=target_cfg["arch"], + target=target_cfg["target"], + bin_ext=target_cfg.get("bin_ext", ""), + staging_dir_template=target_cfg.get( + "staging_dir_template", + common.get("staging_dir_template", "{bin_name}_{platform}_{arch}"), + ), + target_key=target_key, + ) + + +def _load_toml(path: Path) -> dict[str, typ.Any]: + with path.open("rb") as handle: + return tomllib.load(handle) + + +def _extract_sections( + data: dict[str, typ.Any], config_path: Path, target_key: str +) -> tuple[dict[str, typ.Any], dict[str, typ.Any]]: + try: + common = data["common"] + target_cfg = data["targets"][target_key] + except KeyError as exc: + message = f"Missing configuration key in {config_path}: {exc}" + raise StageError(message) from exc + return common, target_cfg + + +def _validate_checksum(name: str | None) -> str: + algorithm = (name or "sha256").lower() + supported = {item.lower() for item in hashlib.algorithms_guaranteed} + if algorithm not in supported: + message = f"Unsupported checksum algorithm: {algorithm}" + raise StageError(message) + try: + 
hashlib.new(algorithm) + except ValueError as exc: + message = ( + f"Checksum algorithm not supported by hashlib.new: {algorithm}" + ) + raise StageError(message) from exc + return algorithm + + +def _make_artefacts( + common: dict[str, typ.Any], + target_cfg: dict[str, typ.Any], + config_path: Path, +) -> list[ArtefactConfig]: + entries = [*common.get("artefacts", []), *target_cfg.get("artefacts", [])] + if not entries: + message = "No artefacts configured to stage." + raise StageError(message) + artefacts: list[ArtefactConfig] = [] + for index, entry in enumerate(entries, start=1): + source = entry.get("source") + if not isinstance(source, str) or not source: + message = ( + "Missing required artefact key 'source' " + f"in entry #{index} of {config_path}" + ) + raise StageError(message) + alternatives = _normalise_alternatives( + entry.get("alternatives", []), index, config_path + ) + artefacts.append( + ArtefactConfig( + source=source, + required=entry.get("required", True), + output=entry.get("output"), + destination=entry.get("destination"), + alternatives=alternatives, + ) + ) + return artefacts + + +def _require_keys( + section: dict[str, typ.Any], keys: set[str], label: str, config_path: Path +) -> None: + """Ensure ``section`` defines ``keys``. + + Examples + -------- + >>> _require_keys( # doctest: +SKIP + ... {'bin': 1}, + ... {'bin'}, + ... 'common', + ... Path('cfg'), + ... 
) + """ + if missing := sorted(key for key in keys if key not in section): + joined = ", ".join(missing) + message = ( + "Missing required key(s) " + f"{joined} in [{label}] section of {config_path}" + ) + raise StageError(message) + + +def _normalise_alternatives( + value: object, index: int, config_path: Path +) -> list[str]: + """Return ``value`` as a list of alternative glob patterns.""" + + if value is None: + return [] + if isinstance(value, str): + return [value] if value else [] + if not isinstance(value, list): + message = ( + "Alternatives must be a list of strings " + f"(entry #{index} in {config_path})" + ) + raise StageError(message) + alternatives: list[str] = [] + for alternative in value: + if not isinstance(alternative, str): + message = ( + "Alternatives must be strings " + f"(entry #{index} in {config_path})" + ) + raise StageError(message) + if alternative: + alternatives.append(alternative) + return alternatives diff --git a/.github/actions/stage/scripts/stage_common/environment.py b/.github/actions/stage/scripts/stage_common/environment.py new file mode 100644 index 00000000..6f5deec2 --- /dev/null +++ b/.github/actions/stage/scripts/stage_common/environment.py @@ -0,0 +1,30 @@ +"""Environment helpers shared by the staging toolchain.""" + +from __future__ import annotations + +import os +from pathlib import Path + +from .errors import StageError + +__all__ = ["require_env_path"] + + +def require_env_path(name: str) -> Path: + """Return ``Path`` value for ``name`` or raise :class:`StageError`. + + Parameters + ---------- + name: + Name of the environment variable to fetch. + + Raises + ------ + StageError + Raised when the environment variable is unset or empty. + """ + value = os.environ.get(name) + if not value: + message = f"Environment variable '{name}' is not set." 
+ raise StageError(message) + return Path(value) diff --git a/.github/actions/stage/scripts/stage_common/errors.py b/.github/actions/stage/scripts/stage_common/errors.py new file mode 100644 index 00000000..c4a18f94 --- /dev/null +++ b/.github/actions/stage/scripts/stage_common/errors.py @@ -0,0 +1,5 @@ +"""Error types shared across the staging helper package.""" + + +class StageError(RuntimeError): + """Raised when the staging pipeline cannot continue.""" diff --git a/.github/actions/stage/scripts/stage_common/fs_utils.py b/.github/actions/stage/scripts/stage_common/fs_utils.py new file mode 100644 index 00000000..0a1a1a40 --- /dev/null +++ b/.github/actions/stage/scripts/stage_common/fs_utils.py @@ -0,0 +1,21 @@ +"""Filesystem helpers for staging.""" + +from __future__ import annotations + +from pathlib import Path + +from .errors import StageError + +__all__ = ["safe_destination_path"] + + +def safe_destination_path(staging_dir: Path, destination: str) -> Path: + """Return ``destination`` resolved beneath ``staging_dir``.""" + + target = (staging_dir / destination).resolve() + staging_root = staging_dir.resolve() + if not target.is_relative_to(staging_root): + message = f"Destination escapes staging directory: {destination}" + raise StageError(message) + target.parent.mkdir(parents=True, exist_ok=True) + return target diff --git a/.github/actions/stage/scripts/stage_common/github_output.py b/.github/actions/stage/scripts/stage_common/github_output.py new file mode 100644 index 00000000..32fded86 --- /dev/null +++ b/.github/actions/stage/scripts/stage_common/github_output.py @@ -0,0 +1,26 @@ +"""Helpers for writing GitHub Actions outputs.""" + +from __future__ import annotations + +import uuid +from collections.abc import Mapping, Sequence +from pathlib import Path + +__all__ = ["write_github_output"] + + +def write_github_output( + file: Path, values: Mapping[str, str | Sequence[str]] +) -> None: + """Append ``values`` to ``file`` using GitHub's multiline 
syntax.""" + + file.parent.mkdir(parents=True, exist_ok=True) + with file.open("a", encoding="utf-8") as handle: + for key, value in values.items(): + delimiter = f"EOF_{uuid.uuid4().hex}" + handle.write(f"{key}<<{delimiter}\n") + if isinstance(value, Sequence) and not isinstance(value, str): + handle.write("\n".join(value)) + else: + handle.write(str(value)) + handle.write(f"\n{delimiter}\n") diff --git a/.github/actions/stage/scripts/stage_common/glob_utils.py b/.github/actions/stage/scripts/stage_common/glob_utils.py new file mode 100644 index 00000000..85f10f47 --- /dev/null +++ b/.github/actions/stage/scripts/stage_common/glob_utils.py @@ -0,0 +1,60 @@ +"""Path matching helpers for artefact discovery.""" + +from __future__ import annotations + +from pathlib import Path, PurePath, PurePosixPath, PureWindowsPath + +__all__ = ["glob_root_and_pattern", "match_candidate_path"] + + +def match_candidate_path(workspace: Path, rendered: str) -> Path | None: + """Return the newest path matching ``rendered`` relative to ``workspace``.""" + + candidate = Path(rendered) + base = candidate if candidate.is_absolute() else workspace / candidate + if any(ch in rendered for ch in "*?[]"): + if candidate.is_absolute(): + root_text, pattern = glob_root_and_pattern(candidate) + root = Path(root_text) + candidates = [ + path for path in root.glob(pattern) if path.is_file() + ] + else: + windows_candidate = PureWindowsPath(rendered) + if windows_candidate.is_absolute(): + anchor = Path(windows_candidate.anchor) + relative = PureWindowsPath(*windows_candidate.parts[1:]).as_posix() + candidates = [ + path for path in anchor.glob(relative) if path.is_file() + ] + else: + candidates = [ + path for path in workspace.glob(rendered) if path.is_file() + ] + return max(candidates, key=_mtime_key) if candidates else None + return base if base.is_file() else None + + +def glob_root_and_pattern(candidate: PurePath) -> tuple[str, str]: + """Return the filesystem root and relative glob pattern 
for ``candidate``.""" + + anchor = candidate.anchor + if not anchor: + message = f"Expected absolute path, received '{candidate}'" + raise ValueError(message) + + root_text = (candidate.drive + candidate.root) or anchor or "/" + relative_parts = candidate.parts[1:] + pattern = ( + PurePosixPath(*relative_parts).as_posix() if relative_parts else "*" + ) + return root_text, pattern + + +def _mtime_key(path: Path) -> tuple[int, str]: + """Return a sortable key derived from ``path``'s modification time.""" + + try: + return (int(path.stat().st_mtime_ns), path.as_posix()) + except OSError: # pragma: no cover - filesystem race guard. + return (0, path.as_posix()) diff --git a/.github/actions/stage/scripts/stage_common/staging.py b/.github/actions/stage/scripts/stage_common/staging.py new file mode 100644 index 00000000..9f0f72bd --- /dev/null +++ b/.github/actions/stage/scripts/stage_common/staging.py @@ -0,0 +1,271 @@ +"""Artefact staging logic shared across the CLI and composite action. + +This module provides the core staging pipeline that copies artefacts from a +workspace into a staging directory, computes checksums, and exports outputs for +GitHub Actions workflows. 
+ +Usage +----- +From the CLI entry point:: + + import os + from pathlib import Path + from stage_common import load_config, stage_artefacts + + config = load_config(Path(".github/release-staging.toml"), "linux-x86_64") + result = stage_artefacts(config, Path(os.environ["GITHUB_OUTPUT"])) + print(f"Staged {len(result.staged_artefacts)} artefacts.") +""" + +from __future__ import annotations + +import dataclasses +import json +import shutil +import sys +import typing as typ +from pathlib import Path + +from .checksum_utils import write_checksum +from .config import ArtefactConfig, StagingConfig +from .errors import StageError +from .fs_utils import safe_destination_path +from .github_output import write_github_output +from .glob_utils import glob_root_and_pattern, match_candidate_path +from .template_utils import render_template, resolve_artefact_source + +RESERVED_OUTPUT_KEYS = { + "artifact_dir", + "dist_dir", + "staged_files", + "artefact_map", + "checksum_map", +} + +__all__ = [ + "RESERVED_OUTPUT_KEYS", + "StageResult", + "stage_artefacts", + "_glob_root_and_pattern", + "_match_candidate_path", +] + +_glob_root_and_pattern = glob_root_and_pattern +_match_candidate_path = match_candidate_path + + +@dataclasses.dataclass(slots=True) +class StageResult: + """Outcome of :func:`stage_artefacts`.""" + + staging_dir: Path + staged_artefacts: list[Path] + outputs: dict[str, Path] + checksums: dict[str, str] + + +@dataclasses.dataclass(slots=True) +class _StageOutcome: + """Result of staging a single artefact.""" + + path: Path + output_key: str | None + digest: str + + +def _relative_display(path: Path, base: Path) -> str: + """Return ``path`` relative to ``base`` when possible. + + Paths outside ``base`` can occur when staging artefacts from sibling + workspaces. ``Path.relative_to`` raises ``ValueError`` in that scenario, so + fall back to the absolute POSIX form to keep diagnostics informative. 
+ """ + + try: + return path.relative_to(base).as_posix() + except ValueError: + return path.as_posix() + + +def _validate_and_record_destination( + outcome_path: Path, artefact_source: str, seen_destinations: set[Path] +) -> None: + """Guard against duplicate staged destinations.""" + + if outcome_path in seen_destinations: + message = ( + "Duplicate staged destination detected: " + f"{outcome_path.as_posix()} from source {artefact_source}" + ) + raise StageError(message) + seen_destinations.add(outcome_path) + + +def _validate_and_record_output( + output_key: str, + outcome_path: Path, + seen_outputs: set[str], + outputs: dict[str, Path], +) -> None: + """Guard against duplicate artefact output keys.""" + + if output_key in seen_outputs: + message = f"Duplicate artefact output key detected: {output_key}" + raise StageError(message) + seen_outputs.add(output_key) + outputs[output_key] = outcome_path + + +def stage_artefacts(config: StagingConfig, github_output_file: Path) -> StageResult: + """Copy artefacts into ``config``'s staging directory. + + Parameters + ---------- + config : StagingConfig + Fully resolved configuration describing the artefacts to stage. + github_output_file : Path + Path to the ``GITHUB_OUTPUT`` file used to export workflow outputs. + + Returns + ------- + StageResult + Summary object describing the staging directory, staged artefacts, + exported outputs, and checksum digests. + + Raises + ------ + StageError + Raised when required artefacts are missing or configuration templates + render invalid destinations. + + Examples + -------- + >>> from pathlib import Path # doctest: +SKIP + >>> from stage_common.config import load_config # doctest: +SKIP + >>> cfg = load_config( # doctest: +SKIP + ... Path(".github/release-staging.toml"), + ... "linux-x86_64", + ... 
) + >>> result = stage_artefacts(cfg, Path("github_output.txt")) # doctest: +SKIP + >>> result.staged_artefacts[0].name # doctest: +SKIP + 'netsuke' + """ + staging_dir = config.staging_dir() + context = config.as_template_context() + + if staging_dir.exists(): + shutil.rmtree(staging_dir) + staging_dir.mkdir(parents=True, exist_ok=True) + + staged_paths: list[Path] = [] + outputs: dict[str, Path] = {} + checksums: dict[str, str] = {} + seen_destinations: set[Path] = set() + seen_outputs: set[str] = set() + + for artefact in config.artefacts: + outcome = _stage_single_artefact( + config=config, + artefact=artefact, + staging_dir=staging_dir, + context=context, + ) + if outcome is None: + continue + _validate_and_record_destination( + outcome.path, artefact.source, seen_destinations + ) + staged_paths.append(outcome.path) + checksums[outcome.path.name] = outcome.digest + if outcome.output_key: + _validate_and_record_output( + outcome.output_key, outcome.path, seen_outputs, outputs + ) + + if not staged_paths: + message = "No artefacts were staged." + raise StageError(message) + + staged_file_names = [path.name for path in sorted(staged_paths)] + artefact_map_json = json.dumps( + {key: path.as_posix() for key, path in sorted(outputs.items())} + ) + checksum_map_json = json.dumps(dict(sorted(checksums.items()))) + + if colliding_keys := RESERVED_OUTPUT_KEYS & outputs.keys(): + message = ( + "Artefact outputs collide with reserved keys: " + f"{sorted(colliding_keys)}. Please rename these keys in your " + "artefact configuration to avoid using reserved names: " + f"{sorted(RESERVED_OUTPUT_KEYS)}." 
+ ) + raise StageError(message) + + exported_outputs: dict[str, str | list[str]] = { + "artifact_dir": staging_dir.as_posix(), + "dist_dir": staging_dir.parent.as_posix(), + "staged_files": staged_file_names, + "artefact_map": artefact_map_json, + "checksum_map": checksum_map_json, + } | {key: path.as_posix() for key, path in outputs.items()} + + write_github_output(github_output_file, exported_outputs) + + return StageResult(staging_dir, staged_paths, outputs, checksums) + + +def _stage_single_artefact( + *, + config: StagingConfig, + artefact: ArtefactConfig, + staging_dir: Path, + context: dict[str, typ.Any], +) -> _StageOutcome | None: + """Stage ``artefact`` and return its outcome.""" + + source_path, attempts = resolve_artefact_source( + config.workspace, artefact, context + ) + if source_path is None: + if artefact.required: + attempt_lines = ", ".join( + f"{attempt.template!r} -> {attempt.rendered!r}" for attempt in attempts + ) + message = ( + "Required artefact not found. " + f"Workspace={config.workspace.as_posix()} " + f"Attempts=[{attempt_lines}]" + ) + raise StageError(message) + warning = ( + "::warning title=Artefact Skipped::Optional artefact missing: " + f"{artefact.source}" + ) + print(warning, file=sys.stderr) + return None + + artefact_context = context | { + "source_path": source_path.as_posix(), + "source_name": source_path.name, + } + destination_text = ( + render_template(destination, artefact_context) + if (destination := artefact.destination) + else source_path.name + ) + + destination_path = safe_destination_path(staging_dir, destination_text) + if destination_path.exists(): + destination_path.unlink() + shutil.copy2(source_path, destination_path) + source_display = _relative_display(source_path, config.workspace) + destination_display = _relative_display(destination_path, config.workspace) + print(f"Staged '{source_display}' -> '{destination_display}'") + + digest = write_checksum(destination_path, config.checksum_algorithm) + + return 
_StageOutcome( + path=destination_path, + output_key=artefact.output, + digest=digest, + ) diff --git a/.github/actions/stage/scripts/stage_common/template_utils.py b/.github/actions/stage/scripts/stage_common/template_utils.py new file mode 100644 index 00000000..dd883438 --- /dev/null +++ b/.github/actions/stage/scripts/stage_common/template_utils.py @@ -0,0 +1,52 @@ +"""Template helpers used by the staging pipeline.""" + +from __future__ import annotations + +import dataclasses +import typing as typ +from pathlib import Path + +from .errors import StageError +from .glob_utils import match_candidate_path + +if typ.TYPE_CHECKING: + from .config import ArtefactConfig + +__all__ = [ + "RenderAttempt", + "render_template", + "resolve_artefact_source", +] + + +@dataclasses.dataclass(slots=True) +class RenderAttempt: + """Template render attempted when locating an artefact.""" + + template: str + rendered: str + + +def render_template(template: str, context: dict[str, typ.Any]) -> str: + """Return ``template`` formatted with ``context``.""" + + try: + return template.format(**context) + except KeyError as exc: # pragma: no cover - defensive guard. 
+ message = f"Invalid template key {exc} in '{template}'" + raise StageError(message) from exc + + +def resolve_artefact_source( + workspace: Path, artefact: "ArtefactConfig", context: dict[str, typ.Any] +) -> tuple[Path | None, list[RenderAttempt]]: + """Return the first artefact path matching ``artefact``'s templates.""" + + attempts: list[RenderAttempt] = [] + patterns = [artefact.source, *artefact.alternatives] + for pattern in patterns: + rendered = render_template(pattern, context) + attempts.append(RenderAttempt(pattern, rendered)) + if (candidate := match_candidate_path(workspace, rendered)) is not None: + return candidate, attempts + return None, attempts diff --git a/.github/release-staging.toml b/.github/release-staging.toml new file mode 100644 index 00000000..ed54f0f1 --- /dev/null +++ b/.github/release-staging.toml @@ -0,0 +1,42 @@ +[common] +bin_name = "netsuke" +dist_dir = "dist" +checksum_algorithm = "sha256" +staging_dir_template = "{bin_name}_{platform}_{arch}" +artefacts = [ + { source = "target/{target}/release/{bin_name}{bin_ext}", required = true, output = "binary_path" }, + { source = "target/generated-man/{target}/release/{bin_name}.1", required = false, output = "man_path", alternatives = ["target/{target}/release/build/*/out/{bin_name}.1"] }, + { source = "LICENSE", required = true, output = "license_path" }, +] + +[targets.windows-x86_64] +platform = "windows" +arch = "amd64" +target = "x86_64-pc-windows-msvc" +bin_ext = ".exe" + +[targets.windows-aarch64] +platform = "windows" +arch = "arm64" +target = "aarch64-pc-windows-msvc" +bin_ext = ".exe" + +[targets.macos-x86_64] +platform = "macos" +arch = "x86_64" +target = "x86_64-apple-darwin" + +[targets.macos-aarch64] +platform = "macos" +arch = "arm64" +target = "aarch64-apple-darwin" + +[targets.linux-x86_64] +platform = "linux" +arch = "amd64" +target = "x86_64-unknown-linux-gnu" + +[targets.linux-aarch64] +platform = "linux" +arch = "arm64" +target = "aarch64-unknown-linux-gnu" diff 
--git a/.github/workflows/release.yml b/.github/workflows/release.yml index 59d6df70..1ff2a168 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -172,9 +172,11 @@ jobs: - target: x86_64-pc-windows-msvc package_arch: amd64 msi_arch: x64 + target_key: windows-x86_64 - target: aarch64-pc-windows-msvc package_arch: arm64 msi_arch: arm64 + target_key: windows-aarch64 steps: - uses: >- actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 @@ -194,43 +196,18 @@ jobs: version: ${{ needs.metadata.outputs.version }} - id: stage name: Stage Windows artefacts - shell: bash - env: - BIN_NAME: ${{ needs.metadata.outputs.bin_name }} - TARGET: ${{ matrix.target }} - PLATFORM: windows - ARCH: ${{ matrix.package_arch }} - run: | - set -euo pipefail - uv run .github/workflows/scripts/stage_windows.py + uses: ./.github/actions/stage + with: + config-file: .github/release-staging.toml + target: ${{ matrix.target_key }} - id: normalize_windows_paths name: Normalise Windows paths shell: bash env: - BIN_PATH: ${{ steps.stage.outputs.binary_path }} - LICENSE_PATH: ${{ steps.stage.outputs.license_path }} + ARTEFACT_MAP: ${{ steps.stage.outputs.artefact_map }} run: | set -euo pipefail - if [[ -z "${BIN_PATH:-}" ]]; then - echo '::error title=Stage output missing::binary_path output empty' - exit 1 - fi - if [[ -z "${LICENSE_PATH:-}" ]]; then - echo '::error title=Stage output missing::license_path output empty' - exit 1 - fi - normalise_path() { - python -c 'from pathlib import PureWindowsPath; import sys; \ - print(PureWindowsPath(sys.argv[1]))' "$1" - } - - binary_path="$(normalise_path "${BIN_PATH}")" - license_path="$(normalise_path "${LICENSE_PATH}")" - - { - printf 'binary_path=%s\n' "$binary_path" - printf 'license_path=%s\n' "$license_path" - } >>"$GITHUB_OUTPUT" + python .github/workflows/scripts/normalise_windows_paths.py - id: package name: Build Windows installer package uses: >- @@ -269,10 +246,12 @@ jobs: runner: macos-13 artifact_suffix: 
macos-x86_64 package_arch: x86_64 + target_key: macos-x86_64 - target: aarch64-apple-darwin runner: macos-14 artifact_suffix: macos-arm64 package_arch: arm64 + target_key: macos-aarch64 runs-on: ${{ matrix.runner }} steps: - uses: >- @@ -293,15 +272,10 @@ jobs: version: ${{ needs.metadata.outputs.version }} - id: stage name: Stage macOS artefacts - shell: bash - env: - BIN_NAME: ${{ needs.metadata.outputs.bin_name }} - TARGET: ${{ matrix.target }} - PLATFORM: macos - ARCH: ${{ matrix.package_arch }} - run: | - set -euo pipefail - uv run .github/workflows/scripts/stage_macos.py + uses: ./.github/actions/stage + with: + config-file: .github/release-staging.toml + target: ${{ matrix.target_key }} - id: pkg name: Build macOS installer package uses: >- diff --git a/.github/workflows/scripts/normalise_windows_paths.py b/.github/workflows/scripts/normalise_windows_paths.py new file mode 100644 index 00000000..17cf9de2 --- /dev/null +++ b/.github/workflows/scripts/normalise_windows_paths.py @@ -0,0 +1,79 @@ +"""Normalise Windows paths from stage action outputs. + +This script reads the ``ARTEFACT_MAP`` environment variable (a JSON artefact +map emitted by the Stage composite action), validates that ``binary_path`` and +``license_path`` are present, and writes normalised Windows paths to +``GITHUB_OUTPUT`` for downstream workflow steps. 
+ +Usage +----- +Invoke from a GitHub Actions workflow step:: + + - name: Normalise Windows paths + shell: bash + env: + ARTEFACT_MAP: ${{ steps.stage.outputs.artefact_map }} + run: python .github/workflows/scripts/normalise_windows_paths.py +""" + +from __future__ import annotations + +import json +import os +import sys +from pathlib import Path, PureWindowsPath + + +def main() -> int: + """Read ``ARTEFACT_MAP`` and write normalised paths to ``GITHUB_OUTPUT``.""" + try: + mapping = json.loads(os.environ["ARTEFACT_MAP"]) + except KeyError as exc: # pragma: no cover - exercised in workflow runtime + print( + f"::error title=Stage output missing::Missing env {exc}", + file=sys.stderr, + ) + return 1 + + try: + binary = mapping["binary_path"] + licence = mapping["license_path"] + except KeyError as exc: # pragma: no cover - exercised in workflow runtime + print( + f"::error title=Stage output missing::Missing artefact {exc}", + file=sys.stderr, + ) + return 1 + + if not binary: + print( + "::error title=Stage output missing::binary_path output empty", + file=sys.stderr, + ) + return 1 + if not licence: + print( + "::error title=Stage output missing::license_path output empty", + file=sys.stderr, + ) + return 1 + + github_output = os.environ.get("GITHUB_OUTPUT") + if not github_output: + print( + "::error title=Missing GITHUB_OUTPUT::Environment variable unset", + file=sys.stderr, + ) + return 1 + + binary_path = PureWindowsPath(binary) + license_path = PureWindowsPath(licence) + + with Path(github_output).open("a", encoding="utf-8") as handle: + handle.write(f"binary_path={binary_path}\n") + handle.write(f"license_path={license_path}\n") + return 0 + + +if __name__ == "__main__": # pragma: no cover - invoked by GitHub Actions + raise SystemExit(main()) diff --git a/.github/workflows/scripts/stage_common.py b/.github/workflows/scripts/stage_common.py deleted file mode 100644 index 027352fb..00000000 --- a/.github/workflows/scripts/stage_common.py +++ /dev/null @@ 
-1,215 +0,0 @@ -"""Shared helpers for staging release artefacts.""" - -from __future__ import annotations - -import dataclasses -import hashlib -import shutil -import typing as typ -from pathlib import Path - -__all__ = ["StageResult", "StagingConfig", "stage_artifacts"] - - -@dataclasses.dataclass(frozen=True) -class StagingConfig: - """Immutable bundle describing a staged binary.""" - - bin_name: str - target: str - platform: str - arch: str - workspace: Path - bin_ext: str = "" - - @property - def artifact_dir_name(self) -> str: - """Return the directory name used for the staged artefacts.""" - return f"{self.bin_name}_{self.platform}_{self.arch}" - - -class StageResult(typ.NamedTuple): - """Immutable summary of staged artefacts. - - Attributes - ---------- - artifact_dir : Path - Directory containing the staged artefact bundle. - binary_path : Path - File system path to the staged binary. - man_path : Path - File system path to the staged manual page. - license_path : Path - File system path to the bundled licence text. - """ - - artifact_dir: Path - binary_path: Path - man_path: Path - license_path: Path - - -def stage_artifacts(config: StagingConfig, github_output: Path) -> StageResult: - """Copy artefacts, emit checksums, and record metadata. - - Parameters - ---------- - config : StagingConfig - Aggregated configuration describing the binary, build target, and - staging workspace. ``config.bin_ext`` may append platform-specific - suffixes such as ``".exe"`` when locating binaries. - github_output : Path - File that receives workflow output key-value pairs. - - Returns - ------- - StageResult - Paths for the staged artefact directory, binary, and manual page. - - Raises - ------ - RuntimeError - If the binary or manual page cannot be located uniquely. 
- """ - workspace = config.workspace.resolve() - dist_dir = workspace / "dist" - - bin_src, man_src, licence_src = _validate_and_locate_sources( - workspace, config.target, config.bin_name, config.bin_ext - ) - - artifact_dir = _prepare_artifact_directory(dist_dir, config.artifact_dir_name) - - bin_dest = artifact_dir / bin_src.name - man_dest = artifact_dir / man_src.name - licence_dest = artifact_dir / licence_src.name - shutil.copy2(bin_src, bin_dest) - shutil.copy2(man_src, man_dest) - shutil.copy2(licence_src, licence_dest) - - for path in (bin_dest, man_dest, licence_dest): - _write_checksum(path) - - stage_result = StageResult(artifact_dir, bin_dest, man_dest, licence_dest) - _write_github_outputs(github_output, stage_result) - - return stage_result - - -def _validate_and_locate_sources( - workspace: Path, target: str, bin_name: str, bin_ext: str -) -> tuple[Path, Path, Path]: - """Resolve the source artefacts and ensure they exist.""" - - bin_src = ( - workspace / "target" / target / "release" / f"{bin_name}{bin_ext}" - ) - if not bin_src.is_file(): - message = f"Binary not found at {bin_src}" - raise RuntimeError(message) - - man_src = _find_manpage(workspace, target, bin_name) - - licence_src = workspace / "LICENSE" - if not licence_src.is_file(): - message = f"Licence file not found at {licence_src}" - raise RuntimeError(message) - - return bin_src, man_src, licence_src - - -def _prepare_artifact_directory(dist_dir: Path, artifact_dir_name: str) -> Path: - """Return a clean artefact directory within the distribution workspace.""" - - dist_dir.mkdir(parents=True, exist_ok=True) - - artifact_dir = dist_dir / artifact_dir_name - if artifact_dir.exists(): - # Previous runs may leave artefacts behind; start from a clean slate so - # releases never mix binaries or manuals from different builds. 
- shutil.rmtree(artifact_dir) - artifact_dir.mkdir(parents=True) - - return artifact_dir - - -def _write_github_outputs(github_output: Path, stage_result: StageResult) -> None: - """Emit the staged artefact metadata for downstream workflow steps.""" - - outputs = { - "artifact_dir": stage_result.artifact_dir, - "binary_path": stage_result.binary_path, - "man_path": stage_result.man_path, - "license_path": stage_result.license_path, - } - output_lines: list[str] = [] - for key, path in outputs.items(): - value = _escape_output_value(path) - if not value: - message = f"Resolved {key} output is unexpectedly empty" - raise RuntimeError(message) - output_lines.append(f"{key}={value}\n") - - with github_output.open("w", encoding="utf-8") as handle: - handle.writelines(output_lines) - - -def _find_manpage(workspace: Path, target: str, bin_name: str) -> Path: - """Locate exactly one man page. - - Precedence: - - 1) ``target/generated-man//release/{bin}.1`` (preferred). - 2) ``target//release/build/*/out/{bin}.1``; when several matches - exist, prefer the newest by modification time and fall back to - lexicographic ordering for ties. - - Raises - ------ - RuntimeError - If no candidate exists for ``target``. 
- """ - generated = ( - workspace / "target" / "generated-man" / target / "release" / f"{bin_name}.1" - ) - if generated.is_file(): - return generated - - build_root = workspace / "target" / target / "release" / "build" - if build_root.is_dir(): - matches = [ - candidate - for candidate in build_root.glob(f"*/out/{bin_name}.1") - if candidate.is_file() - ] - if matches: - if len(matches) == 1: - return matches[0] - - def _sort_key(path: Path) -> tuple[int, str]: - try: - return (int(path.stat().st_mtime_ns), path.as_posix()) - except OSError: - return (0, path.as_posix()) - - matches.sort(key=_sort_key) - return matches[-1] - - message = f"Man page not found for target {target}" - raise RuntimeError(message) - - -def _write_checksum(path: Path) -> None: - h = hashlib.sha256() - with path.open("rb") as fh: - for chunk in iter(lambda: fh.read(1024 * 1024), b""): - h.update(chunk) - digest = h.hexdigest() - checksum_path = Path(f"{path}.sha256") - checksum_path.write_text(f"{digest} {path.name}\n", encoding="utf-8") - - -def _escape_output_value(value: Path | str) -> str: - """Escape workflow output values per GitHub recommendations.""" - text = value.as_posix() if isinstance(value, Path) else str(value) - return text.replace("%", "%25").replace("\r", "%0D").replace("\n", "%0A") diff --git a/.github/workflows/scripts/stage_macos.py b/.github/workflows/scripts/stage_macos.py deleted file mode 100644 index e382927c..00000000 --- a/.github/workflows/scripts/stage_macos.py +++ /dev/null @@ -1,97 +0,0 @@ -# /// script -# requires-python = ">=3.11" -# dependencies = [ -# "cyclopts>=3.24.0,<4.0.0", -# ] -# /// -"""Stage macOS release artefacts using Cyclopts parameters. - -Examples --------- ->>> import os ->>> from pathlib import Path ->>> os.environ.update( -... { -... "BIN_NAME": "netsuke", -... "TARGET": "aarch64-apple-darwin", -... "PLATFORM": "macos", -... "ARCH": "arm64", -... "GITHUB_OUTPUT": str(Path("/tmp") / "out"), -... } -... 
) ->>> stage() # doctest: +SKIP -""" - -from __future__ import annotations - -import os -import sys -import typing as typ -from pathlib import Path - -from cyclopts import App, Parameter -from stage_common import StagingConfig, stage_artifacts - -app = App() - - -@app.default -def stage( - bin_name: typ.Annotated[str, Parameter(env_var="BIN_NAME")], - target: typ.Annotated[str, Parameter(env_var="TARGET")], - platform: typ.Annotated[str, Parameter(env_var="PLATFORM")], - arch: typ.Annotated[str, Parameter(env_var="ARCH")], -) -> None: - """Stage macOS artefacts and emit GitHub Actions outputs. - - Parameters - ---------- - bin_name : str - Name of the compiled binary to collect. - target : str - Rust compilation target triple identifying the build output. - platform : str - Display label for the operating system flavour. - arch : str - CPU architecture string for packaging (for example ``"arm64"``). - - Notes - ----- - Reads environment variables provided by GitHub Actions and raises a - configuration error when required values are absent: - - - ``GITHUB_OUTPUT``: Required path receiving workflow outputs. - - ``GITHUB_WORKSPACE``: Optional checkout root. Defaults to ``Path('.')`` - when absent. - - ``BIN_EXT``: Optional binary suffix override for non-standard bundles. 
- """ - github_output_env = os.environ.get("GITHUB_OUTPUT") - if not github_output_env: - print( - "::error title=Configuration Error::", - "GITHUB_OUTPUT environment variable is missing", - file=sys.stderr, - ) - raise SystemExit(1) - - workspace = Path(os.environ.get("GITHUB_WORKSPACE", ".")) - bin_ext = os.environ.get("BIN_EXT", "") - - config = StagingConfig( - bin_name=bin_name, - target=target, - platform=platform, - arch=arch, - workspace=workspace, - bin_ext=bin_ext, - ) - - try: - stage_artifacts(config, Path(github_output_env)) - except RuntimeError as exc: - print(f"::error title=Packaging failure::{exc}", file=sys.stderr) - raise SystemExit(1) from exc - - -if __name__ == "__main__": - app() diff --git a/.github/workflows/scripts/stage_windows.py b/.github/workflows/scripts/stage_windows.py deleted file mode 100644 index 01711320..00000000 --- a/.github/workflows/scripts/stage_windows.py +++ /dev/null @@ -1,98 +0,0 @@ -# /// script -# requires-python = ">=3.11" -# dependencies = [ -# "cyclopts>=3.24.0,<4.0.0", -# ] -# /// -"""Stage Windows artefacts while reading GitHub paths from the environment. - -Examples --------- ->>> import os ->>> from pathlib import Path ->>> os.environ.update( -... { -... "BIN_NAME": "netsuke", -... "TARGET": "x86_64-pc-windows-msvc", -... "PLATFORM": "windows", -... "ARCH": "amd64", -... "GITHUB_OUTPUT": str(Path("/tmp") / "out"), -... "GITHUB_WORKSPACE": str(Path("/tmp") / "workspace"), -... } -... 
) ->>> stage() # doctest: +SKIP -""" - -from __future__ import annotations - -import os -import sys -import typing as typ -from pathlib import Path - -from cyclopts import App, Parameter -from stage_common import StagingConfig, stage_artifacts - -app = App() - - -@app.default -def stage( - bin_name: typ.Annotated[str, Parameter(env_var="BIN_NAME")], - target: typ.Annotated[str, Parameter(env_var="TARGET")], - platform: typ.Annotated[str, Parameter(env_var="PLATFORM")], - arch: typ.Annotated[str, Parameter(env_var="ARCH")], -) -> None: - """Stage Windows artefacts and emit GitHub Actions outputs. - - Parameters - ---------- - bin_name : str - Name of the compiled binary to collect. - target : str - Rust compilation target triple identifying the build output. - platform : str - Display label for the operating system flavour. - arch : str - CPU architecture string for packaging (for example ``"amd64"``). - - Notes - ----- - Reads the following environment variables set by GitHub Actions: - - - ``GITHUB_OUTPUT``: Required path that records workflow outputs. - - ``GITHUB_WORKSPACE``: Optional checkout root. Defaults to ``Path('.')`` - when absent. - - The binary extension is forced to ``".exe"`` because Windows artefacts are - always PE executables. 
- """ - github_output_env = os.environ.get("GITHUB_OUTPUT") - if not github_output_env: - print( - "::error title=Configuration Error::" - "GITHUB_OUTPUT environment variable is missing", - file=sys.stderr, - ) - raise SystemExit(1) - - workspace = Path(os.environ.get("GITHUB_WORKSPACE", ".")) - - config = StagingConfig( - bin_name=bin_name, - target=target, - platform=platform, - arch=arch, - workspace=workspace, - bin_ext=".exe", - ) - - try: - stage_artifacts(config, Path(github_output_env)) - except RuntimeError as exc: - print(f"::error title=Packaging failure::{exc}", file=sys.stderr) - raise SystemExit(1) from exc - - -if __name__ == "__main__": - app() diff --git a/Cargo.lock b/Cargo.lock index 1b39a7e2..a8c37e89 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -134,6 +134,12 @@ dependencies = [ "backtrace", ] +[[package]] +name = "base64" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" + [[package]] name = "bitflags" version = "2.9.1" @@ -203,6 +209,16 @@ dependencies = [ "rustix", ] +[[package]] +name = "cc" +version = "1.2.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1d05d92f4b1fd76aad469d46cdd858ca761576082cd37df81416691e50199fb" +dependencies = [ + "find-msvc-tools", + "shlex", +] + [[package]] name = "cfg-if" version = "1.0.1" @@ -288,6 +304,15 @@ dependencies = [ "libc", ] +[[package]] +name = "crc32fast" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9481c1c90cbf2ac953f07c8d4a58aa3945c425b7185c9154d67a65e4230da511" +dependencies = [ + "cfg-if", +] + [[package]] name = "crossbeam-deque" version = "0.8.6" @@ -419,6 +444,17 @@ dependencies = [ "crypto-common", ] +[[package]] +name = "displaydoc" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.104", +] + [[package]] name = "doc-comment" version = "0.3.3" @@ -471,6 +507,22 @@ version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" +[[package]] +name = "find-msvc-tools" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0399f9d26e5191ce32c498bebd31e7a3ceabc2745f0ac54af3f335126c3f24b3" + +[[package]] +name = "flate2" +version = "1.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc5a4e564e38c699f2880d3fda590bedc2e69f3f84cd48b457bd892ce61d0aa9" +dependencies = [ + "crc32fast", + "miniz_oxide", +] + [[package]] name = "float-cmp" version = "0.9.0" @@ -480,6 +532,15 @@ dependencies = [ "num-traits", ] +[[package]] +name = "form_urlencoded" +version = "1.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf" +dependencies = [ + "percent-encoding", +] + [[package]] name = "fragile" version = "2.0.1" @@ -602,6 +663,17 @@ dependencies = [ "version_check", ] +[[package]] +name = "getrandom" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592" +dependencies = [ + "cfg-if", + "libc", + "wasi 0.11.1+wasi-snapshot-preview1", +] + [[package]] name = "getrandom" version = "0.3.3" @@ -691,6 +763,113 @@ version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9b112acc8b3adf4b107a8ec20977da0273a8c386765a3ec0229bd500a1443f9f" +[[package]] +name = "icu_collections" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "200072f5d0e3614556f94a9930d5dc3e0662a652823904c3a75dc3b0af7fee47" 
+dependencies = [ + "displaydoc", + "potential_utf", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_locale_core" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0cde2700ccaed3872079a65fb1a78f6c0a36c91570f28755dda67bc8f7d9f00a" +dependencies = [ + "displaydoc", + "litemap", + "tinystr", + "writeable", + "zerovec", +] + +[[package]] +name = "icu_normalizer" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "436880e8e18df4d7bbc06d58432329d6458cc84531f7ac5f024e93deadb37979" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec", + "zerovec", +] + +[[package]] +name = "icu_normalizer_data" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00210d6893afc98edb752b664b8890f0ef174c8adbb8d0be9710fa66fbbf72d3" + +[[package]] +name = "icu_properties" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "016c619c1eeb94efb86809b015c58f479963de65bdb6253345c1a1276f22e32b" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_locale_core", + "icu_properties_data", + "icu_provider", + "potential_utf", + "zerotrie", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "298459143998310acd25ffe6810ed544932242d3f07083eee1084d83a71bd632" + +[[package]] +name = "icu_provider" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "03c80da27b5f4187909049ee2d72f276f0d9f99a42c306bd0131ecfe04d8e5af" +dependencies = [ + "displaydoc", + "icu_locale_core", + "stable_deref_trait", + "tinystr", + "writeable", + "yoke", + "zerofrom", + "zerotrie", + "zerovec", +] + +[[package]] +name = "idna" +version = "1.1.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de" +dependencies = [ + "idna_adapter", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" +dependencies = [ + "icu_normalizer", + "icu_properties", +] + [[package]] name = "ignore" version = "0.4.23" @@ -880,6 +1059,12 @@ version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cd945864f07fe9f5371a27ad7b52a172b4b499999f1d97574c9fa68373937e12" +[[package]] +name = "litemap" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "241eaef5fd12c88705a01fc1066c48c4b36e0dd4377dcdc7ec3942cea7a69956" + [[package]] name = "lock_api" version = "0.4.13" @@ -978,6 +1163,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316" dependencies = [ "adler2", + "simd-adler32", ] [[package]] @@ -1071,6 +1257,8 @@ dependencies = [ "tokio", "tracing", "tracing-subscriber", + "ureq", + "wait-timeout", ] [[package]] @@ -1212,6 +1400,12 @@ version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9555b1514d2d99d78150d3c799d4c357a3e2c2a8062cd108e93a06d9057629c5" +[[package]] +name = "percent-encoding" +version = "2.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" + [[package]] name = "pin-project" version = "1.1.10" @@ -1244,6 +1438,15 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" +[[package]] +name = "potential_utf" +version = "0.1.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "84df19adbe5b5a0782edcab45899906947ab039ccf4573713735ee7de1e6b08a" +dependencies = [ + "zerovec", +] + [[package]] name = "powerfmt" version = "0.2.0" @@ -1365,6 +1568,20 @@ version = "1.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ba39f3699c378cd8970968dcbff9c43159ea4cfbd88d43c00b22f2ef10a435d2" +[[package]] +name = "ring" +version = "0.17.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7" +dependencies = [ + "cc", + "cfg-if", + "getrandom 0.2.16", + "libc", + "untrusted", + "windows-sys 0.52.0", +] + [[package]] name = "roff" version = "0.2.2" @@ -1438,6 +1655,41 @@ dependencies = [ "rustix", ] +[[package]] +name = "rustls" +version = "0.23.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd3c25631629d034ce7cd9940adc9d45762d46de2b0f57193c4443b92c6d4d40" +dependencies = [ + "log", + "once_cell", + "ring", + "rustls-pki-types", + "rustls-webpki", + "subtle", + "zeroize", +] + +[[package]] +name = "rustls-pki-types" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "229a4a4c221013e7e1f1a043678c5cc39fe5171437c88fb47151a21e6f5b5c79" +dependencies = [ + "zeroize", +] + +[[package]] +name = "rustls-webpki" +version = "0.103.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e10b3f4191e8a80e6b43eebabfac91e5dcecebb27a71f04e820c47ec41d314bf" +dependencies = [ + "ring", + "rustls-pki-types", + "untrusted", +] + [[package]] name = "rustversion" version = "1.0.21" @@ -1639,6 +1891,12 @@ version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" +[[package]] +name = "simd-adler32" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "d66dc143e6b11c1eddc06d5c423cfc97062865baf299914ab64caa38182078fe" + [[package]] name = "similar" version = "2.7.0" @@ -1674,6 +1932,12 @@ version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b7c388c1b5e93756d0c740965c41e8822f866621d41acbdf6336a6a168f8840c" +[[package]] +name = "stable_deref_trait" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596" + [[package]] name = "strip-ansi-escapes" version = "0.2.1" @@ -1689,6 +1953,12 @@ version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" +[[package]] +name = "subtle" +version = "2.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" + [[package]] name = "supports-color" version = "3.0.2" @@ -1732,6 +2002,17 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "synstructure" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.104", +] + [[package]] name = "synthez" version = "0.3.1" @@ -1772,7 +2053,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e8a64e3985349f2441a1a9ef0b853f869006c3855f2cda6862a94d26ebb9d6a1" dependencies = [ "fastrand", - "getrandom", + "getrandom 0.3.3", "once_cell", "rustix", "windows-sys 0.59.0", @@ -1876,6 +2157,16 @@ dependencies = [ "time-core", ] +[[package]] +name = "tinystr" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d4f6d1145dcb577acf783d4e601bc1d76a13337bb54e6233add580b07344c8b" +dependencies = [ + "displaydoc", + "zerovec", +] + [[package]] name = "tokio" version 
= "1.46.1" @@ -2009,6 +2300,46 @@ version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4a1a07cc7db3810833284e8d372ccdc6da29741639ecc70c9ec107df0fa6154c" +[[package]] +name = "untrusted" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" + +[[package]] +name = "ureq" +version = "2.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02d1a66277ed75f640d608235660df48c8e3c19f3b4edb6a263315626cc3c01d" +dependencies = [ + "base64", + "flate2", + "log", + "once_cell", + "rustls", + "rustls-pki-types", + "url", + "webpki-roots 0.26.11", +] + +[[package]] +name = "url" +version = "2.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08bc136a29a3d1758e07a9cca267be308aeebf5cfd5a10f3f67ab2097683ef5b" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", + "serde", +] + +[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + [[package]] name = "utf8parse" version = "0.2.2" @@ -2070,6 +2401,24 @@ dependencies = [ "wit-bindgen-rt", ] +[[package]] +name = "webpki-roots" +version = "0.26.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "521bc38abb08001b01866da9f51eb7c5d647a19260e00054a8c7fd5f9e57f7a9" +dependencies = [ + "webpki-roots 1.0.3", +] + +[[package]] +name = "webpki-roots" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32b130c0d2d49f8b6889abc456e795e82525204f27c42cf767cf0d7734e089b8" +dependencies = [ + "rustls-pki-types", +] + [[package]] name = "winapi" version = "0.3.9" @@ -2101,6 +2450,15 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets 0.52.6", +] + [[package]] name = "windows-sys" version = "0.59.0" @@ -2265,3 +2623,93 @@ checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1" dependencies = [ "bitflags", ] + +[[package]] +name = "writeable" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea2f10b9bb0928dfb1b42b65e1f9e36f7f54dbdf08457afefb38afcdec4fa2bb" + +[[package]] +name = "yoke" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f41bb01b8226ef4bfd589436a297c53d118f65921786300e427be8d487695cc" +dependencies = [ + "serde", + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38da3c9736e16c5d3c8c597a9aaa5d1fa565d0532ae05e27c24aa62fb32c0ab6" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.104", + "synstructure", +] + +[[package]] +name = "zerofrom" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" +dependencies = [ + "zerofrom-derive", +] + +[[package]] +name = "zerofrom-derive" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.104", + "synstructure", +] + +[[package]] +name = "zeroize" +version = "1.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0" + 
+[[package]] +name = "zerotrie" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "36f0bbd478583f79edad978b407914f61b2972f5af6fa089686016be8f9af595" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", +] + +[[package]] +name = "zerovec" +version = "0.11.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7aa2bd55086f1ab526693ecbe444205da57e25f4489879da80635a46d90e73b" +dependencies = [ + "yoke", + "zerofrom", + "zerovec-derive", +] + +[[package]] +name = "zerovec-derive" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b96237efa0c878c64bd89c436f661be4e46b2f3eff1ebb976f7ef2321d2f58f" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.104", +] diff --git a/Cargo.toml b/Cargo.toml index 4d497fa0..06a37b1c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -44,6 +44,8 @@ ninja_env = { path = "ninja_env" } shell-quote = { version = "0.7.2", default-features = false, features = ["sh"] } shlex = "1.3.0" time = { version = "0.3.44", features = ["formatting", "macros", "parsing", "serde"] } +ureq = { version = "2.10.5" } +wait-timeout = "0.2" [build-dependencies] clap = { version = "4.5.0", features = ["derive"] } diff --git a/codex/refactor-python-staging-scripts-into-github-action b/codex/refactor-python-staging-scripts-into-github-action new file mode 100755 index 00000000..fa5ab5de --- /dev/null +++ b/codex/refactor-python-staging-scripts-into-github-action @@ -0,0 +1,8 @@ +#!/usr/bin/env bash +set -euo pipefail + +# Aggregate the quality gates enforced during review feedback. 
+uvx ruff format --check +uvx ruff check +uvx --with cyclopts ty check --extra-search-path .github/actions/stage/scripts/ .github/actions/stage/scripts/*.py +python -m pytest tests_python/test_stage_common.py tests_python/test_stage_cli.py tests_python/test_stage_action.py diff --git a/cyclopts/__init__.pyi b/cyclopts/__init__.pyi new file mode 100644 index 00000000..c6594e22 --- /dev/null +++ b/cyclopts/__init__.pyi @@ -0,0 +1,23 @@ +from __future__ import annotations + +import typing as typ + +P = typ.ParamSpec("P") +T = typ.TypeVar("T") + +class Parameter: + def __init__(self, *names: object, **kwargs: object) -> None: ... + +class App: + def __init__(self, *args: object, **kwargs: object) -> None: ... + def default(self, func: typ.Callable[P, T]) -> typ.Callable[P, T]: ... + def __call__(self, *args: object, **kwargs: object) -> object: ... + +class _Env: + def __init__(self, prefix: str, command: bool = ...) -> None: ... + def __call__(self, *args: object, **kwargs: object) -> object: ... + +class _ConfigModule: + Env: type[_Env] + +config: _ConfigModule diff --git a/docs/netsuke-design.md b/docs/netsuke-design.md index 0cfa582c..2946a970 100644 --- a/docs/netsuke-design.md +++ b/docs/netsuke-design.md @@ -922,6 +922,21 @@ Using `shell()` marks the template as *impure* and disables caching of the rendered YAML between Stage 2 and Stage 3. This avoids accidental reuse of results that depend on external commands. +Implementation details: + +- `fetch` issues HTTP requests through the `ureq` client. When caching is + enabled a SHA-256 digest of the URL becomes the cache key and responses are + written beneath `.netsuke/fetch` (or a user-provided directory) using + capability-restricted file handles. Any remote fetch or cache write marks the + stdlib state as impure so callers can discard memoised renders, and absolute + cache directories open with ambient authority so Windows drive prefixes work + correctly. 
+- `shell` and `grep` spawn the platform shell (`sh` or `cmd.exe`) with POSIX + single-quoted arguments emitted via `shell-quote`. The stdlib registers a + shared `StdlibState` that flips an `impure` flag whenever these helpers + execute so callers can detect templates that interacted with the outside + world. + Custom external commands can be registered as additional filters. Those should be marked `pure` if safe for caching or `impure` otherwise. @@ -1663,13 +1678,52 @@ composite action to cross-compile for `x86_64` and `aarch64`, generate the staged binary + man page directory, and then call the shared `linux-packages` composite a second time with explicit metadata so the resulting `.deb` and `.rpm` archives both declare a runtime dependency on `ninja-build`. Windows -builds reuse the same action for compilation and then run the uv staging -wrapper `.github/workflows/scripts/stage_windows.py`, which delegates to -`.github/workflows/scripts/stage_common.py`. The helper consumes Cyclopts -parameters for the build metadata, resolves GitHub-provided environment -variables for workspace details, mirrors the man page, and writes SHA-256 sums -ready for publishing while enforcing a Windows-specific binary suffix. The -staged artefacts feed a WiX v4 authoring template stored in +builds reuse the same action for compilation and now invoke the generic staging +composite defined in `.github/actions/stage`. The composite shells out to a +Cyclopts-driven script that reads `.github/release-staging.toml`, merges the +`[common]` configuration with the target-specific overrides, and copies the +configured artefacts into a fresh `dist/{bin}_{platform}_{arch}` directory. It +installs `uv` with `astral-sh/setup-uv`, double-checks the tool is present, and +only then launches the Python entry point so workflows stay declarative. The +helper writes SHA-256 sums for every staged file. 
It also exports a JSON map of +the artefact outputs, allowing the workflow to hydrate downstream steps without +hard-coded path logic. Figure 8.1 summarises the configuration entities, +including optional keys reserved for templated directories and explicit +artefact destinations that the helper can adopt without breaking compatibility. + +Figure 8.1: Entity relationship for the staging configuration schema. + +```mermaid +%% Figure 8.1: Entity relationship for the staging configuration schema. +erDiagram + COMMON { + string bin_name + string dist_dir + string checksum_algorithm + string staging_dir_template + ArtefactConfig[] artefacts + } + TARGETS { + string platform + string arch + string target + string bin_ext + string staging_dir_template + ArtefactConfig[] artefacts + } + ArtefactConfig { + string source + boolean required + string output + string destination + string[] alternatives + } + COMMON ||--o{ TARGETS : "has targets" + COMMON ||--o{ ArtefactConfig : "has artefacts" + TARGETS ||--o{ ArtefactConfig : "has artefacts" +``` + +The staged artefacts feed a WiX v4 authoring template stored in `installer/Package.wxs`; the workflow invokes the shared `windows-package@61340852250fe0c3cf1a06a16443629fccce746e` composite to convert the repository licence into RTF, embed the binary, and output a signed MSI @@ -1684,13 +1738,12 @@ platforms but is not bundled into the installer to avoid shipping an inaccessible help format. macOS releases execute the shared action twice: once on an Intel runner and -again on Apple Silicon. They invoke the uv staging wrapper -`.github/workflows/scripts/stage_macos.py`, which in turn calls the shared -module to mirror documentation and emit checksums before feeding the resulting -paths into the `macos-package` action. 
The macOS and Windows wrappers continue -to embed `uv` metadata blocks, keeping Cyclopts dependencies discoverable -without a repository-level `pyproject.toml`; that file is intentionally omitted -as unnecessary for the release flow. Python linting lives in the top-level +again on Apple Silicon. The same composite action interprets the TOML +configuration, emits checksums, and exposes artefact metadata via JSON outputs +before feeding the resulting paths into the `macos-package` action. Embedding +the PEP 723 metadata keeps Cyclopts discoverable without a repository-level +`pyproject.toml`, maintaining the existing approach where uv resolves +dependencies on demand. Python linting still lives in the top-level `ruff.toml`, so the dedicated staging scripts remain self-contained whilst the broader helper suite stays consistently linted. diff --git a/docs/roadmap.md b/docs/roadmap.md index 908201f6..ba6fb067 100644 --- a/docs/roadmap.md +++ b/docs/roadmap.md @@ -154,7 +154,7 @@ library, and CLI ergonomics. - [x] Refactor all error-producing code to provide the clear, contextual, and actionable error messages specified in the design document. -- [ ] **Template Standard Library:** +- [x] **Template Standard Library:** - [x] Implement the basic file-system tests (`dir`, `file`, `symlink`, `pipe`, `block_device`, `char_device`, legacy `device`). *(done)* @@ -165,7 +165,7 @@ library, and CLI ergonomics. - [x] Implement the generic collection filters (`uniq`, `flatten`, `group_by`). *(done)* - - [ ] Implement the network and command functions/filters (fetch, shell, + - [x] Implement the network and command functions/filters (fetch, shell, grep), ensuring shell marks templates as impure to disable caching. - [x] Implement the time helpers (`now`, `timedelta`). 
diff --git a/scripts/_release_upload_deps.py b/scripts/_release_upload_deps.py new file mode 100644 index 00000000..4ac91f1c --- /dev/null +++ b/scripts/_release_upload_deps.py @@ -0,0 +1,142 @@ +"""Optional dependency loaders and CLI fallback for the release uploader.""" + +from __future__ import annotations + +import os +import sys +from argparse import ArgumentParser +from pathlib import Path +from typing import Any, Callable, NamedTuple, Sequence + +__all__ = [ + "CycloptsSupport", + "PlumbumSupport", + "load_cyclopts", + "load_plumbum", + "run_cli", +] + + +class CycloptsSupport(NamedTuple): + """Expose Cyclopts components and availability information.""" + + available: bool + app: Any + parameter: type + + +class PlumbumSupport(NamedTuple): + """Expose Plumbum components required by the uploader.""" + + local: Any + command_not_found: type[Exception] + process_execution_error: type[Exception] + bound_command: Any + + +def load_cyclopts() -> CycloptsSupport: + """Return Cyclopts components with graceful fallbacks.""" + + try: + import cyclopts as _cyclopts + from cyclopts import App, Parameter + except ModuleNotFoundError as exc: # pragma: no cover - lean test environments. 
+ def _raise_missing_cyclopts(*_args: object, **_kwargs: object) -> None: + message = ( + "Cyclopts is required for CLI usage; install it to run the script" + ) + raise RuntimeError(message) from exc + + _raise_missing_cyclopts.default = lambda func: func # type: ignore[attr-defined] + + class _ParameterStub: + def __init__(self, *_args: object, **_kwargs: object) -> None: + """Accept arguments for ``typing.Annotated`` compatibility.""" + + return CycloptsSupport(False, _raise_missing_cyclopts, _ParameterStub) + + app = App(config=_cyclopts.config.Env("INPUT_", command=False)) + return CycloptsSupport(True, app, Parameter) + + +def load_plumbum() -> PlumbumSupport: + """Return Plumbum components with graceful fallbacks.""" + + try: # pragma: no cover - exercised indirectly when dependencies exist. + from plumbum import local # type: ignore[import-not-found] + from plumbum.commands import ( # type: ignore[import-not-found] + CommandNotFound, + ProcessExecutionError, + ) + from plumbum.commands.base import BoundCommand # type: ignore[import-not-found] + except ModuleNotFoundError as exc: # pragma: no cover - lean type-check envs. + class _LocalStub: + def __getitem__(self, name: str) -> None: # pragma: no cover - defensive. 
+ message = ( + "plumbum is required to execute release uploads; " + "install it or run in dry-run mode" + ) + raise ModuleNotFoundError(message) from exc + + return PlumbumSupport( + local=_LocalStub(), + command_not_found=ModuleNotFoundError, + process_execution_error=RuntimeError, + bound_command=Any, + ) + + return PlumbumSupport( + local=local, + command_not_found=CommandNotFound, + process_execution_error=ProcessExecutionError, + bound_command=BoundCommand, + ) + + +def run_cli( + support: CycloptsSupport, + *, + coerce_bool: Callable[[object], bool], + main: Callable[..., int], + tokens: Sequence[str] | None = None, +) -> int: + """Execute the uploader CLI using Cyclopts or ``argparse`` fallback.""" + + arguments = list(tokens) if tokens is not None else None + + if support.available: + return support.app(arguments) + + parser = ArgumentParser(description=main.__doc__ or "Upload release assets.") + parser.add_argument("--release-tag") + parser.add_argument("--bin-name") + parser.add_argument("--dist-dir") + parser.add_argument("--dry-run", action="store_true") + args = parser.parse_args(arguments if arguments is not None else sys.argv[1:]) + + release_tag = args.release_tag or os.environ.get("INPUT_RELEASE_TAG") + bin_name = args.bin_name or os.environ.get("INPUT_BIN_NAME") + dist_dir_value = args.dist_dir or os.environ.get("INPUT_DIST_DIR") or "dist" + dry_run_flag = args.dry_run + + if not dry_run_flag and (env_flag := os.environ.get("INPUT_DRY_RUN")): + dry_run_flag = coerce_bool(env_flag) + + missing = [ + label + for label, present in ( + ("--release-tag", release_tag), + ("--bin-name", bin_name), + ) + if not present + ] + if missing: + joined = ", ".join(missing) + parser.exit(status=1, message=f"Missing required argument(s): {joined}\n") + + return main( + release_tag=release_tag, + bin_name=bin_name, + dist_dir=Path(dist_dir_value), + dry_run=dry_run_flag, + ) diff --git a/scripts/upload_release_assets.py b/scripts/upload_release_assets.py index 
d69330b0..7d70ac03 100755 --- a/scripts/upload_release_assets.py +++ b/scripts/upload_release_assets.py @@ -29,15 +29,7 @@ import typing as typ from pathlib import Path -import cyclopts -from cyclopts import App, Parameter -from plumbum import local -from plumbum.commands import CommandNotFound, ProcessExecutionError - -if typ.TYPE_CHECKING: - import collections.abc as cabc - - from plumbum.commands.base import BoundCommand +from _release_upload_deps import load_cyclopts, load_plumbum, run_cli class AssetError(RuntimeError): @@ -53,7 +45,19 @@ class ReleaseAsset: size: int -app = App(config=cyclopts.config.Env("INPUT_", command=False)) +cyclopts_support = load_cyclopts() +plumbum_support = load_plumbum() + +Parameter = cyclopts_support.parameter +app = cyclopts_support.app +CommandNotFound = plumbum_support.command_not_found +ProcessExecutionError = plumbum_support.process_execution_error +local = plumbum_support.local + +if typ.TYPE_CHECKING: + from plumbum.commands.base import BoundCommand # type: ignore[import-not-found] +else: + BoundCommand = plumbum_support.bound_command def _is_candidate(path: Path, bin_name: str) -> bool: @@ -90,7 +94,8 @@ def _resolve_asset_name(path: Path, *, dist_dir: Path) -> str: return f"{prefix}-{relative_path.name}" -def _iter_candidate_paths(dist_dir: Path, bin_name: str) -> cabc.Iterator[Path]: +def _iter_candidate_paths(dist_dir: Path, bin_name: str) -> typ.Iterator[Path]: + """Yield candidate artefact file paths under ``dist_dir`` for ``bin_name``.""" for path in sorted(dist_dir.rglob("*")): if path.is_file() and _is_candidate(path, bin_name): yield path @@ -160,7 +165,7 @@ def discover_assets(dist_dir: Path, *, bin_name: str) -> list[ReleaseAsset]: return assets -def _render_summary(assets: cabc.Iterable[ReleaseAsset]) -> str: +def _render_summary(assets: typ.Iterable[ReleaseAsset]) -> str: lines = ["Planned uploads:"] lines.extend( f" - {asset.asset_name} ({asset.size} bytes) -> {asset.path}" @@ -170,7 +175,7 @@ def 
_render_summary(assets: cabc.Iterable[ReleaseAsset]) -> str: def upload_assets( - *, release_tag: str, assets: cabc.Iterable[ReleaseAsset], dry_run: bool = False + *, release_tag: str, assets: typ.Iterable[ReleaseAsset], dry_run: bool = False ) -> None: """Upload artefacts to GitHub using the ``gh`` CLI. @@ -274,19 +279,28 @@ def main( @app.default def cli( *, - release_tag: typ.Annotated[str, Parameter(required=True)], - bin_name: typ.Annotated[str, Parameter(required=True)], - dist_dir: Path = Path("dist"), - dry_run: bool | str = False, + release_tag: typ.Annotated[ + str, Parameter(required=True, env="INPUT_RELEASE_TAG") + ], + bin_name: typ.Annotated[str, Parameter(required=True, env="INPUT_BIN_NAME")], + dist_dir: typ.Annotated[Path, Parameter(env="INPUT_DIST_DIR")] = Path("dist"), + dry_run: typ.Annotated[bool, Parameter(env="INPUT_DRY_RUN")] = False, ) -> int: """Cyclopts-bound CLI entry point.""" return main( release_tag=release_tag, bin_name=bin_name, dist_dir=dist_dir, - dry_run=_coerce_bool(dry_run), + dry_run=dry_run, ) if __name__ == "__main__": # pragma: no cover - exercised via CLI - raise SystemExit(app()) + raise SystemExit( + run_cli( + cyclopts_support, + coerce_bool=_coerce_bool, + main=main, + tokens=sys.argv[1:], + ) + ) diff --git a/src/manifest.rs b/src/manifest.rs index f117ce08..cc39b7fe 100644 --- a/src/manifest.rs +++ b/src/manifest.rs @@ -75,7 +75,7 @@ fn from_str_named(yaml: &str, name: &str) -> Result { // Expose custom helpers to templates. jinja.add_function("env", |name: String| env_var(&name)); jinja.add_function("glob", |pattern: String| glob_paths(&pattern)); - crate::stdlib::register(&mut jinja); + let _stdlib_state = crate::stdlib::register(&mut jinja); if let Some(vars) = doc.get("vars").and_then(|v| v.as_mapping()).cloned() { for (k, v) in vars { diff --git a/src/stdlib/command.rs b/src/stdlib/command.rs new file mode 100644 index 00000000..703ed4b5 --- /dev/null +++ b/src/stdlib/command.rs @@ -0,0 +1,380 @@ +//! 
Shell-oriented helpers for the `MiniJinja` standard library. +//! +//! The helpers bridge template values into the local shell while keeping +//! behaviour predictable across platforms. All helpers mark the stdlib state as +//! impure so the caller can invalidate any caching layer that depends on pure +//! template evaluation. +//! +//! # Security +//! +//! The `shell` and `grep` filters execute external commands based on template +//! content. Templates using these filters must come from trusted sources only. +//! Never allow untrusted input to control command strings or patterns, as this +//! enables arbitrary code execution. + +use std::{ + fmt::Write as FmtWrite, + io::{self, Read, Write}, + process::{Child, Command, ExitStatus, Stdio}, + sync::{ + Arc, + atomic::{AtomicBool, Ordering}, + }, + thread, + time::Duration, +}; + +use minijinja::{ + Error, ErrorKind, State, + value::{Value, ValueKind}, +}; +#[cfg(windows)] +use shell_quote::windows::quote as windows_quote; +#[cfg(not(windows))] +use shell_quote::{QuoteRefExt, Sh}; +use wait_timeout::ChildExt; + +#[cfg(windows)] +const SHELL: &str = "cmd"; +#[cfg(windows)] +const SHELL_ARGS: &[&str] = &["/C"]; + +#[cfg(not(windows))] +const SHELL: &str = "sh"; +#[cfg(not(windows))] +const SHELL_ARGS: &[&str] = &["-c"]; + +// Commands run during template evaluation must not hang renders. Five seconds +// keeps `shell` and `grep` responsive for tests and typical helper binaries +// while still surfacing timeouts for misbehaving commands. 
+const COMMAND_TIMEOUT: Duration = Duration::from_secs(5); + +pub(crate) fn register(env: &mut minijinja::Environment<'_>, impure: Arc) { + let shell_flag = Arc::clone(&impure); + env.add_filter( + "shell", + move |state: &State, value: Value, command: String| { + shell_flag.store(true, Ordering::Relaxed); + execute_shell(state, &value, &command) + }, + ); + + let grep_flag = impure; + env.add_filter( + "grep", + move |state: &State, value: Value, pattern: String, flags: Option| { + grep_flag.store(true, Ordering::Relaxed); + execute_grep(state, &value, &pattern, flags) + }, + ); +} + +fn execute_shell(state: &State, value: &Value, command: &str) -> Result { + let cmd = command.trim(); + if cmd.is_empty() { + return Err(Error::new( + ErrorKind::InvalidOperation, + "shell filter requires a non-empty command", + )); + } + + let input = to_bytes(value)?; + let output = run_command(cmd, &input).map_err(|err| command_error(err, state.name(), cmd))?; + Ok(value_from_bytes(output)) +} + +fn execute_grep( + state: &State, + value: &Value, + pattern: &str, + flags: Option, +) -> Result { + if pattern.is_empty() { + return Err(Error::new( + ErrorKind::InvalidOperation, + "grep filter requires a search pattern", + )); + } + + let mut args = collect_flag_args(flags)?; + args.push(pattern.to_owned()); + let command = format_command("grep", &args); + let input = to_bytes(value)?; + + #[cfg(windows)] + let output = run_program("grep", &args, &input) + .map_err(|err| command_error(err, state.name(), &command))?; + + #[cfg(not(windows))] + let output = + run_command(&command, &input).map_err(|err| command_error(err, state.name(), &command))?; + + Ok(value_from_bytes(output)) +} + +fn collect_flag_args(flags: Option) -> Result, Error> { + let Some(value) = flags else { + return Ok(Vec::new()); + }; + match value.kind() { + ValueKind::Undefined => Ok(Vec::new()), + ValueKind::Seq | ValueKind::Iterable => value + .try_iter()? 
+ .map(|item| { + item.as_str().map_or_else( + || { + Err(Error::new( + ErrorKind::InvalidOperation, + "grep flags must be strings", + )) + }, + |s| Ok(s.to_owned()), + ) + }) + .collect(), + _ => value + .as_str() + .map(|s| vec![s.to_owned()]) + .ok_or_else(|| Error::new(ErrorKind::InvalidOperation, "grep flags must be strings")), + } +} + +fn format_command(base: &str, args: &[String]) -> String { + let mut command = String::from(base); + for arg in args { + command.push(' '); + command.push_str("e(arg)); + } + command +} + +#[cfg(windows)] +fn quote(arg: &str) -> String { + windows_quote(arg) +} + +#[cfg(not(windows))] +fn quote(arg: &str) -> String { + let bytes = arg.quoted(Sh); + String::from_utf8(bytes).expect("quoted args are valid UTF-8") +} + +fn to_bytes(value: &Value) -> Result, Error> { + if value.is_undefined() { + return Err(Error::new( + ErrorKind::InvalidOperation, + "input value is undefined", + )); + } + + if let Some(bytes) = value.as_bytes() { + return Ok(bytes.to_vec()); + } + + Ok(value.to_string().into_bytes()) +} + +fn run_command(command: &str, input: &[u8]) -> Result, CommandFailure> { + let mut cmd = Command::new(SHELL); + cmd.args(SHELL_ARGS) + .arg(command) + .stdin(Stdio::piped()) + .stdout(Stdio::piped()) + .stderr(Stdio::piped()); + + run_child(cmd, input) +} + +#[cfg(windows)] +fn run_program(program: &str, args: &[String], input: &[u8]) -> Result, CommandFailure> { + let mut cmd = Command::new(program); + cmd.args(args) + .stdin(Stdio::piped()) + .stdout(Stdio::piped()) + .stderr(Stdio::piped()); + + run_child(cmd, input) +} + +fn run_child(mut command: Command, input: &[u8]) -> Result, CommandFailure> { + let mut child = command.spawn().map_err(CommandFailure::Spawn)?; + let mut broken_pipe = None; + if let Some(mut stdin) = child.stdin.take() { + match stdin.write_all(input) { + Ok(()) => {} + Err(err) => { + if err.kind() == io::ErrorKind::BrokenPipe { + broken_pipe = Some(err); + } else { + return 
Err(CommandFailure::Io(err)); + } + } + } + } + + let mut stdout_reader = spawn_pipe_reader(child.stdout.take()); + let mut stderr_reader = spawn_pipe_reader(child.stderr.take()); + + let status = match wait_for_exit(&mut child, COMMAND_TIMEOUT) { + Ok(status) => status, + Err(err) => { + let _ = join_reader(stdout_reader.take()); + let _ = join_reader(stderr_reader.take()); + return Err(err); + } + }; + + let stdout = join_reader(stdout_reader.take()).map_err(CommandFailure::Io)?; + let stderr = join_reader(stderr_reader.take()).map_err(CommandFailure::Io)?; + + if let Some(err) = broken_pipe { + return Err(CommandFailure::BrokenPipe { + source: err, + status: status.code(), + stderr, + }); + } + + if status.success() { + Ok(stdout) + } else { + Err(CommandFailure::Exit { + status: status.code(), + stderr, + }) + } +} + +fn value_from_bytes(bytes: Vec) -> Value { + match String::from_utf8(bytes) { + Ok(text) => Value::from(text), + Err(err) => Value::from_bytes(err.into_bytes()), + } +} + +fn command_error(err: CommandFailure, template: &str, command: &str) -> Error { + match err { + CommandFailure::Spawn(spawn) => Error::new( + ErrorKind::InvalidOperation, + format!("failed to spawn command '{command}' in template '{template}': {spawn}"), + ), + CommandFailure::Io(io_err) => { + let pipe_msg = if io_err.kind() == io::ErrorKind::BrokenPipe { + " (command closed input early)" + } else { + "" + }; + Error::new( + ErrorKind::InvalidOperation, + format!("command '{command}' in template '{template}' failed: {io_err}{pipe_msg}"), + ) + } + CommandFailure::BrokenPipe { + source, + status, + stderr, + } => { + let mut msg = format!( + "command '{command}' in template '{template}' failed: {source} (command closed input early)" + ); + if let Some(code) = status { + let _ = FmtWrite::write_fmt(&mut msg, format_args!("; exited with status {code}")); + } else { + msg.push_str("; terminated by signal"); + } + append_stderr(&mut msg, &stderr); + 
Error::new(ErrorKind::InvalidOperation, msg) + } + CommandFailure::Exit { status, stderr } => { + let mut msg = status.map_or_else( + || format!("command '{command}' in template '{template}' terminated by signal"), + |code| { + format!( + "command '{command}' in template '{template}' exited with status {code}" + ) + }, + ); + append_stderr(&mut msg, &stderr); + Error::new(ErrorKind::InvalidOperation, msg) + } + CommandFailure::Timeout(duration) => Error::new( + ErrorKind::InvalidOperation, + format!( + "command '{command}' in template '{template}' timed out after {}s", + duration.as_secs() + ), + ), + } +} + +enum CommandFailure { + Spawn(io::Error), + Io(io::Error), + BrokenPipe { + source: io::Error, + status: Option, + stderr: Vec, + }, + Exit { + status: Option, + stderr: Vec, + }, + Timeout(Duration), +} + +impl From for CommandFailure { + fn from(err: io::Error) -> Self { + Self::Io(err) + } +} + +fn wait_for_exit(child: &mut Child, timeout: Duration) -> Result { + if let Some(status) = child.wait_timeout(timeout).map_err(CommandFailure::Io)? { + Ok(status) + } else { + if let Err(err) = child.kill() + && err.kind() != io::ErrorKind::InvalidInput + { + return Err(CommandFailure::Io(err)); + } + let _ = child.wait(); + Err(CommandFailure::Timeout(timeout)) + } +} + +fn spawn_pipe_reader(pipe: Option) -> Option>>> +where + R: Read + Send + 'static, +{ + pipe.map(|mut reader| { + thread::spawn(move || { + let mut buf = Vec::new(); + reader.read_to_end(&mut buf)?; + Ok(buf) + }) + }) +} + +fn join_reader(handle: Option>>>) -> io::Result> { + handle.map_or_else( + || Ok(Vec::new()), + |handle| { + handle + .join() + .map_err(|_| io::Error::other("pipe reader panicked"))? 
+ }, + ) +} + +fn append_stderr(message: &mut String, stderr: &[u8]) { + let stderr = String::from_utf8_lossy(stderr); + let trimmed = stderr.trim(); + if !trimmed.is_empty() { + message.push_str(": "); + message.push_str(trimmed); + } +} diff --git a/src/stdlib/mod.rs b/src/stdlib/mod.rs index 9051c2f6..78e6c959 100644 --- a/src/stdlib/mod.rs +++ b/src/stdlib/mod.rs @@ -1,14 +1,17 @@ //! Standard library registration for `MiniJinja` templates. //! //! The module wires the platform-aware file tests, the path manipulation -//! filters, and the collection filters into a single entrypoint so template -//! authors can rely on consistent behaviour across projects. Tests such as -//! `dir`, `file`, and `symlink` inspect metadata without following symlinks, -//! while filters expose conveniences like `basename`, `with_suffix`, -//! `realpath`, content hashing, and collection utilities including -//! `flatten`, `group_by`, and `uniq`. +//! filters, the collection helpers, the network utilities, and the command +//! wrappers into a single entrypoint so template authors can rely on +//! consistent behaviour across projects. Tests such as `dir`, `file`, and +//! `symlink` inspect metadata without following symlinks, while filters +//! expose conveniences like `basename`, `with_suffix`, `realpath`, content +//! hashing, collection utilities including `flatten`, `group_by`, and `uniq`, +//! HTTP helpers like `fetch`, and shell bridges such as `shell` and `grep`. mod collections; +mod command; +mod network; mod path; mod time; @@ -17,9 +20,36 @@ use cap_std::fs; #[cfg(unix)] use cap_std::fs::FileTypeExt; use minijinja::{Environment, Error, value::Value}; +use std::{ + sync::Arc, + sync::atomic::{AtomicBool, Ordering}, +}; type FileTest = (&'static str, fn(fs::FileType) -> bool); +/// Captures mutable state shared between stdlib helpers. 
+#[derive(Clone, Default, Debug)] +pub struct StdlibState { + impure: Arc, +} + +impl StdlibState { + /// Returns whether any impure helper executed during the last render. + #[must_use] + pub fn is_impure(&self) -> bool { + self.impure.load(Ordering::Relaxed) + } + + /// Resets the impurity marker so callers can track helper usage per render. + pub fn reset_impure(&self) { + self.impure.store(false, Ordering::Relaxed); + } + + pub(crate) fn impure_flag(&self) -> Arc { + Arc::clone(&self.impure) + } +} + /// Register standard library helpers with the `MiniJinja` environment. /// /// # Examples @@ -28,7 +58,7 @@ type FileTest = (&'static str, fn(fs::FileType) -> bool); /// use netsuke::stdlib; /// /// let mut env = Environment::new(); -/// stdlib::register(&mut env); +/// let _state = stdlib::register(&mut env); /// env.add_template("t", "{{ path | basename }}").expect("add template"); /// let tmpl = env.get_template("t").expect("get template"); /// let rendered = tmpl @@ -36,11 +66,16 @@ type FileTest = (&'static str, fn(fs::FileType) -> bool); /// .expect("render"); /// assert_eq!(rendered, "bar.txt"); /// ``` -pub fn register(env: &mut Environment<'_>) { +pub fn register(env: &mut Environment<'_>) -> StdlibState { + let state = StdlibState::default(); register_file_tests(env); path::register_filters(env); collections::register_filters(env); + let impure = state.impure_flag(); + network::register_functions(env, Arc::clone(&impure)); + command::register(env, impure); time::register_functions(env); + state } fn register_file_tests(env: &mut Environment<'_>) { diff --git a/src/stdlib/network.rs b/src/stdlib/network.rs new file mode 100644 index 00000000..293c044d --- /dev/null +++ b/src/stdlib/network.rs @@ -0,0 +1,296 @@ +//! Network helpers exposed to `MiniJinja` templates. +//! +//! Currently this module provides the `fetch` function that retrieves remote +//! resources with optional on-disk caching. 
+ +use std::{ + io::{self, Read}, + sync::{ + Arc, + atomic::{AtomicBool, Ordering}, + }, + time::Duration, +}; + +use camino::{Utf8Component, Utf8Path, Utf8PathBuf}; +use cap_std::{ambient_authority, fs_utf8::Dir}; +use minijinja::{ + Environment, Error, ErrorKind, + value::{Kwargs, Value}, +}; +use sha2::{Digest, Sha256}; + +const DEFAULT_CACHE_DIR: &str = ".netsuke/fetch"; + +pub(crate) fn register_functions(env: &mut Environment<'_>, impure: Arc) { + env.add_function("fetch", move |url: String, kwargs: Kwargs| { + fetch(&url, &kwargs, &impure) + }); +} + +fn fetch(url: &str, kwargs: &Kwargs, impure: &Arc) -> Result { + let use_cache = kwargs.get::>("cache")?.unwrap_or(false); + let cache_dir = kwargs.get::>("cache_dir")?; + kwargs.assert_all_used()?; + + let bytes = if use_cache { + let dir = open_cache_dir(cache_dir.as_deref().unwrap_or(DEFAULT_CACHE_DIR))?; + let key = cache_key(url); + if let Some(cached) = read_cached(&dir, &key)? { + impure.store(true, Ordering::Relaxed); + cached + } else { + let data = fetch_remote(url, impure)?; + write_cache(&dir, &key, &data, impure)?; + data + } + } else { + fetch_remote(url, impure)? 
+ }; + + Ok(to_value(bytes)) +} + +fn fetch_remote(url: &str, impure: &Arc) -> Result, Error> { + impure.store(true, Ordering::Relaxed); + let agent = ureq::AgentBuilder::new() + .timeout_connect(Duration::from_secs(10)) + .timeout_read(Duration::from_secs(30)) + .timeout_write(Duration::from_secs(30)) + .timeout(Duration::from_secs(60)) + .build(); + let response = agent.get(url).call().map_err(|err| { + Error::new( + ErrorKind::InvalidOperation, + format!("fetch failed for '{url}': {err}"), + ) + })?; + let mut reader = response.into_reader(); + let mut bytes = Vec::new(); + reader.read_to_end(&mut bytes).map_err(|err| { + Error::new( + ErrorKind::InvalidOperation, + format!("failed to read response from '{url}': {err}"), + ) + })?; + Ok(bytes) +} + +fn open_cache_dir(path: &str) -> Result { + let utf_path = Utf8Path::new(path); + if utf_path.as_str().is_empty() { + return Err(Error::new( + ErrorKind::InvalidOperation, + "cache_dir must not be empty", + )); + } + + if utf_path.is_absolute() { + let mut root = String::new(); + for component in utf_path.components() { + match component { + Utf8Component::Prefix(prefix) => root.push_str(prefix.as_str()), + Utf8Component::RootDir => { + root.push('/'); + break; + } + _ => break, + } + } + if root.is_empty() { + root.push('/'); + } + let root_path = Utf8PathBuf::from(root); + + let root_dir = Dir::open_ambient_dir(&root_path, ambient_authority()) + .map_err(|err| io_error("open root cache dir", &root_path, &err))?; + let rel = utf_path.strip_prefix(&root_path).map_err(|_| { + Error::new( + ErrorKind::InvalidOperation, + format!("failed to compute relative path for '{utf_path}'"), + ) + })?; + if rel.as_str().is_empty() { + return Ok(root_dir); + } + + root_dir + .create_dir_all(rel) + .map_err(|err| io_error("create cache dir", utf_path, &err))?; + return root_dir + .open_dir(rel) + .map_err(|err| io_error("open cache dir", utf_path, &err)); + } + + let cwd = Dir::open_ambient_dir(".", ambient_authority()) + 
.map_err(|err| io_error("open working dir", utf_path, &err))?; + cwd.create_dir_all(utf_path) + .map_err(|err| io_error("create cache dir", utf_path, &err))?; + cwd.open_dir(utf_path) + .map_err(|err| io_error("open cache dir", utf_path, &err)) +} + +fn read_cached(dir: &Dir, name: &str) -> Result>, Error> { + match dir.open(name) { + Ok(mut file) => { + let mut buf = Vec::new(); + file.read_to_end(&mut buf).map_err(|err| { + Error::new( + ErrorKind::InvalidOperation, + format!("failed to read cache entry '{name}': {err}"), + ) + })?; + Ok(Some(buf)) + } + Err(err) if err.kind() == io::ErrorKind::NotFound => Ok(None), + Err(err) => Err(Error::new( + ErrorKind::InvalidOperation, + format!("failed to open cache entry '{name}': {err}"), + )), + } +} + +fn write_cache(dir: &Dir, name: &str, data: &[u8], impure: &Arc) -> Result<(), Error> { + impure.store(true, Ordering::Relaxed); + dir.write(name, data).map_err(|err| { + Error::new( + ErrorKind::InvalidOperation, + format!("failed to write cache entry '{name}': {err}"), + ) + }) +} + +fn cache_key(url: &str) -> String { + let digest = Sha256::digest(url.as_bytes()); + hex_string(&digest) +} + +fn hex_string(bytes: &[u8]) -> String { + let mut out = String::with_capacity(bytes.len() * 2); + for byte in bytes { + use std::fmt::Write; + let _ = write!(out, "{byte:02x}"); + } + out +} + +fn to_value(bytes: Vec) -> Value { + match String::from_utf8(bytes) { + Ok(text) => Value::from(text), + Err(err) => Value::from_bytes(err.into_bytes()), + } +} + +fn io_error(action: &str, path: &Utf8Path, err: &io::Error) -> Error { + Error::new( + ErrorKind::InvalidOperation, + format!("{action} for '{path}' failed: {err}"), + ) +} + +#[cfg(test)] +mod tests { + use super::*; + + use std::{ + fs, + path::{Path, PathBuf}, + sync::{Mutex, MutexGuard, OnceLock}, + }; + + use tempfile::tempdir; + + fn cwd_lock() -> &'static Mutex<()> { + static LOCK: OnceLock> = OnceLock::new(); + LOCK.get_or_init(|| Mutex::new(())) + } + + struct DirGuard 
{ + original: PathBuf, + _lock: MutexGuard<'static, ()>, + } + + impl DirGuard { + fn change_to(path: &Path) -> Self { + let lock = cwd_lock().lock().expect("cwd lock"); + let original = std::env::current_dir().expect("current dir"); + std::env::set_current_dir(path).expect("set current dir"); + Self { + original, + _lock: lock, + } + } + } + + impl Drop for DirGuard { + fn drop(&mut self) { + std::env::set_current_dir(&self.original).expect("restore current dir"); + } + } + + #[test] + fn cache_key_stable() { + assert_eq!( + cache_key("http://example.com"), + cache_key("http://example.com") + ); + } + + #[test] + fn hex_string_formats_bytes() { + assert_eq!(hex_string(&[0x0f, 0xa0, 0x3c]), "0fa03c"); + } + + #[test] + fn to_value_preserves_utf8() { + let value = to_value(b"payload".to_vec()); + assert_eq!(value.as_str(), Some("payload")); + } + + #[test] + fn to_value_returns_bytes_for_invalid_utf8() { + let value = to_value(vec![0xff, 0xfe, 0xfd]); + assert_eq!(value.as_bytes(), Some(&[0xff, 0xfe, 0xfd][..])); + } + + #[test] + fn open_cache_dir_rejects_empty_path() { + let err = open_cache_dir("").expect_err("empty path should fail"); + assert_eq!(err.kind(), ErrorKind::InvalidOperation); + } + + #[test] + fn open_cache_dir_errors_for_file_path() { + let temp = tempdir().expect("tempdir"); + let file_path = temp.path().join("file"); + fs::write(&file_path, b"data").expect("write file"); + let utf_path = Utf8PathBuf::from_path_buf(file_path).expect("utf8 path"); + let err = open_cache_dir(utf_path.as_str()).expect_err("file path should fail"); + assert_eq!(err.kind(), ErrorKind::InvalidOperation); + } + + #[test] + fn open_cache_dir_creates_relative_directory() { + let temp = tempdir().expect("tempdir"); + let _guard = DirGuard::change_to(temp.path()); + let dir = open_cache_dir("cache").expect("open relative cache dir"); + dir.write("entry", b"data").expect("write cache entry"); + drop(dir); + let entry = temp.path().join("cache").join("entry"); + 
assert!(fs::metadata(entry).is_ok(), "cache entry should exist"); + } + + #[test] + fn open_cache_dir_creates_absolute_directory() { + let temp = tempdir().expect("tempdir"); + let absolute = + Utf8PathBuf::from_path_buf(temp.path().join("cache")).expect("utf8 cache path"); + let dir = open_cache_dir(absolute.as_str()).expect("open absolute cache dir"); + dir.write("entry", b"data").expect("write cache entry"); + let entry = absolute.join("entry"); + assert!( + fs::metadata(entry.as_std_path()).is_ok(), + "cache entry should exist" + ); + } +} diff --git a/test_support/src/command_helper.rs b/test_support/src/command_helper.rs new file mode 100644 index 00000000..f1e6e14b --- /dev/null +++ b/test_support/src/command_helper.rs @@ -0,0 +1,114 @@ +//! Test-support helpers for compiling tiny Rust programmes used by command +//! filter tests. + +use std::{ffi::OsString, process::Command}; + +use camino::Utf8PathBuf; +use cap_std::fs_utf8::Dir; + +const UPPERCASE_SOURCE: &str = concat!( + "use std::io::{self, Read};\n", + "fn main() {\n", + " let mut input = String::new();\n", + " io::stdin().read_to_string(&mut input).expect(\"stdin\");\n", + " print!(\"{}\", input.to_uppercase());\n", + "}\n", +); + +const FAILURE_SOURCE: &str = concat!( + "use std::io::{self, Read};\n", + "fn main() {\n", + " let mut input = String::new();\n", + " let _ = io::stdin().read_to_string(&mut input);\n", + " std::process::exit(1);\n", + "}\n", +); + +/// Compile a helper binary that converts stdin to upper case and return the +/// executable path. 
+/// +/// # Examples +/// +/// ```rust,no_run +/// use camino::Utf8PathBuf; +/// use cap_std::{ambient_authority, fs_utf8::Dir}; +/// use tempfile::tempdir; +/// use test_support::command_helper::compile_uppercase_helper; +/// +/// let temp = tempdir().expect("tempdir"); +/// let root = Utf8PathBuf::from_path_buf(temp.path().to_path_buf()) +/// .expect("utf8 path"); +/// let dir = Dir::open_ambient_dir(&root, ambient_authority()) +/// .expect("open temp dir"); +/// let exe = compile_uppercase_helper(&dir, &root, "cmd_upper"); +/// assert!(exe.as_std_path().exists()); +/// ``` +pub fn compile_uppercase_helper(dir: &Dir, root: &Utf8PathBuf, name: &str) -> Utf8PathBuf { + compile_rust_helper(dir, root, name, UPPERCASE_SOURCE) +} + +/// Compile a helper binary that exits with status code `1` after consuming +/// stdin. +/// +/// # Examples +/// +/// ```rust,no_run +/// # use camino::Utf8PathBuf; +/// # use cap_std::{ambient_authority, fs_utf8::Dir}; +/// # use tempfile::tempdir; +/// # use test_support::command_helper::compile_failure_helper; +/// let temp = tempdir().expect("tempdir"); +/// let root = Utf8PathBuf::from_path_buf(temp.path().to_path_buf()) +/// .expect("utf8 path"); +/// let dir = Dir::open_ambient_dir(&root, ambient_authority()) +/// .expect("open temp dir"); +/// let exe = compile_failure_helper(&dir, &root, "cmd_fail"); +/// assert!(exe.as_std_path().exists()); +/// ``` +pub fn compile_failure_helper(dir: &Dir, root: &Utf8PathBuf, name: &str) -> Utf8PathBuf { + compile_rust_helper(dir, root, name, FAILURE_SOURCE) +} + +/// Compile an arbitrary Rust helper source to an executable. +/// +/// Writes `{name}.rs` into `dir`, invokes the toolchain, and returns the +/// executable path, which remains valid whilst `dir`'s backing directory +/// exists. 
+/// +/// # Examples +/// +/// ```rust,no_run +/// # use camino::Utf8PathBuf; +/// # use cap_std::{ambient_authority, fs_utf8::Dir}; +/// # use tempfile::tempdir; +/// # use test_support::command_helper::compile_rust_helper; +/// let temp = tempdir().expect("tempdir"); +/// let root = Utf8PathBuf::from_path_buf(temp.path().to_path_buf()) +/// .expect("utf8 path"); +/// let dir = Dir::open_ambient_dir(&root, ambient_authority()) +/// .expect("open temp dir"); +/// let exe = compile_rust_helper( +/// &dir, +/// &root, +/// "cmd", +/// "fn main() {}\n", +/// ); +/// assert!(exe.as_std_path().exists()); +/// ``` +pub fn compile_rust_helper(dir: &Dir, root: &Utf8PathBuf, name: &str, source: &str) -> Utf8PathBuf { + dir.write(&format!("{name}.rs"), source.as_bytes()) + .expect("write helper source"); + + let src_path = root.join(format!("{name}.rs")); + let exe_path = root.join(format!("{name}{}", std::env::consts::EXE_SUFFIX)); + let rustc = std::env::var_os("RUSTC").unwrap_or_else(|| OsString::from("rustc")); + let status = Command::new(&rustc) + .arg(src_path.as_std_path()) + .arg("-o") + .arg(exe_path.as_std_path()) + .status() + .expect("compile helper"); + + assert!(status.success(), "failed to compile helper: {status:?}"); + exe_path +} diff --git a/test_support/src/lib.rs b/test_support/src/lib.rs index 1a9ee2fe..f83c0111 100644 --- a/test_support/src/lib.rs +++ b/test_support/src/lib.rs @@ -11,6 +11,7 @@ //! Platform notes: fake executables are implemented for Unix and Windows. 
pub mod check_ninja; +pub mod command_helper; pub mod env; pub mod env_guard; pub mod env_lock; diff --git a/tests/cucumber.rs b/tests/cucumber.rs index 8e98985a..49238700 100644 --- a/tests/cucumber.rs +++ b/tests/cucumber.rs @@ -4,7 +4,7 @@ use camino::Utf8PathBuf; use cucumber::World; #[cfg(unix)] use std::os::unix::fs::FileTypeExt; -use std::{collections::HashMap, ffi::OsString}; +use std::{collections::HashMap, ffi::OsString, net::TcpStream, thread::JoinHandle}; use test_support::{PathGuard, env::restore_many}; /// Shared state for Cucumber scenarios. @@ -36,12 +36,58 @@ pub struct CliWorld { pub stdlib_output: Option, /// Error from the last stdlib render. pub stdlib_error: Option, + /// Stdlib impurity state captured for the last render. + pub stdlib_state: Option, + /// Quoted command string for stdlib shell scenarios. + pub stdlib_command: Option, + /// Last HTTP server fixture started by stdlib steps. + pub http_server: Option>, + /// URL exposed by the active HTTP server fixture. + pub stdlib_url: Option, /// Snapshot of pre-scenario values for environment variables that were overridden. /// Stores the original value (`Some`) or `None` if the variable was previously unset. pub env_vars: HashMap>, } mod steps; +use steps::stdlib_steps::spawn_http_server; + +impl CliWorld { + pub(crate) fn start_http_server(&mut self, body: String) { + if let Some(host) = self.extract_host_from_stdlib_url() { + let _ = TcpStream::connect(host); + } + self.shutdown_http_server(); + let (url, handle) = spawn_http_server(body); + self.stdlib_url = Some(url); + self.http_server = Some(handle); + } + + pub(crate) fn shutdown_http_server(&mut self) { + let Some(handle) = self.http_server.take() else { + return; + }; + let _ = handle.join(); + self.stdlib_url = None; + } + + /// Returns the host component of the active stdlib HTTP fixture URL. + /// + /// The caller uses the host to unblock the listener during teardown. 
+ fn extract_host_from_stdlib_url(&self) -> Option<&str> { + self.stdlib_url + .as_deref() + .and_then(steps::stdlib_steps::server_host) + } + + fn restore_environment(&mut self) { + if self.env_vars.is_empty() { + return; + } + + restore_many(self.env_vars.drain().collect()); + } +} #[cfg(unix)] fn block_device_exists() -> bool { @@ -58,9 +104,11 @@ fn block_device_exists() -> bool { impl Drop for CliWorld { fn drop(&mut self) { - if !self.env_vars.is_empty() { - restore_many(self.env_vars.drain().collect()); + if let Some(host) = self.extract_host_from_stdlib_url() { + let _ = TcpStream::connect(host); } + self.shutdown_http_server(); + self.restore_environment(); } } diff --git a/tests/features/stdlib.feature b/tests/features/stdlib.feature index e328983c..83ce246b 100644 --- a/tests/features/stdlib.feature +++ b/tests/features/stdlib.feature @@ -72,3 +72,31 @@ Feature: Template stdlib filters When I render "{{ ([{'name': 'one'}] | group_by('kind')) }}" with stdlib path "file" Then the stdlib error contains "could not resolve" + Scenario: shell filter transforms text and marks templates impure + Given an uppercase stdlib command helper + When I render the stdlib template "{{ 'hello' | shell(cmd) | trim }}" using the stdlib command helper + Then the stdlib output is "HELLO" + And the stdlib template is impure + + Scenario: shell filter reports command failures + Given a failing stdlib command helper + When I render the stdlib template "{{ 'data' | shell(cmd) }}" using the stdlib command helper + Then the stdlib error contains "exited" + And the stdlib template is impure + + Scenario: grep filter extracts matching lines + When I render the stdlib template "{{ 'alpha\nbeta\n' | grep('beta') | trim }}" + Then the stdlib output is "beta" + And the stdlib template is impure + + Scenario: fetch retrieves remote content and marks templates impure + Given an HTTP server returning "payload" + When I render "{{ fetch(url) }}" with stdlib url + Then the stdlib output is 
"payload" + And the stdlib template is impure + + Scenario: fetch reports network errors + When I render the stdlib template "{{ fetch('http://127.0.0.1:9') }}" + Then the stdlib error contains "fetch failed" + And the stdlib template is impure + diff --git a/tests/std_filter_tests.rs b/tests/std_filter_tests.rs index 3d210add..111c89bf 100644 --- a/tests/std_filter_tests.rs +++ b/tests/std_filter_tests.rs @@ -1,9 +1,13 @@ #[path = "std_filter_tests/collection_filters.rs"] mod collection_filters; +#[path = "std_filter_tests/command_filters.rs"] +mod command_filters; #[path = "std_filter_tests/hash_filters.rs"] mod hash_filters; #[path = "std_filter_tests/io_filters.rs"] mod io_filters; +#[path = "std_filter_tests/network_functions.rs"] +mod network_functions; #[path = "std_filter_tests/path_filters.rs"] mod path_filters; #[path = "std_filter_tests/support.rs"] diff --git a/tests/std_filter_tests/command_filters.rs b/tests/std_filter_tests/command_filters.rs new file mode 100644 index 00000000..a5ca7345 --- /dev/null +++ b/tests/std_filter_tests/command_filters.rs @@ -0,0 +1,300 @@ +use camino::Utf8PathBuf; +use cap_std::{ambient_authority, fs_utf8::Dir}; +use minijinja::{Environment, ErrorKind, context, value::Value}; +use rstest::rstest; +use tempfile::tempdir; +use test_support::command_helper::{compile_failure_helper, compile_uppercase_helper}; + +#[cfg(windows)] +use test_support::command_helper::compile_rust_helper; + +use super::support::stdlib_env_with_state; + +struct CommandFixture { + _temp: tempfile::TempDir, + env: Environment<'static>, + state: netsuke::stdlib::StdlibState, + command: String, +} + +impl CommandFixture { + fn new(compiler: impl Fn(&Dir, &Utf8PathBuf, &str) -> Utf8PathBuf, binary: &str) -> Self { + let temp = tempdir().expect("tempdir"); + let root = Utf8PathBuf::from_path_buf(temp.path().to_path_buf()).expect("utf8"); + let dir = Dir::open_ambient_dir(&root, ambient_authority()).expect("dir"); + let helper = compiler(&dir, &root, 
binary); + let command = format!("\"{}\"", helper.as_str()); + let (env, state) = stdlib_env_with_state(); + state.reset_impure(); + Self { + _temp: temp, + env, + state, + command, + } + } + + fn env(&mut self) -> &mut Environment<'static> { + &mut self.env + } + + fn command(&self) -> &str { + &self.command + } + + fn state(&self) -> &netsuke::stdlib::StdlibState { + &self.state + } +} + +type CommandCompiler = fn(&Dir, &Utf8PathBuf, &str) -> Utf8PathBuf; + +enum ShellExpectation { + Success(&'static str), + Failure { substrings: &'static [&'static str] }, +} + +#[cfg(windows)] +use { + super::support::{EnvLock, EnvVarGuard}, + rstest::fixture, + std::ffi::OsString, +}; + +#[rstest] +#[case::uppercase( + compile_uppercase_helper, + "cmd_upper", + "shell_upper", + "{{ 'hello' | shell(cmd) | trim }}", + ShellExpectation::Success("HELLO") +)] +#[case::failure( + compile_failure_helper, + "cmd_fail", + "shell_fail", + "{{ 'data' | shell(cmd) }}", + ShellExpectation::Failure { + substrings: &["command", "exited"], + }, +)] +fn shell_filter_behaviour( + #[case] compiler: CommandCompiler, + #[case] binary: &'static str, + #[case] template_name: &'static str, + #[case] template_src: &'static str, + #[case] expectation: ShellExpectation, +) { + let mut fixture = CommandFixture::new(compiler, binary); + { + let env = fixture.env(); + env.add_template(template_name, template_src) + .expect("template"); + } + let command = fixture.command().to_owned(); + let template = { + let env = fixture.env(); + env.get_template(template_name).expect("get template") + }; + + match expectation { + ShellExpectation::Success(expected) => { + let rendered = template + .render(context!(cmd => command.clone())) + .expect("render shell"); + assert_eq!(rendered, expected); + } + ShellExpectation::Failure { substrings } => { + let err = template + .render(context!(cmd => command.clone())) + .expect_err("shell should propagate failures"); + assert_eq!(err.kind(), ErrorKind::InvalidOperation); + 
let message = err.to_string(); + for needle in substrings { + assert!( + message.contains(needle), + "error should mention {needle}: {message}", + ); + } + } + } + + assert!( + fixture.state().is_impure(), + "shell filter should mark template impure", + ); +} + +#[cfg(unix)] +#[rstest] +fn shell_filter_times_out_long_commands() { + let (mut env, state) = stdlib_env_with_state(); + state.reset_impure(); + env.add_template("shell_timeout", "{{ '' | shell('sleep 10') }}") + .expect("template"); + let template = env.get_template("shell_timeout").expect("get template"); + let err = template + .render(context! {}) + .expect_err("sleep should exceed shell timeout"); + assert_eq!(err.kind(), ErrorKind::InvalidOperation); + assert!(state.is_impure(), "timeout should mark template impure"); + assert!( + err.to_string().contains("timed out"), + "timeout error should mention duration: {err}", + ); +} + +#[rstest] +fn grep_filter_filters_lines() { + let (mut env, state) = stdlib_env_with_state(); + state.reset_impure(); + env.add_template("grep", "{{ 'alpha\\nbeta\\n' | grep('beta') | trim }}") + .expect("template"); + let template = env.get_template("grep").expect("get template"); + let rendered = template.render(context! {}).expect("render"); + assert_eq!(rendered, "beta"); + assert!(state.is_impure(), "grep should mark template impure"); +} + +#[rstest] +fn grep_filter_rejects_invalid_flags() { + let (mut env, _state) = stdlib_env_with_state(); + env.add_template("grep_invalid", "{{ 'alpha' | grep('a', [1, 2, 3]) }}") + .expect("template"); + let template = env.get_template("grep_invalid").expect("get template"); + let err = template + .render(context! {}) + .expect_err("non-string flags should be rejected"); + assert_eq!(err.kind(), ErrorKind::InvalidOperation); + assert!( + err.to_string().contains("grep flags must be strings"), + "error should explain invalid flags: {err}", + ); +} + +fn empty_context() -> Value { + context! 
{} +} + +fn shell_context() -> Value { + context!(cmd => "echo ignored") +} + +#[rstest] +#[case( + "shell_undefined", + "{{ missing | shell(cmd) }}", + shell_context as fn() -> Value, + "shell filter should mark template impure", +)] +#[case( + "grep_undefined", + "{{ missing | grep('pattern') }}", + empty_context as fn() -> Value, + "grep filter should mark template impure", +)] +fn filters_reject_undefined_input( + #[case] name: &str, + #[case] template_src: &str, + #[case] context_fn: fn() -> Value, + #[case] impure_message: &str, +) { + let (mut env, state) = stdlib_env_with_state(); + state.reset_impure(); + env.add_template(name, template_src).expect("template"); + let template = env.get_template(name).expect("get template"); + let err = template + .render(context_fn()) + .expect_err("filter should reject undefined input"); + assert_eq!(err.kind(), ErrorKind::InvalidOperation); + assert!( + err.to_string().contains("input value is undefined"), + "error should mention undefined input: {err}", + ); + assert!(state.is_impure(), "{impure_message}"); +} + +#[cfg(windows)] +#[fixture] +fn env_lock() -> EnvLock { + EnvLock::acquire() +} + +#[cfg(windows)] +const GREP_STUB: &str = concat!( + "use std::io::{self, Read};\n", + "fn main() {\n", + " let mut args: Vec = std::env::args().skip(1).collect();\n", + " let pattern = args.pop().expect(\"pattern\");\n", + " let mut input = String::new();\n", + " io::stdin().read_to_string(&mut input).expect(\"stdin\");\n", + " if pattern == \"^line2\" && input.contains(\"line2\") {\n", + " print!(\"line2\\n\");\n", + " } else {\n", + " eprintln!(\"pattern:{pattern} input:{input}\", pattern = pattern, input = input);\n", + " std::process::exit(1);\n", + " }\n", + "}\n", +); + +#[cfg(windows)] +const ARGS_STUB: &str = concat!( + "fn main() {\n", + " let mut args = std::env::args().skip(1);\n", + " if let Some(arg) = args.next() {\n", + " print!(\"{arg}\", arg = arg);\n", + " }\n", + "}\n", +); + +#[cfg(windows)] +#[rstest] +fn 
grep_on_windows_bypasses_shell(env_lock: EnvLock) { + let _lock = env_lock; + let temp = tempdir().expect("tempdir"); + let root = Utf8PathBuf::from_path_buf(temp.path().to_path_buf()).expect("utf8 temp"); + let dir = Dir::open_ambient_dir(&root, ambient_authority()).expect("open temp dir"); + compile_rust_helper(&dir, &root, "grep", GREP_STUB); + + let mut path_value = OsString::from(root.as_str()); + path_value.push(";"); + path_value.push(std::env::var_os("PATH").unwrap_or_default()); + let _path = EnvVarGuard::set("PATH", &path_value); + + let (mut env, state) = stdlib_env_with_state(); + state.reset_impure(); + env.add_template( + "grep_win", + r#"{{ 'line1\nline2\n' | grep('^line2') | trim }}"#, + ) + .expect("template"); + let template = env.get_template("grep_win").expect("get template"); + let rendered = template.render(context! {}).expect("render"); + assert_eq!(rendered, "line2"); + assert!(state.is_impure(), "grep should mark template impure"); +} + +#[cfg(windows)] +#[rstest] +fn shell_preserves_cmd_meta_characters(env_lock: EnvLock) { + let _lock = env_lock; + let temp = tempdir().expect("tempdir"); + let root = Utf8PathBuf::from_path_buf(temp.path().to_path_buf()).expect("utf8 temp"); + let dir = Dir::open_ambient_dir(&root, ambient_authority()).expect("open temp dir"); + let exe = compile_rust_helper(&dir, &root, "echo_args", ARGS_STUB); + + let command = format!("\"{}\" \"literal %%^!\"", exe); + let (mut env, state) = stdlib_env_with_state(); + state.reset_impure(); + env.add_template("shell_meta", "{{ '' | shell(cmd) }}") + .expect("template"); + let template = env.get_template("shell_meta").expect("get template"); + let rendered = template + .render(context!(cmd => command)) + .expect("render shell"); + assert_eq!(rendered.trim(), "literal %^!"); + assert!( + state.is_impure(), + "shell filter should mark template impure" + ); +} diff --git a/tests/std_filter_tests/hash_filters.rs b/tests/std_filter_tests/hash_filters.rs index bd293411..b5499f5b 
100644 --- a/tests/std_filter_tests/hash_filters.rs +++ b/tests/std_filter_tests/hash_filters.rs @@ -1,9 +1,8 @@ use cap_std::{ambient_authority, fs_utf8::Dir}; -use minijinja::{Environment, ErrorKind, context}; -use netsuke::stdlib; +use minijinja::{ErrorKind, context}; use rstest::rstest; -use super::support::{Workspace, filter_workspace, register_template}; +use super::support::{Workspace, filter_workspace, register_template, stdlib_env}; #[rstest] #[case( @@ -49,8 +48,7 @@ fn hash_and_digest_filters( #[case] expected_digest: &str, ) { let (_temp, root) = filter_workspace; - let mut env = Environment::new(); - stdlib::register(&mut env); + let mut env = stdlib_env(); let dir = Dir::open_ambient_dir(&root, ambient_authority()).expect("dir"); let (file, algorithm) = alg.strip_suffix("-empty").map_or_else( @@ -88,8 +86,7 @@ fn hash_and_digest_filters( #[rstest] fn hash_filter_legacy_algorithms_disabled(filter_workspace: Workspace) { let (_temp, root) = filter_workspace; - let mut env = Environment::new(); - stdlib::register(&mut env); + let mut env = stdlib_env(); register_template(&mut env, "hash_sha1", "{{ path | hash('sha1') }}"); let template = env.get_template("hash_sha1").expect("get template"); @@ -105,8 +102,7 @@ fn hash_filter_legacy_algorithms_disabled(filter_workspace: Workspace) { #[rstest] fn hash_filter_rejects_unknown_algorithm(filter_workspace: Workspace) { let (_temp, root) = filter_workspace; - let mut env = Environment::new(); - stdlib::register(&mut env); + let mut env = stdlib_env(); let file = root.join("file"); register_template(&mut env, "hash_unknown", "{{ path | hash('whirlpool') }}"); diff --git a/tests/std_filter_tests/io_filters.rs b/tests/std_filter_tests/io_filters.rs index 3f7e72c6..817d16a6 100644 --- a/tests/std_filter_tests/io_filters.rs +++ b/tests/std_filter_tests/io_filters.rs @@ -1,15 +1,13 @@ use cap_std::{ambient_authority, fs_utf8::Dir}; -use minijinja::{Environment, ErrorKind, context}; -use netsuke::stdlib; +use 
minijinja::{ErrorKind, context}; use rstest::rstest; -use super::support::{Workspace, filter_workspace, render}; +use super::support::{Workspace, filter_workspace, render, stdlib_env}; #[rstest] fn contents_and_linecount_filters(filter_workspace: Workspace) { let (_temp, root) = filter_workspace; - let mut env = Environment::new(); - stdlib::register(&mut env); + let mut env = stdlib_env(); let file = root.join("file"); let text = render(&mut env, "contents", "{{ path | contents }}", &file); assert_eq!(text, "data"); @@ -38,8 +36,7 @@ fn contents_and_linecount_filters(filter_workspace: Workspace) { #[rstest] fn contents_filter_unsupported_encoding(filter_workspace: Workspace) { let (_temp, root) = filter_workspace; - let mut env = Environment::new(); - stdlib::register(&mut env); + let mut env = stdlib_env(); env.add_template("contents_bad_encoding", "{{ path | contents('latin-1') }}") .expect("template"); let template = env @@ -58,8 +55,7 @@ fn contents_filter_unsupported_encoding(filter_workspace: Workspace) { #[rstest] fn size_filter(filter_workspace: Workspace) { let (_temp, root) = filter_workspace; - let mut env = Environment::new(); - stdlib::register(&mut env); + let mut env = stdlib_env(); let file = root.join("file"); let size = render(&mut env, "size", "{{ path | size }}", &file); assert_eq!(size.parse::().expect("u64"), 4); @@ -68,8 +64,7 @@ fn size_filter(filter_workspace: Workspace) { #[rstest] fn size_filter_missing_file(filter_workspace: Workspace) { let (_temp, root) = filter_workspace; - let mut env = Environment::new(); - stdlib::register(&mut env); + let mut env = stdlib_env(); env.add_template("size_missing", "{{ path | size }}") .expect("template"); let template = env.get_template("size_missing").expect("get template"); diff --git a/tests/std_filter_tests/network_functions.rs b/tests/std_filter_tests/network_functions.rs new file mode 100644 index 00000000..a11a20ed --- /dev/null +++ b/tests/std_filter_tests/network_functions.rs @@ -0,0 +1,107 
@@ +use std::{ + io::{Read, Write}, + net::TcpListener, + thread, +}; + +use minijinja::{ErrorKind, context}; +use rstest::rstest; +use tempfile::tempdir; + +use super::support::stdlib_env_with_state; + +fn start_server(body: &'static str) -> (String, thread::JoinHandle<()>) { + let listener = TcpListener::bind(("127.0.0.1", 0)).expect("bind listener"); + let addr = listener.local_addr().expect("local addr"); + let url = format!("http://{addr}"); + let handle = thread::spawn(move || { + if let Ok((mut stream, _)) = listener.accept() { + let mut buf = [0u8; 512]; + let bytes_read = stream.read(&mut buf).unwrap_or(0); + if bytes_read > 0 { + let response = format!( + "HTTP/1.1 200 OK\r\nContent-Length: {}\r\nConnection: close\r\n\r\n{}", + body.len(), + body + ); + let _ = stream.write_all(response.as_bytes()); + } + } + }); + (url, handle) +} + +#[rstest] +fn fetch_function_downloads_content() { + let (url, handle) = start_server("payload"); + let (mut env, state) = stdlib_env_with_state(); + state.reset_impure(); + env.add_template("fetch", "{{ fetch(url) }}") + .expect("template"); + let tmpl = env.get_template("fetch").expect("get template"); + let rendered = tmpl + .render(context!(url => url.clone())) + .expect("render fetch"); + assert_eq!(rendered, "payload"); + assert!( + state.is_impure(), + "network fetch should mark template impure" + ); + handle.join().expect("join server"); +} + +#[rstest] +fn fetch_function_respects_cache() { + let temp = tempdir().expect("tempdir"); + let cache_dir = temp.path().join("cache"); + let cache_str = cache_dir.to_str().expect("utf8 cache dir").to_owned(); + let (url, handle) = start_server("cached"); + let (mut env, state) = stdlib_env_with_state(); + state.reset_impure(); + env.add_template( + "fetch_cache", + "{{ fetch(url, cache=True, cache_dir=cache_dir) }}", + ) + .expect("template"); + let tmpl = env.get_template("fetch_cache").expect("get template"); + let rendered = tmpl + .render(context!(url => url.clone(), 
cache_dir => cache_str.clone())) + .expect("render fetch"); + assert_eq!(rendered, "cached"); + assert!( + state.is_impure(), + "network-backed cache fill should mark template impure" + ); + state.reset_impure(); + handle.join().expect("join server"); + + // Drop the listener and verify the cached response is returned. + let rendered_again = tmpl + .render(context!(url => url, cache_dir => cache_str)) + .expect("render cached fetch"); + assert_eq!(rendered_again, "cached"); + assert!( + state.is_impure(), + "cached responses should mark template impure", + ); +} + +#[rstest] +fn fetch_function_reports_errors() { + let (mut env, state) = stdlib_env_with_state(); + state.reset_impure(); + env.add_template("fetch_fail", "{{ fetch(url) }}") + .expect("template"); + let tmpl = env.get_template("fetch_fail").expect("get template"); + let result = tmpl.render(context!(url => "http://127.0.0.1:9")); + let err = result.expect_err("fetch should report connection errors"); + assert_eq!(err.kind(), ErrorKind::InvalidOperation); + assert!( + err.to_string().contains("fetch failed"), + "error should mention failure: {err}", + ); + assert!( + state.is_impure(), + "failed fetch should still mark template impure", + ); +} diff --git a/tests/std_filter_tests/support.rs b/tests/std_filter_tests/support.rs index d90d3723..82a7ca19 100644 --- a/tests/std_filter_tests/support.rs +++ b/tests/std_filter_tests/support.rs @@ -1,7 +1,7 @@ use camino::Utf8PathBuf; use cap_std::{ambient_authority, fs_utf8::Dir}; use minijinja::{Environment, context}; -use netsuke::stdlib; +use netsuke::stdlib::{self, StdlibState}; use rstest::fixture; use tempfile::tempdir; @@ -19,9 +19,14 @@ pub(crate) fn register_template( env.add_template_owned(name, source).expect("template"); } -pub(crate) fn stdlib_env() -> Environment<'static> { +pub(crate) fn stdlib_env_with_state() -> (Environment<'static>, StdlibState) { let mut env = Environment::new(); - stdlib::register(&mut env); + let state = 
stdlib::register(&mut env); + (env, state) +} + +pub(crate) fn stdlib_env() -> Environment<'static> { + let (env, _) = stdlib_env_with_state(); env } diff --git a/tests/steps/mod.rs b/tests/steps/mod.rs index 1a90b67d..f1279657 100644 --- a/tests/steps/mod.rs +++ b/tests/steps/mod.rs @@ -10,4 +10,4 @@ mod ir_steps; mod manifest_steps; mod ninja_steps; mod process_steps; -mod stdlib_steps; +pub(crate) mod stdlib_steps; diff --git a/tests/steps/stdlib_steps.rs b/tests/steps/stdlib_steps.rs index 2ab196cf..1346af55 100644 --- a/tests/steps/stdlib_steps.rs +++ b/tests/steps/stdlib_steps.rs @@ -1,7 +1,9 @@ -//! Cucumber step implementations for stdlib path and file filters. +//! Cucumber step implementations for stdlib path, file, network, and command +//! helpers. //! //! Sets up a temporary workspace, renders templates with stdlib registered, -//! and asserts outputs and errors. +//! and asserts outputs and errors. Provides HTTP fixtures for fetch scenarios +//! and compiles reusable command helpers for shell and grep coverage. 
use crate::CliWorld; use camino::{Utf8Path, Utf8PathBuf}; use cap_std::{ambient_authority, fs_utf8::Dir}; @@ -9,7 +11,15 @@ use cucumber::{given, then, when}; use minijinja::{Environment, context, value::Value}; use netsuke::stdlib; use std::ffi::OsStr; -use test_support::env::set_var; +use std::{ + io::{Read, Write}, + net::{TcpListener, TcpStream}, + thread, +}; +use test_support::{ + command_helper::{compile_failure_helper, compile_uppercase_helper}, + env::set_var, +}; use time::{Duration, OffsetDateTime, UtcOffset, format_description::well_known::Iso8601}; const LINES_FIXTURE: &str = concat!( @@ -89,6 +99,31 @@ impl From for RelativePath { } } +pub(crate) fn server_host(url: &str) -> Option<&str> { + extract_host_from_url(url) +} + +pub(crate) fn spawn_http_server(body: String) -> (String, thread::JoinHandle<()>) { + let listener = TcpListener::bind(("127.0.0.1", 0)).expect("bind http listener"); + let addr = listener.local_addr().expect("local addr"); + let url = format!("http://{addr}"); + let handle = thread::spawn(move || { + if let Ok((mut stream, _)) = listener.accept() { + let mut buf = [0u8; 512]; + let bytes_read = stream.read(&mut buf).unwrap_or(0); + if bytes_read > 0 { + let response = format!( + "HTTP/1.1 200 OK\r\nContent-Length: {}\r\nConnection: close\r\n\r\n{}", + body.len(), + body + ); + let _ = stream.write_all(response.as_bytes()); + } + } + }); + (url, handle) +} + fn ensure_workspace(world: &mut CliWorld) -> Utf8PathBuf { if let Some(root) = &world.stdlib_root { return root.clone(); @@ -111,7 +146,9 @@ fn ensure_workspace(world: &mut CliWorld) -> Utf8PathBuf { fn render_template_with_context(world: &mut CliWorld, template: &TemplateContent, ctx: Value) { let mut env = Environment::new(); - stdlib::register(&mut env); + let state = stdlib::register(&mut env); + state.reset_impure(); + world.stdlib_state = Some(state.clone()); let render = env.render_str(template.as_str(), ctx); match render { Ok(output) => { @@ -136,6 +173,48 @@ fn 
stdlib_workspace(world: &mut CliWorld) { world.stdlib_root = Some(root); } +#[given("an uppercase stdlib command helper")] +fn uppercase_stdlib_command_helper(world: &mut CliWorld) { + let root = ensure_workspace(world); + let handle = Dir::open_ambient_dir(&root, ambient_authority()).expect("open workspace"); + let helper = compile_uppercase_helper(&handle, &root, "cmd_upper"); + world.stdlib_command = Some(format!("\"{}\"", helper.as_str())); +} + +#[given("a failing stdlib command helper")] +fn failing_stdlib_command_helper(world: &mut CliWorld) { + let root = ensure_workspace(world); + let handle = Dir::open_ambient_dir(&root, ambient_authority()).expect("open workspace"); + let helper = compile_failure_helper(&handle, &root, "cmd_fail"); + world.stdlib_command = Some(format!("\"{}\"", helper.as_str())); +} + +/// Extracts the host portion from an HTTP or HTTPS URL. +/// +/// Returns None if the URL doesn't have a valid http/https prefix or host. +fn extract_host_from_url(url: &str) -> Option<&str> { + let addr = url + .strip_prefix("http://") + .or_else(|| url.strip_prefix("https://"))?; + addr.split('/').next() +} + +// The reviewer requested nested optional checks to keep the host parsing explicit +// for readability, so silence the lint that prefers collapsing them. 
+#[allow( + clippy::collapsible_if, + reason = "Review requested explicit nested host extraction checks" +)] +#[given(regex = r#"^an HTTP server returning "(.+)"$"#)] +fn http_server_returning(world: &mut CliWorld, body: String) { + if let Some(url) = world.stdlib_url.as_ref() { + if let Some(host) = extract_host_from_url(url) { + let _ = TcpStream::connect(host); + } + } + world.start_http_server(body); +} + #[given(regex = r#"^the stdlib file "(.+)" contains "(.+)"$"#)] fn write_stdlib_file(world: &mut CliWorld, path: String, contents: String) { let root = ensure_workspace(world); @@ -200,6 +279,26 @@ fn render_stdlib_template_without_path(world: &mut CliWorld, template: String) { render_template_with_context(world, &template_content, context! {}); } +#[when(regex = r#"^I render "(.+)" with stdlib url$"#)] +fn render_stdlib_template_with_url(world: &mut CliWorld, template: String) { + let url = world + .stdlib_url + .clone() + .expect("expected HTTP server to be initialised"); + let template_content = TemplateContent::from(template); + render_template_with_context(world, &template_content, context!(url => url)); +} + +#[when(regex = r#"^I render the stdlib template "(.+)" using the stdlib command helper$"#)] +fn render_stdlib_template_with_command(world: &mut CliWorld, template: String) { + let command = world + .stdlib_command + .clone() + .expect("expected stdlib command helper to be compiled"); + let template_content = TemplateContent::from(template); + render_template_with_context(world, &template_content, context!(cmd => command)); +} + #[expect( clippy::needless_pass_by_value, reason = "Cucumber requires owned capture arguments" @@ -282,6 +381,24 @@ fn assert_stdlib_error(world: &mut CliWorld, fragment: String) { ); } +#[then("the stdlib template is impure")] +fn assert_stdlib_impure(world: &mut CliWorld) { + let state = world + .stdlib_state + .as_ref() + .expect("stdlib state should be initialised"); + assert!(state.is_impure(), "expected template to be 
impure"); +} + +#[then("the stdlib template is pure")] +fn assert_stdlib_pure(world: &mut CliWorld) { + let state = world + .stdlib_state + .as_ref() + .expect("stdlib state should be initialised"); + assert!(!state.is_impure(), "expected template to remain pure"); +} + #[then("the stdlib output equals the workspace root")] fn assert_stdlib_output_is_root(world: &mut CliWorld) { let (root, output) = stdlib_root_and_output(world); diff --git a/tests_python/conftest.py b/tests_python/conftest.py new file mode 100644 index 00000000..6be39b4f --- /dev/null +++ b/tests_python/conftest.py @@ -0,0 +1,34 @@ +"""Shared fixtures for staging helper tests.""" + +from __future__ import annotations + +import importlib +import sys +from pathlib import Path + +import pytest + +REPO_ROOT = Path(__file__).resolve().parent.parent +MODULE_DIR = REPO_ROOT / ".github" / "actions" / "stage" / "scripts" + + +@pytest.fixture(scope="session") +def stage_common() -> object: + """Load the staging helper package once for reuse across tests.""" + + sys_path = str(MODULE_DIR) + sys.path.insert(0, sys_path) + try: + return importlib.import_module("stage_common") + finally: + sys.path.remove(sys_path) + + +@pytest.fixture +def workspace(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> Path: + """Create an isolated workspace and set ``GITHUB_WORKSPACE`` accordingly.""" + + root = tmp_path / "workspace" + root.mkdir() + monkeypatch.setenv("GITHUB_WORKSPACE", str(root)) + return root diff --git a/tests_python/stage_test_helpers.py b/tests_python/stage_test_helpers.py new file mode 100644 index 00000000..0a5b8edb --- /dev/null +++ b/tests_python/stage_test_helpers.py @@ -0,0 +1,52 @@ +"""Shared helpers for the staging test suites.""" + +from __future__ import annotations + +from pathlib import Path + +__all__ = ["decode_output_file", "write_workspace_inputs"] + + +def decode_output_file(path: Path) -> dict[str, str]: + """Parse GitHub output records written with ``write_github_output``.""" + + 
lines = path.read_text(encoding="utf-8").splitlines() + values: dict[str, str] = {} + index = 0 + while index < len(lines): + line = lines[index] + if "<<" in line: + key, delimiter = line.split("<<", 1) + index += 1 + buffer: list[str] = [] + while index < len(lines) and lines[index] != delimiter: + buffer.append(lines[index]) + index += 1 + values[key] = "\n".join(buffer) + index += 1 # Skip the delimiter terminator. + continue + if "=" in line: + key, value = line.split("=", 1) + decoded = ( + value.replace("%0A", "\n") + .replace("%0D", "\r") + .replace("%25", "%") + ) + values[key] = decoded + index += 1 + return values + + +def write_workspace_inputs(root: Path, target: str) -> None: + """Populate ``root`` with staged artefacts for the provided ``target``.""" + root = Path(root) + bin_path = root / "target" / target / "release" / "netsuke" + bin_path.parent.mkdir(parents=True, exist_ok=True) + bin_path.write_bytes(b"binary") + + man_path = root / "target" / "generated-man" / target / "release" / "netsuke.1" + man_path.parent.mkdir(parents=True, exist_ok=True) + man_path.write_text(".TH NETSUKE 1", encoding="utf-8") + + licence = root / "LICENSE" + licence.write_text("Copyright Netsuke", encoding="utf-8") diff --git a/tests_python/test_stage_action.py b/tests_python/test_stage_action.py new file mode 100644 index 00000000..4508530a --- /dev/null +++ b/tests_python/test_stage_action.py @@ -0,0 +1,42 @@ +"""Lightweight checks for the staging composite GitHub Action.""" + +from __future__ import annotations + +from pathlib import Path + +REPO_ROOT = Path(__file__).resolve().parent.parent +ACTION_FILE = REPO_ROOT / ".github" / "actions" / "stage" / "action.yml" + + +def test_action_declares_required_outputs() -> None: + """The composite action should expose the expected top-level outputs.""" + content = ACTION_FILE.read_text(encoding="utf-8") + + assert "steps.run-stage.outputs.binary_path" in content + assert "steps.run-stage.outputs.man_path" in content + assert 
"steps.run-stage.outputs.license_path" in content + + +def test_action_installs_uv() -> None: + """The composite action must ensure ``uv`` is available.""" + content = ACTION_FILE.read_text(encoding="utf-8") + + assert "uses: astral-sh/setup-uv@" in content + assert "python-version: '3.11'" in content + + +def test_action_invokes_cli_script() -> None: + """The action should run the staging script via ``uv run``.""" + content = ACTION_FILE.read_text(encoding="utf-8") + + assert "uv run" in content + assert "scripts/stage.py" in content + + +def test_action_declares_reserved_outputs(stage_common: object) -> None: + """Reserved outputs must remain aligned with the composite action.""" + + content = ACTION_FILE.read_text(encoding="utf-8") + for key in stage_common.RESERVED_OUTPUT_KEYS: + needle = f"steps.run-stage.outputs.{key}" + assert needle in content, f"Composite action missing output for {key}" diff --git a/tests_python/test_stage_cli.py b/tests_python/test_stage_cli.py new file mode 100644 index 00000000..5c94b6b7 --- /dev/null +++ b/tests_python/test_stage_cli.py @@ -0,0 +1,96 @@ +"""Behavioural tests for the staging CLI entry point.""" + +from __future__ import annotations + +import importlib +import json +import sys +import typing as typ +from pathlib import Path +from types import ModuleType + +import pytest +from stage_test_helpers import decode_output_file, write_workspace_inputs + +REPO_ROOT = Path(__file__).resolve().parent.parent +SCRIPTS_DIR = REPO_ROOT / ".github" / "actions" / "stage" / "scripts" + + +class _StubCycloptsApp: + """Minimal stand-in for :mod:`cyclopts` used during testing.""" + + def __init__(self, *args: object, **kwargs: object) -> None: # noqa: ARG002 - stub signature must match cyclopts.App + self._handler: typ.Callable[..., object] | None = None + + def default(self, func: typ.Callable[..., object]) -> typ.Callable[..., object]: + self._handler = func + return func + + def __call__(self, *args: object, **kwargs: object) -> None: # 
noqa: ARG002 - stub signature must match cyclopts.App + """Prevent the stub from being invoked directly.""" + message = "Stub CLI should not be invoked directly" + raise RuntimeError(message) # pragma: no cover - not exercised + + +@pytest.fixture +def workspace(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> Path: + """Provide an isolated workspace and set ``GITHUB_WORKSPACE``.""" + root = tmp_path / "workspace" + root.mkdir() + monkeypatch.setenv("GITHUB_WORKSPACE", str(root)) + return root + + +@pytest.fixture +def stage_cli(monkeypatch: pytest.MonkeyPatch) -> ModuleType: + """Import the CLI module with a stubbed :mod:`cyclopts`.""" + sys.path.insert(0, str(SCRIPTS_DIR)) + monkeypatch.setitem(sys.modules, "cyclopts", ModuleType("cyclopts")) + cyclopts_module = sys.modules["cyclopts"] + cyclopts_module.App = _StubCycloptsApp # type: ignore[attr-defined] + yield importlib.import_module("stage") + sys.path.remove(str(SCRIPTS_DIR)) + sys.modules.pop("stage", None) + sys.modules.pop("cyclopts", None) + + +def test_stage_cli_stages_and_reports( + stage_cli: ModuleType, workspace: Path, monkeypatch: pytest.MonkeyPatch +) -> None: + """The CLI should stage artefacts and emit GitHub outputs.""" + config_src = REPO_ROOT / ".github" / "release-staging.toml" + config_copy = workspace / "release-staging.toml" + config_copy.write_text(config_src.read_text(encoding="utf-8"), encoding="utf-8") + + target = "linux-x86_64" + write_workspace_inputs(workspace, "x86_64-unknown-linux-gnu") + + github_output = workspace / "outputs.txt" + monkeypatch.setenv("GITHUB_OUTPUT", str(github_output)) + + stage_cli.main(config_copy, target) + + outputs = decode_output_file(github_output) + artefact_map = json.loads(outputs["artefact_map"]) + assert artefact_map["binary_path"].endswith( + "netsuke" + ), "artefact map should expose the staged binary" + assert outputs["binary_path"].endswith( + "netsuke" + ), "CLI output should provide the staged binary path" + + +def 
test_stage_cli_requires_github_output( + stage_cli: ModuleType, workspace: Path, monkeypatch: pytest.MonkeyPatch +) -> None: + """The CLI should exit with an error when ``GITHUB_OUTPUT`` is missing.""" + config_src = REPO_ROOT / ".github" / "release-staging.toml" + config_copy = workspace / "release-staging.toml" + config_copy.write_text(config_src.read_text(encoding="utf-8"), encoding="utf-8") + + monkeypatch.delenv("GITHUB_OUTPUT", raising=False) + + with pytest.raises(SystemExit) as exc: + stage_cli.main(config_copy, "linux-x86_64") + + assert exc.value.code == 1, "CLI should exit with status 1 when outputs are missing" diff --git a/tests_python/test_stage_common.py b/tests_python/test_stage_common.py deleted file mode 100644 index 403f793b..00000000 --- a/tests_python/test_stage_common.py +++ /dev/null @@ -1,276 +0,0 @@ -"""Tests for the shared staging helpers used by release workflows. - -Summary -------- -Validate that ``stage_common.stage_artifacts`` bundles the licence file -alongside binaries and manual pages so GitHub Actions outputs include the -expected ``license_path`` entry. - -Purpose -------- -Regression tests for the Windows release job that previously failed when -``license_path`` was missing from the recorded outputs. Ensures staging -fails fast if the licence is absent and records the correct path when -present. 
- -Usage ------ -Run the tests directly:: - - python -m pytest tests_python/test_stage_common.py - -Examples --------- -Create a dummy workspace with a binary, manual page, and licence:: - - from pathlib import Path - from stage_common import StagingConfig, stage_artifacts - - workspace = Path("/tmp/workspace") - workspace.mkdir(parents=True, exist_ok=True) - (workspace / "LICENSE").write_text("Example", encoding="utf-8") - binary = workspace / "target" / "x" / "release" / "netsuke" - binary.parent.mkdir(parents=True, exist_ok=True) - binary.write_bytes(b"binary") - man = workspace / "target" / "generated-man" / "x" / "release" / "netsuke.1" - man.parent.mkdir(parents=True, exist_ok=True) - man.write_text(".TH", encoding="utf-8") - - config = StagingConfig( - bin_name="netsuke", - target="x", - platform="linux", - arch="amd64", - workspace=workspace, - ) - stage_artifacts(config, workspace / "out.txt") -""" - -from __future__ import annotations - -import importlib.util -import sys -import typing as typ -from pathlib import Path - -import pytest - -REPO_ROOT = Path(__file__).resolve().parent.parent -MODULE_PATH = REPO_ROOT / ".github" / "workflows" / "scripts" / "stage_common.py" - - -if typ.TYPE_CHECKING: - from types import ModuleType -else: - ModuleType = type(sys) - - -@pytest.fixture(scope="session") -def stage_common() -> ModuleType: - """Load the ``stage_common`` helper once for reuse across tests.""" - - spec = importlib.util.spec_from_file_location("stage_common", MODULE_PATH) - if spec is None or spec.loader is None: - message = "Failed to load stage_common module" - raise RuntimeError(message) - - module = importlib.util.module_from_spec(spec) - sys.modules[spec.name] = module - spec.loader.exec_module(module) - return module - - -def _prepare_workspace(root: Path, *, bin_ext: str = "") -> tuple[str, str]: - """Create a fake cargo workspace with binary, man page, and licence.""" - - bin_name = "netsuke" - target = "x86_64-pc-windows-msvc" - - licence = 
root / "LICENSE" - licence.write_text("Copyright Netsuke", encoding="utf-8") - - bin_path = root / "target" / target / "release" / f"{bin_name}{bin_ext}" - bin_path.parent.mkdir(parents=True, exist_ok=True) - bin_path.write_bytes(b"binary") - - man_path = ( - root - / "target" - / "generated-man" - / target - / "release" - / f"{bin_name}.1" - ) - man_path.parent.mkdir(parents=True, exist_ok=True) - man_path.write_text(".TH NETSUKE 1", encoding="utf-8") - - return bin_name, target - - -def test_stage_artifacts_records_license(stage_common: ModuleType, tmp_path: Path) -> None: - """The staged bundle should include and reference the licence file.""" - - workspace = tmp_path / "workspace" - workspace.mkdir() - bin_name, target = _prepare_workspace(workspace, bin_ext=".exe") - - config = stage_common.StagingConfig( - bin_name=bin_name, - target=target, - platform="windows", - arch="amd64", - workspace=workspace, - bin_ext=".exe", - ) - github_output = workspace / "outputs.txt" - - result = stage_common.stage_artifacts(config, github_output) - - expected_dir = workspace / "dist" / config.artifact_dir_name - expected_license = expected_dir / "LICENSE" - - assert result.artifact_dir == expected_dir - assert result.binary_path.name.endswith(".exe") - assert result.man_path.name.endswith(".1") - assert result.license_path == expected_license - assert expected_license.read_text(encoding="utf-8") == "Copyright Netsuke" - license_checksum = Path(f"{expected_license}.sha256") - assert license_checksum.exists() - assert "LICENSE" in license_checksum.read_text(encoding="utf-8") - - outputs = github_output.read_text(encoding="utf-8").splitlines() - output_map = dict(line.split("=", 1) for line in outputs if line) - assert output_map["artifact_dir"] == expected_dir.as_posix() - assert output_map["binary_path"] == result.binary_path.as_posix() - assert output_map["man_path"] == result.man_path.as_posix() - assert output_map["license_path"] == expected_license.as_posix() - - -def 
test_stage_artifacts_requires_license(stage_common: ModuleType, tmp_path: Path) -> None: - """It should surface a descriptive error when the licence is missing.""" - - workspace = tmp_path / "workspace" - workspace.mkdir() - bin_name, target = _prepare_workspace(workspace) - (workspace / "LICENSE").unlink() - - config = stage_common.StagingConfig( - bin_name=bin_name, - target=target, - platform="linux", - arch="amd64", - workspace=workspace, - ) - - with pytest.raises(RuntimeError, match="Licence file not found"): - stage_common.stage_artifacts(config, workspace / "outputs.txt") - - -def test_validate_and_locate_sources_success( - stage_common: ModuleType, tmp_path: Path -) -> None: - """Helper should resolve the binary, manual, and licence paths.""" - - workspace = tmp_path / "workspace" - workspace.mkdir() - bin_name, target = _prepare_workspace(workspace) - - bin_src, man_src, licence_src = stage_common._validate_and_locate_sources( - workspace, target, bin_name, "" - ) - - assert bin_src.name == bin_name - assert man_src.name == f"{bin_name}.1" - assert licence_src.name == "LICENSE" - - -def test_validate_and_locate_sources_requires_binary( - stage_common: ModuleType, tmp_path: Path -) -> None: - """Helper should fail fast when the compiled binary is missing.""" - - workspace = tmp_path / "workspace" - workspace.mkdir() - bin_name, target = _prepare_workspace(workspace) - (workspace / "target" / target / "release" / bin_name).unlink() - - with pytest.raises(RuntimeError, match="Binary not found"): - stage_common._validate_and_locate_sources(workspace, target, bin_name, "") - - -def test_prepare_artifact_directory_cleans_previous( - stage_common: ModuleType, tmp_path: Path -) -> None: - """Existing artefacts should be cleared so stale files never leak.""" - - dist_dir = tmp_path / "workspace" / "dist" - artifact_dir = dist_dir / "bundle" - artifact_dir.mkdir(parents=True, exist_ok=True) - stale = artifact_dir / "old.txt" - stale.write_text("stale", 
encoding="utf-8") - - result = stage_common._prepare_artifact_directory(dist_dir, "bundle") - - assert result == artifact_dir - assert result.is_dir() - assert not stale.exists() - - -def test_write_github_outputs_overwrites_existing_content( - stage_common: ModuleType, tmp_path: Path -) -> None: - """Outputs file should be rewritten each run to avoid duplicate keys.""" - - workspace = tmp_path / "workspace" - artifact_dir = workspace / "dist" / "bundle" - artifact_dir.mkdir(parents=True, exist_ok=True) - bin_path = artifact_dir / "netsuke" - man_path = artifact_dir / "netsuke.1" - licence_path = artifact_dir / "LICENSE" - for file in (bin_path, man_path, licence_path): - file.write_text(file.name, encoding="utf-8") - - github_output = workspace / "outputs.txt" - github_output.write_text("stale", encoding="utf-8") - - stage_common._write_github_outputs( - github_output, - stage_common.StageResult( - artifact_dir=artifact_dir, - binary_path=bin_path, - man_path=man_path, - license_path=licence_path, - ), - ) - - contents = github_output.read_text(encoding="utf-8").splitlines() - keys = [line.split("=", 1)[0] for line in contents if line] - assert keys == ["artifact_dir", "binary_path", "man_path", "license_path"] - assert all("stale" not in line for line in contents) - - -def test_write_github_outputs_errors_on_empty_value( - stage_common: ModuleType, tmp_path: Path, monkeypatch: pytest.MonkeyPatch -) -> None: - """Guard against empty outputs by surfacing a descriptive error.""" - - workspace = tmp_path / "workspace" - artifact_dir = workspace / "dist" - artifact_dir.mkdir(parents=True, exist_ok=True) - github_output = workspace / "outputs.txt" - - def _always_empty(_: Path) -> str: - return "" - - monkeypatch.setattr(stage_common, "_escape_output_value", _always_empty) - - with pytest.raises(RuntimeError, match="unexpectedly empty"): - stage_common._write_github_outputs( - github_output, - stage_common.StageResult( - artifact_dir=artifact_dir, - 
binary_path=artifact_dir / "bin", - man_path=artifact_dir / "man", - license_path=artifact_dir / "license", - ), - ) diff --git a/tests_python/test_stage_common_config.py b/tests_python/test_stage_common_config.py new file mode 100644 index 00000000..40458249 --- /dev/null +++ b/tests_python/test_stage_common_config.py @@ -0,0 +1,223 @@ +"""Configuration loader tests for the staging helper.""" + +from __future__ import annotations + +from pathlib import Path + +import pytest + +REPO_ROOT = Path(__file__).resolve().parent.parent + + +def test_staging_config_template_context(stage_common: object, workspace: Path) -> None: + """The configuration should expose a rich template context.""" + + config = stage_common.StagingConfig( + workspace=workspace, + bin_name="netsuke", + dist_dir="dist", + checksum_algorithm="sha256", + artefacts=[], + platform="linux", + arch="amd64", + target="x86_64-unknown-linux-gnu", + bin_ext=".exe", + target_key="linux-x86_64", + ) + + context = config.as_template_context() + + assert context["workspace"] == workspace.as_posix() + assert context["staging_dir_name"] == "netsuke_linux_amd64" + assert context["staging_dir_template"] == "{bin_name}_{platform}_{arch}" + assert context["target_key"] == "linux-x86_64" + + +def test_load_config_merges_common_and_target( + stage_common: object, workspace: Path +) -> None: + """``load_config`` should merge common values with the requested target.""" + + config_file = workspace / "release-staging.toml" + config_file.write_text( + """\ +[common] +bin_name = "netsuke" +dist_dir = "dist" +checksum_algorithm = "sha256" +artefacts = [ + { source = "target/{target}/release/{bin_name}{bin_ext}", required = true, output = "binary_path" }, + { source = "LICENSE", required = true, output = "license_path" }, +] + +[targets.test] +platform = "linux" +arch = "amd64" +target = "x86_64-unknown-linux-gnu" +""", + encoding="utf-8", + ) + + config = stage_common.load_config(config_file, "test") + + assert 
config.workspace == workspace + assert config.bin_name == "netsuke" + assert config.platform == "linux" + assert config.arch == "amd64" + assert config.target == "x86_64-unknown-linux-gnu" + assert config.checksum_algorithm == "sha256" + assert [item.output for item in config.artefacts] == ["binary_path", "license_path"] + + +def test_load_config_reads_repository_file( + stage_common: object, workspace: Path, monkeypatch: pytest.MonkeyPatch +) -> None: + """The repository TOML configuration should parse without modification.""" + + config_source = REPO_ROOT / ".github" / "release-staging.toml" + config_copy = workspace / "release-staging.toml" + config_copy.write_text(config_source.read_text(encoding="utf-8"), encoding="utf-8") + + monkeypatch.setenv("GITHUB_WORKSPACE", str(workspace)) + + config = stage_common.load_config(config_copy, "linux-x86_64") + + assert config.bin_name == "netsuke" + assert config.staging_dir().name == "netsuke_linux_amd64" + + +def test_load_config_requires_workspace_env( + stage_common: object, tmp_path: Path, monkeypatch: pytest.MonkeyPatch +) -> None: + """``load_config`` should fail when ``GITHUB_WORKSPACE`` is unset.""" + + config_file = tmp_path / "release-staging.toml" + config_file.write_text( + """\ +[common] +bin_name = "netsuke" +checksum_algorithm = "sha256" +artefacts = [ { source = "LICENSE" } ] + +[targets.test] +platform = "linux" +arch = "amd64" +target = "x86_64-unknown-linux-gnu" +""", + encoding="utf-8", + ) + + monkeypatch.delenv("GITHUB_WORKSPACE", raising=False) + + with pytest.raises(stage_common.StageError) as exc: + stage_common.load_config(config_file, "test") + assert "Environment variable 'GITHUB_WORKSPACE' is not set" in str(exc.value) + + +@pytest.mark.parametrize( + ("config_content", "error_substring", "test_id"), + [ + pytest.param( + """\ +[common] +bin_name = \"netsuke\" +checksum_algorithm = \"unknown\" +artefacts = [ { source = \"LICENSE\" } ] + +[targets.test] +platform = \"linux\" +arch = \"amd64\" 
+target = \"x86_64-unknown-linux-gnu\" +""", + "Unsupported checksum algorithm", + "rejects_unknown_checksum", + id="rejects_unknown_checksum", + ), + pytest.param( + """\ +[common] +checksum_algorithm = \"sha256\" +artefacts = [ { source = \"LICENSE\" } ] + +[targets.test] +arch = \"amd64\" +target = \"x86_64-unknown-linux-gnu\" +platform = \"linux\" +""", + "bin_name", + "requires_common_bin_name", + id="requires_common_bin_name", + ), + pytest.param( + """\ +[common] +bin_name = \"netsuke\" +checksum_algorithm = \"sha256\" +artefacts = [ { source = \"LICENSE\" } ] + +[targets.test] +arch = \"amd64\" +target = \"x86_64-unknown-linux-gnu\" +""", + "platform", + "requires_target_platform", + id="requires_target_platform", + ), + pytest.param( + """\ +[common] +bin_name = \"netsuke\" +checksum_algorithm = \"sha256\" +artefacts = [ { output = \"binary\" } ] + +[targets.test] +platform = \"linux\" +arch = \"amd64\" +target = \"x86_64-unknown-linux-gnu\" +""", + "source", + "requires_artefact_source", + id="requires_artefact_source", + ), + pytest.param( + """\ +[common] +bin_name = \"netsuke\" +checksum_algorithm = \"sha256\" +artefacts = [ { source = \"LICENSE\" } ] + +[targets.other] +platform = \"linux\" +arch = \"amd64\" +target = \"x86_64-unknown-linux-gnu\" +""", + "Missing configuration key", + "requires_target_section", + id="requires_target_section", + ), + ], +) +def test_load_config_validation_errors( + stage_common: object, + workspace: Path, + config_content: str, + error_substring: str, + test_id: str, +) -> None: + """``load_config`` should raise ``StageError`` for invalid configurations.""" + + config_file = workspace / "release-staging.toml" + config_file.write_text(config_content, encoding="utf-8") + + with pytest.raises(stage_common.StageError) as exc: + stage_common.load_config(config_file, "test") + + message = str(exc.value) + assert error_substring in message, f"{test_id} missing expected substring" + + if test_id == "requires_common_bin_name": 
+ assert "[common]" in message + elif test_id == "requires_target_platform": + assert "[targets.test]" in message + elif test_id == "requires_artefact_source": + assert "entry #1" in message diff --git a/tests_python/test_stage_common_module.py b/tests_python/test_stage_common_module.py new file mode 100644 index 00000000..e606e82f --- /dev/null +++ b/tests_python/test_stage_common_module.py @@ -0,0 +1,49 @@ +"""Sanity checks for the staging helper package.""" + +from __future__ import annotations + +from pathlib import Path + +import pytest + + +def test_public_interface(stage_common: object) -> None: + """The package should expose the documented public API.""" + + expected = { + "ArtefactConfig", + "RESERVED_OUTPUT_KEYS", + "StageError", + "StageResult", + "StagingConfig", + "load_config", + "require_env_path", + "stage_artefacts", + } + assert set(stage_common.__all__) == expected + + +def test_stage_error_is_runtime_error(stage_common: object) -> None: + """``StageError`` should subclass :class:`RuntimeError`.""" + + error = stage_common.StageError("boom") + assert isinstance(error, RuntimeError) + assert str(error) == "boom" + + +def test_require_env_path_returns_path(stage_common: object, workspace: Path) -> None: + """The environment helper should return a ``Path`` when set.""" + + path = stage_common.require_env_path("GITHUB_WORKSPACE") + assert path == workspace + + +def test_require_env_path_missing_env( + stage_common: object, monkeypatch: pytest.MonkeyPatch +) -> None: + """A missing environment variable should raise ``StageError``.""" + + monkeypatch.delenv("GITHUB_WORKSPACE", raising=False) + with pytest.raises(stage_common.StageError) as exc: + stage_common.require_env_path("GITHUB_WORKSPACE") + assert "Environment variable 'GITHUB_WORKSPACE' is not set" in str(exc.value) diff --git a/tests_python/test_stage_common_staging.py b/tests_python/test_stage_common_staging.py new file mode 100644 index 00000000..80b1c4fd --- /dev/null +++ 
"""Behavioural tests for staging artefact handling."""

from __future__ import annotations

import importlib
import json
import os
from pathlib import Path, PurePosixPath, PureWindowsPath

import pytest

from stage_test_helpers import decode_output_file, write_workspace_inputs

# NOTE(review): the ``stage_common`` and ``workspace`` fixtures used throughout
# are provided by the suite's conftest (not visible in this file) — confirm
# ``stage_common`` is the imported module object and ``workspace`` a tmp dir.


def create_staging_config(
    stage_common: object,
    workspace: Path,
    artefacts: list[object],
    target: str | None = None,
) -> object:
    """Return a staging configuration with standard defaults.

    Builds a ``StagingConfig`` for the ``netsuke`` binary with the fixed
    ``linux``/``amd64`` platform pair, a ``dist`` output directory, and
    SHA-256 checksums.  ``target`` defaults to the canonical Linux triple
    when not supplied.
    """

    return stage_common.StagingConfig(
        workspace=workspace,
        bin_name="netsuke",
        dist_dir="dist",
        checksum_algorithm="sha256",
        artefacts=artefacts,
        platform="linux",
        arch="amd64",
        target=target or "x86_64-unknown-linux-gnu",
    )


@pytest.fixture
def standard_target() -> str:
    """Return the canonical test target triple."""

    return "x86_64-unknown-linux-gnu"


@pytest.fixture
def prepared_workspace(workspace: Path, standard_target: str) -> Path:
    """Populate ``workspace`` with staged artefact inputs for tests."""

    write_workspace_inputs(workspace, standard_target)
    return workspace


def test_stage_artefacts_exports_metadata(
    stage_common: object, prepared_workspace: Path, standard_target: str
) -> None:
    """The staging pipeline should copy inputs, hash them, and export outputs."""

    config = create_staging_config(
        stage_common,
        prepared_workspace,
        artefacts=[
            stage_common.ArtefactConfig(
                source="target/{target}/release/{bin_name}{bin_ext}",
                required=True,
                output="binary_path",
            ),
            stage_common.ArtefactConfig(
                source="target/generated-man/{target}/release/{bin_name}.1",
                required=True,
                output="man_path",
            ),
            stage_common.ArtefactConfig(
                source="LICENSE",
                required=True,
                output="license_path",
            ),
        ],
        target=standard_target,
    )

    github_output = prepared_workspace / "outputs.txt"
    result = stage_common.stage_artefacts(config, github_output)

    # Staging directory name is "<bin_name>_<platform>_<arch>" per the config.
    staging_dir = prepared_workspace / "dist" / "netsuke_linux_amd64"
    assert result.staging_dir == staging_dir, "StageResult must record the staging directory"
    assert staging_dir.exists(), "Expected staging directory to be created"

    staged_files = {path.name for path in result.staged_artefacts}
    assert staged_files == {"netsuke", "netsuke.1", "LICENSE"}, "Unexpected artefacts staged"
    assert set(result.outputs) == {"binary_path", "man_path", "license_path"}, "Outputs missing expected keys"
    # Each staged file should gain a sibling "<name>.sha256" checksum file.
    expected_checksums = {
        "netsuke": staging_dir / "netsuke.sha256",
        "netsuke.1": staging_dir / "netsuke.1.sha256",
        "LICENSE": staging_dir / "LICENSE.sha256",
    }
    assert set(result.checksums) == set(expected_checksums), "Checksum outputs missing entries"
    for path in expected_checksums.values():
        assert path.exists(), f"Checksum file {path.name} was not written"

    # The GITHUB_OUTPUT-style file must mirror the in-memory result.
    outputs = decode_output_file(github_output)
    assert outputs["artifact_dir"] == staging_dir.as_posix(), "artifact_dir output should reference staging directory"
    assert outputs["binary_path"].endswith("netsuke"), "binary_path output should point to the staged executable"
    assert outputs["license_path"].endswith("LICENSE"), "license_path output should point to the staged licence"
    artefact_map = json.loads(outputs["artefact_map"])
    assert artefact_map["binary_path"].endswith("netsuke"), "artefact map should include the binary path"
    checksum_map = json.loads(outputs["checksum_map"])
    assert set(checksum_map) == {"netsuke", "netsuke.1", "LICENSE"}, "Checksum map missing entries"


def test_stage_artefacts_uses_alternative_glob(
    stage_common: object, prepared_workspace: Path, standard_target: str
) -> None:
    """Fallback paths should be used when the preferred template is absent."""

    # Remove the preferred (template) man page so the alternatives kick in.
    generated = (
        prepared_workspace
        / "target"
        / "generated-man"
        / standard_target
        / "release"
        / "netsuke.1"
    )
    generated.unlink()

    build_dir = (
        prepared_workspace / "target" / standard_target / "release" / "build"
    )
    first = build_dir / "1" / "out" / "netsuke.1"
    second = build_dir / "2" / "out" / "netsuke.1"
    first.parent.mkdir(parents=True, exist_ok=True)
    second.parent.mkdir(parents=True, exist_ok=True)
    first.write_text(".TH 1", encoding="utf-8")
    second.write_text(".TH 2", encoding="utf-8")
    # Age the first candidate by 100s so the second is unambiguously newest.
    os.utime(first, (first.stat().st_atime, first.stat().st_mtime - 100))

    config = create_staging_config(
        stage_common,
        prepared_workspace,
        artefacts=[
            stage_common.ArtefactConfig(
                source="target/generated-man/{target}/release/{bin_name}.1",
                required=True,
                output="man_path",
                alternatives=["target/{target}/release/build/*/out/{bin_name}.1"],
            ),
        ],
        target=standard_target,
    )

    github_output = prepared_workspace / "outputs.txt"
    result = stage_common.stage_artefacts(config, github_output)
    staged_path = result.outputs["man_path"]
    assert (
        staged_path.read_text(encoding="utf-8") == ".TH 2"
    ), "Fallback glob should pick the newest man page"


def test_stage_artefacts_glob_selects_newest_candidate(
    stage_common: object, prepared_workspace: Path, standard_target: str
) -> None:
    """Glob matches should resolve to the most recently modified file."""

    # Remove the preferred (template) man page so the alternatives kick in.
    generated = (
        prepared_workspace
        / "target"
        / "generated-man"
        / standard_target
        / "release"
        / "netsuke.1"
    )
    generated.unlink()

    build_dir = (
        prepared_workspace / "target" / standard_target / "release" / "build"
    )
    build_dir.mkdir(parents=True, exist_ok=True)
    for idx in range(3):
        candidate = build_dir / f"{idx}" / "out" / "netsuke.1"
        candidate.parent.mkdir(parents=True, exist_ok=True)
        candidate.write_text(f".TH {idx}", encoding="utf-8")
        # Strictly increasing (atime, mtime): index 2 becomes the newest file.
        os.utime(candidate, (100 + idx, 100 + idx))

    config = create_staging_config(
        stage_common,
        prepared_workspace,
        artefacts=[
            stage_common.ArtefactConfig(
                source="target/generated-man/{target}/release/{bin_name}.1",
                required=True,
                output="man_path",
                alternatives=["target/{target}/release/build/*/out/{bin_name}.1"],
            ),
        ],
        target=standard_target,
    )

    github_output = prepared_workspace / "outputs.txt"
    result = stage_common.stage_artefacts(config, github_output)
    staged_path = result.outputs["man_path"]
    assert (
        staged_path.read_text(encoding="utf-8") == ".TH 2"
    ), "Glob resolution should select the most recent candidate"


def test_match_candidate_path_handles_windows_drive(
    stage_common: object, workspace: Path, monkeypatch: pytest.MonkeyPatch
) -> None:
    """Absolute Windows-style globs should resolve relative to the drive root."""

    monkeypatch.chdir(workspace)

    # NOTE(review): on POSIX hosts ``Path("C:\\")`` is a *relative* path, so
    # this tree is created beneath the monkeypatched cwd, not a real drive —
    # presumably that is exactly what lets the helper be exercised portably;
    # confirm against ``_match_candidate_path``'s drive handling.
    drive_root = Path("C:\\")
    windows_workspace = drive_root / "workspace"
    man_dir = windows_workspace / "man"
    man_dir.mkdir(parents=True, exist_ok=True)
    candidate = man_dir / "netsuke.1"
    candidate.write_text(".TH WINDOWS", encoding="utf-8")

    staging = importlib.import_module("stage_common.staging")
    matched = staging._match_candidate_path(
        windows_workspace, "C:/workspace/man/*.1"
    )

    assert matched == candidate


def test_stage_artefacts_warns_for_optional(
    stage_common: object,
    prepared_workspace: Path,
    capfd: pytest.CaptureFixture[str],
    standard_target: str,
) -> None:
    """Optional artefacts should emit a warning when absent but not abort."""

    config = create_staging_config(
        stage_common,
        prepared_workspace,
        artefacts=[
            stage_common.ArtefactConfig(
                source="target/{target}/release/{bin_name}{bin_ext}",
                required=True,
            ),
            # ``required=False`` + missing source should warn, not raise.
            stage_common.ArtefactConfig(
                source="missing.txt",
                required=False,
                output="missing",
            ),
        ],
        target=standard_target,
    )

    stage_common.stage_artefacts(config, prepared_workspace / "out.txt")
    captured = capfd.readouterr()
    # GitHub Actions workflow commands are written to stderr.
    assert (
        "::warning title=Artefact Skipped::Optional artefact missing" in captured.err
    ), "Optional artefact warning missing"


@pytest.mark.parametrize(
    ("artefact_definitions", "expected_match"),
    [
        # "artifact_dir" is a reserved output name and must be rejected.
        pytest.param(
            [
                {
                    "source": "target/{target}/release/{bin_name}{bin_ext}",
                    "required": True,
                    "output": "artifact_dir",
                }
            ],
            "collide.*artifact_dir",
            id="rejects_reserved_outputs",
        ),
        # Two artefacts staged to the same destination file name.
        pytest.param(
            [
                {
                    "source": "target/{target}/release/{bin_name}{bin_ext}",
                    "required": True,
                    "destination": "netsuke",
                },
                {
                    "source": "LICENSE",
                    "required": True,
                    "destination": "netsuke",
                },
            ],
            "Duplicate staged destination",
            id="rejects_duplicate_destinations",
        ),
        # Two artefacts exporting under the same output key.
        pytest.param(
            [
                {
                    "source": "target/{target}/release/{bin_name}{bin_ext}",
                    "required": True,
                    "output": "binary_path",
                },
                {
                    "source": "LICENSE",
                    "required": True,
                    "output": "binary_path",
                },
            ],
            "Duplicate artefact output key",
            id="rejects_duplicate_outputs",
        ),
    ],
)
def test_stage_artefacts_validation_errors(
    stage_common: object,
    prepared_workspace: Path,
    standard_target: str,
    artefact_definitions: list[dict[str, object]],
    expected_match: str,
) -> None:
    """Staging should fail fast when reserved names or duplicates are present."""

    artefacts = [
        stage_common.ArtefactConfig(**definition)
        for definition in artefact_definitions
    ]

    config = create_staging_config(
        stage_common,
        prepared_workspace,
        artefacts=artefacts,
        target=standard_target,
    )

    with pytest.raises(stage_common.StageError, match=expected_match):
        stage_common.stage_artefacts(config, prepared_workspace / "outputs.txt")


def test_stage_artefacts_fails_with_attempt_context(
    stage_common: object, workspace: Path
) -> None:
    """Missing required artefacts should include context in the error message."""

    config = stage_common.StagingConfig(
        workspace=workspace,
        bin_name="netsuke",
        dist_dir="dist",
        checksum_algorithm="sha256",
        artefacts=[
            stage_common.ArtefactConfig(
                source="missing-{target}",
                required=True,
            ),
        ],
        platform="linux",
        arch="amd64",
        target="x86_64-unknown-linux-gnu",
    )

    with pytest.raises(stage_common.StageError) as exc:
        stage_common.stage_artefacts(config, workspace / "outputs.txt")

    # The error must show the workspace, the raw template, and the rendered path.
    message = str(exc.value)
    assert "Workspace=" in message, "Workspace context missing from error"
    assert "missing-{target}" in message, "Template pattern missing from error"
    assert (
        "missing-x86_64-unknown-linux-gnu" in message
    ), "Rendered path missing from error"


def test_glob_root_and_pattern_handles_windows_drive(stage_common: object) -> None:
    """Absolute Windows globs should strip the drive before globbing."""

    staging = importlib.import_module("stage_common.staging")
    helper = staging._glob_root_and_pattern

    root, pattern = helper(PureWindowsPath("C:/dist/*.zip"))
    assert root == "C:\\"
    assert pattern == "dist/*.zip"


def test_glob_root_and_pattern_returns_wildcard_for_root_only(  # noqa: ARG001
    stage_common: object,
) -> None:
    """Root-only absolute paths should glob all children."""

    # Fixture import triggers plugin registration; the value itself is unused.
    staging = importlib.import_module("stage_common.staging")
    helper = staging._glob_root_and_pattern

    root, pattern = helper(PureWindowsPath("C:/"))
    assert root == "C:\\"
    assert pattern == "*"


def test_glob_root_and_pattern_handles_posix_absolute(  # noqa: ARG001
    stage_common: object,
) -> None:
    """POSIX absolute paths should preserve relative segments for globbing."""

    # Fixture import triggers plugin registration; the value itself is unused.
    staging = importlib.import_module("stage_common.staging")
    helper = staging._glob_root_and_pattern

    root, pattern = helper(PurePosixPath("/tmp/dist/*.zip"))
    assert root == "/"
    assert pattern.endswith("dist/*.zip"), pattern


def test_glob_root_and_pattern_rejects_relative_paths(  # noqa: ARG001
    stage_common: object,
) -> None:
    """Relative globs should be rejected to avoid ambiguous anchors."""

    # Fixture import triggers plugin registration; the value itself is unused.
    staging = importlib.import_module("stage_common.staging")
    helper = staging._glob_root_and_pattern

    with pytest.raises(ValueError, match="Expected absolute path"):
        helper(PurePosixPath("dist/*.zip"))


def test_stage_artefacts_matches_absolute_glob(
    stage_common: object, workspace: Path
) -> None:
    """Absolute glob patterns should allow staging artefacts."""

    absolute_root = workspace / "absolute" / "1.2.3"
    absolute_root.mkdir(parents=True, exist_ok=True)
    source_path = absolute_root / "netsuke.txt"
    source_path.write_text("payload", encoding="utf-8")

    # Source is an absolute POSIX glob anchored at the workspace itself.
    artefact = stage_common.ArtefactConfig(
        source=f"{workspace.as_posix()}/absolute/*/netsuke.txt",
        required=True,
        output="absolute_path",
    )
    config = stage_common.StagingConfig(
        workspace=workspace,
        bin_name="netsuke",
        dist_dir="dist",
        checksum_algorithm="sha256",
        artefacts=[artefact],
        platform="linux",
        arch="amd64",
        target="x86_64-unknown-linux-gnu",
    )

    github_output = workspace / "github_output.txt"
    result = stage_common.stage_artefacts(config, github_output)

    assert result.staged_artefacts, "Expected artefact to be staged from absolute glob"
    staged_path = result.staged_artefacts[0]
    assert staged_path.read_text(encoding="utf-8") == "payload"
    assert result.outputs["absolute_path"] == staged_path