diff --git a/.github/actions/generate-coverage/scripts/run_rust.py b/.github/actions/generate-coverage/scripts/run_rust.py
index 59cde881..85d57f78 100644
--- a/.github/actions/generate-coverage/scripts/run_rust.py
+++ b/.github/actions/generate-coverage/scripts/run_rust.py
@@ -149,6 +149,14 @@ def get_line_coverage_percent_from_cobertura(xml_file: Path) -> str:
return _format_percent(covered, total)
+def _safe_close_text_stream(stream: typ.TextIO | None) -> None:
+ """Close ``stream`` while suppressing any cleanup errors."""
+ if stream is None:
+ return
+ with contextlib.suppress(Exception):
+ stream.close()
+
+
def _run_cargo(args: list[str]) -> str:
"""Run ``cargo`` with ``args`` streaming output and return ``stdout``."""
typer.echo(f"$ cargo {shlex.join(args)}")
@@ -160,104 +168,115 @@ def _run_cargo(args: list[str]) -> str:
encoding="utf-8",
errors="replace",
)
- if proc.stdout is None or proc.stderr is None:
- msg = "cargo output streams not captured"
- raise RuntimeError(msg)
- stdout_lines: list[str] = []
-
- if os.name == "nt":
- thread_exceptions: list[Exception] = []
-
- def pump(src: typ.TextIO, *, to_stdout: bool) -> None:
- dest = sys.stdout if to_stdout else sys.stderr
- try:
- for line in iter(src.readline, ""):
- dest.write(line)
- dest.flush()
- if to_stdout:
- stdout_lines.append(line.rstrip("\r\n"))
- except Exception as exc: # noqa: BLE001
- thread_exceptions.append(exc)
- if os.environ.get("RUN_RUST_DEBUG") == "1" or os.environ.get(
- "DEBUG_UTF8"
- ):
- sys.stderr.write(f"Exception in pump thread: {exc}\n")
- sys.stderr.write(traceback.format_exc())
-
- threads = [
- threading.Thread(
- name="cargo-stdout",
- target=pump,
- args=(proc.stdout,),
- kwargs={"to_stdout": True},
- daemon=True,
- ),
- threading.Thread(
- name="cargo-stderr",
- target=pump,
- args=(proc.stderr,),
- kwargs={"to_stdout": False},
- daemon=True,
- ),
- ]
- for thread in threads:
- thread.start()
- # Kill cargo promptly if a pump fails to avoid deadlocks on the other pipe.
- while True:
+ try:
+ if proc.stdout is None or proc.stderr is None:
+ missing_streams = []
+ if proc.stdout is None:
+ missing_streams.append("stdout")
+ if proc.stderr is None:
+ missing_streams.append("stderr")
+ missing = ", ".join(missing_streams)
+ message = f"cargo output streams not captured: missing {missing}"
+ with contextlib.suppress(Exception):
+ proc.kill()
+ with contextlib.suppress(Exception):
+ proc.wait(timeout=5)
+ _safe_close_text_stream(proc.stdout)
+ _safe_close_text_stream(proc.stderr)
+ typer.echo(f"::error::{message}", err=True)
+ raise typer.Exit(1)
+ stdout_lines: list[str] = []
+
+ if os.name == "nt":
+ thread_exceptions: list[Exception] = []
+
+ def pump(src: typ.TextIO, *, to_stdout: bool) -> None:
+ dest = sys.stdout if to_stdout else sys.stderr
+ try:
+ for line in iter(src.readline, ""):
+ dest.write(line)
+ dest.flush()
+ if to_stdout:
+ stdout_lines.append(line.rstrip("\r\n"))
+ except Exception as exc: # noqa: BLE001
+ thread_exceptions.append(exc)
+ if os.environ.get("RUN_RUST_DEBUG") == "1" or os.environ.get(
+ "DEBUG_UTF8"
+ ):
+ sys.stderr.write(f"Exception in pump thread: {exc}\n")
+ sys.stderr.write(traceback.format_exc())
+
+ threads = [
+ threading.Thread(
+ name="cargo-stdout",
+ target=pump,
+ args=(proc.stdout,),
+ kwargs={"to_stdout": True},
+ daemon=True,
+ ),
+ threading.Thread(
+ name="cargo-stderr",
+ target=pump,
+ args=(proc.stderr,),
+ kwargs={"to_stdout": False},
+ daemon=True,
+ ),
+ ]
+ for thread in threads:
+ thread.start()
+ # Kill cargo promptly if a pump fails to avoid deadlocks on the other pipe.
+ while True:
+ if thread_exceptions:
+ with contextlib.suppress(Exception):
+ proc.kill()
+ break
+ if not any(t.is_alive() for t in threads):
+ break
+ for t in threads:
+ t.join(timeout=0.1)
+ # Ensure all threads have finished before handling results.
+ for thread in threads:
+ thread.join()
if thread_exceptions:
+ proc.wait()
+ raise thread_exceptions[0]
+ else:
+ sel = selectors.DefaultSelector()
+ try:
+ sel.register(proc.stdout, selectors.EVENT_READ, data="stdout")
+ sel.register(proc.stderr, selectors.EVENT_READ, data="stderr")
+
+ while sel.get_map():
+ for key, _ in sel.select():
+ line = key.fileobj.readline()
+ if not line:
+ sel.unregister(key.fileobj)
+ continue
+ if key.data == "stdout":
+ typer.echo(line, nl=False)
+ stdout_lines.append(line.rstrip("\r\n"))
+ else:
+ typer.echo(line, err=True, nl=False)
+ except Exception:
+ # Ensure cargo does not outlive the parent if the selector loop fails.
with contextlib.suppress(Exception):
proc.kill()
- break
- if not any(t.is_alive() for t in threads):
- break
- for t in threads:
- t.join(timeout=0.1)
- # Ensure all threads have finished before closing streams.
- for thread in threads:
- thread.join()
- # Streams are guaranteed non-None by earlier guard.
- proc.stdout.close()
- proc.stderr.close()
- if thread_exceptions:
- proc.wait()
- raise thread_exceptions[0]
- else:
- sel = selectors.DefaultSelector()
- try:
- sel.register(proc.stdout, selectors.EVENT_READ, data="stdout")
- sel.register(proc.stderr, selectors.EVENT_READ, data="stderr")
-
- while sel.get_map():
- for key, _ in sel.select():
- line = key.fileobj.readline()
- if not line:
- sel.unregister(key.fileobj)
- continue
- if key.data == "stdout":
- typer.echo(line, nl=False)
- stdout_lines.append(line.rstrip("\r\n"))
- else:
- typer.echo(line, err=True, nl=False)
- except Exception:
- # Ensure cargo does not outlive the parent if the selector loop fails.
- with contextlib.suppress(Exception):
- proc.kill()
- proc.wait()
- raise
- finally:
- sel.close()
- # Safe due to earlier guard.
- proc.stdout.close()
- proc.stderr.close()
-
- retcode = proc.wait()
- if retcode != 0:
- typer.echo(
- f"cargo {shlex.join(args)} failed with code {retcode}",
- err=True,
- )
- raise typer.Exit(code=retcode or 1)
- return "\n".join(stdout_lines)
+ proc.wait()
+ raise
+ finally:
+ sel.close()
+
+ retcode = proc.wait()
+ if retcode != 0:
+ typer.echo(
+ f"cargo {shlex.join(args)} failed with code {retcode}",
+ err=True,
+ )
+ raise typer.Exit(code=retcode or 1)
+ return "\n".join(stdout_lines)
+ finally:
+ _safe_close_text_stream(proc.stdout)
+ _safe_close_text_stream(proc.stderr)
def _merge_lcov(base: Path, extra: Path) -> None:
diff --git a/.github/actions/generate-coverage/tests/test_scripts.py b/.github/actions/generate-coverage/tests/test_scripts.py
index 5d8e582f..7786aa9c 100644
--- a/.github/actions/generate-coverage/tests/test_scripts.py
+++ b/.github/actions/generate-coverage/tests/test_scripts.py
@@ -108,7 +108,7 @@ def kill(self) -> None:
if track_lifecycle:
self.killed = True
- def wait(self) -> int:
+ def wait(self, timeout: float | None = None) -> int:
if track_lifecycle:
self.waited = True
return returncode
@@ -198,6 +198,37 @@ def fake_echo(line: str, *, err: bool = False, nl: bool = True) -> None:
assert res == "out-line"
+def test_run_cargo_windows_closes_streams(
+ monkeypatch: pytest.MonkeyPatch,
+) -> None:
+ """``_run_cargo`` closes captured streams on success."""
+ mod = _load_module(monkeypatch, "run_rust")
+ monkeypatch.setattr(mod.os, "name", "nt")
+ monkeypatch.setattr(mod.typer, "echo", lambda *a, **k: None)
+
+ class TrackingStream(io.StringIO):
+ def __init__(self, value: str) -> None:
+ super().__init__(value)
+ self.close_calls = 0
+
+ def close(self) -> None:
+ self.close_calls += 1
+ super().close()
+
+ stdout = TrackingStream("out-line\n")
+ stderr = TrackingStream("err-line\n")
+ fake_cargo = _make_fake_cargo(stdout, stderr)
+ monkeypatch.setattr(mod, "cargo", fake_cargo)
+
+ result = mod._run_cargo(["llvm-cov"])
+
+ assert result == "out-line"
+ assert stdout.closed
+ assert stderr.closed
+ assert stdout.close_calls >= 1
+ assert stderr.close_calls >= 1
+
+
def test_run_cargo_windows_nonzero_exit(
monkeypatch: pytest.MonkeyPatch,
) -> None:
@@ -228,8 +259,8 @@ def test_run_cargo_windows_pump_exception(
class BoomIO(io.StringIO):
def readline(self) -> str:
- msg = "boom in pump"
- raise RuntimeError(msg)
+ message = "boom in pump"
+ raise RuntimeError(message)
fake_cargo = _make_fake_cargo(BoomIO(), io.StringIO(""), track_lifecycle=True)
monkeypatch.setattr(mod, "cargo", fake_cargo)
@@ -249,9 +280,14 @@ def test_run_cargo_windows_none_stdout(
monkeypatch.setattr(mod.os, "name", "nt")
monkeypatch.setattr(mod.typer, "echo", lambda *a, **k: None)
- monkeypatch.setattr(mod, "cargo", _make_fake_cargo(None, "err-line\n"))
- with pytest.raises(RuntimeError):
+ fake_cargo = _make_fake_cargo(None, "err-line\n")
+ monkeypatch.setattr(mod, "cargo", fake_cargo)
+ with pytest.raises(mod.typer.Exit):
mod._run_cargo([])
+ proc = fake_cargo.last_proc
+ assert proc is not None
+ assert proc.stderr is not None
+ assert proc.stderr.closed
def test_run_cargo_windows_none_stderr(
@@ -262,9 +298,46 @@ def test_run_cargo_windows_none_stderr(
monkeypatch.setattr(mod.os, "name", "nt")
monkeypatch.setattr(mod.typer, "echo", lambda *a, **k: None)
- monkeypatch.setattr(mod, "cargo", _make_fake_cargo("out-line\n", None))
- with pytest.raises(RuntimeError):
+ fake_cargo = _make_fake_cargo("out-line\n", None)
+ monkeypatch.setattr(mod, "cargo", fake_cargo)
+ with pytest.raises(mod.typer.Exit):
mod._run_cargo([])
+ proc = fake_cargo.last_proc
+ assert proc is not None
+ assert proc.stdout is not None
+ assert proc.stdout.closed
+
+
+def test_run_cargo_stream_close_error_suppressed(
+ monkeypatch: pytest.MonkeyPatch,
+) -> None:
+ """Errors closing streams are suppressed during cleanup."""
+ mod = _load_module(monkeypatch, "run_rust")
+ monkeypatch.setattr(mod.os, "name", "nt")
+ monkeypatch.setattr(mod.typer, "echo", lambda *a, **k: None)
+
+ class ExplodingStream(io.StringIO):
+ def __init__(self, value: str) -> None:
+ super().__init__(value)
+ self.close_calls = 0
+
+ def close(self) -> None:
+ self.close_calls += 1
+ super().close()
+ message = "close failure"
+ raise RuntimeError(message)
+
+ stdout = ExplodingStream("out-line\n")
+ stderr = io.StringIO("err-line\n")
+ fake_cargo = _make_fake_cargo(stdout, stderr)
+ monkeypatch.setattr(mod, "cargo", fake_cargo)
+
+ result = mod._run_cargo(["llvm-cov"])
+
+ assert result == "out-line"
+ assert stdout.close_calls >= 1
+ assert stdout.closed
+ assert stderr.closed
def test_run_rust_with_cucumber(tmp_path: Path, shell_stubs: StubManager) -> None:
@@ -510,8 +583,8 @@ def test_lcov_permission_error(
lcov.write_text("LF:1\nLH:1\n")
def bad_read_text(*_: object, **__: object) -> str:
- msg = "nope"
- raise PermissionError(msg)
+ message = "nope"
+ raise PermissionError(message)
monkeypatch.setattr(Path, "read_text", bad_read_text, raising=False)
with pytest.raises(run_rust_module.typer.Exit) as excinfo:
@@ -679,8 +752,8 @@ def test_cobertura_permission_error(
xml.write_text("")
def raise_permission_error(*_: object, **__: object) -> object:
- msg = "denied"
- raise PermissionError(msg)
+ message = "denied"
+ raise PermissionError(message)
import coverage_parsers
diff --git a/.github/actions/release-to-pypi-uv/CHANGELOG.md b/.github/actions/release-to-pypi-uv/CHANGELOG.md
new file mode 100644
index 00000000..77a865db
--- /dev/null
+++ b/.github/actions/release-to-pypi-uv/CHANGELOG.md
@@ -0,0 +1,22 @@
+# Changelog
+
+## v1.0.2 (2025-09-18)
+
+- Add a configurable `python-version` input and ensure all uv commands honour
+ it, letting workflows pin their interpreter version.
+- Harden release validation: retry GitHub API lookups with exponential
+ backoff, tighten semantic version detection, and expand TOML validation
+ coverage along with unit tests for the helper scripts.
+
+## v1.0.1 (2025-09-18)
+
+- Document required workflow permissions for trusted publishing, clarify that
+ the action forwards `GITHUB_TOKEN` automatically, and fix the README usage
+ example to reference the local path without a version suffix.
+
+## v1.0.0 (2025-09-18)
+
+- Initial release: resolve release tags, ensure GitHub Release readiness, and
+ publish Python distributions with uv Trusted Publishing support.
+- Validate `pyproject.toml` versions against the release tag and optionally
+ block dynamic version declarations.
diff --git a/.github/actions/release-to-pypi-uv/README.md b/.github/actions/release-to-pypi-uv/README.md
new file mode 100644
index 00000000..767b4fc5
--- /dev/null
+++ b/.github/actions/release-to-pypi-uv/README.md
@@ -0,0 +1,73 @@
+# Release to PyPI (uv)
+
+Build and publish Python distributions via
+[uv](https://github.com/astral-sh/uv) with GitHub's trusted publishing flow.
+
+## Inputs
+
+| Name | Description | Required | Default |
+| --- | --- | --- | --- |
+| tag | Tag to release (e.g. `v1.2.3`). Required when the workflow is not running on a tag ref. | no | _(empty)_ |
+| require-confirmation | Require a manual confirmation string before publishing. | no | `false` |
+| confirm | Confirmation string. Must equal `release <tag>` (e.g. `release v1.2.3`) when `require-confirmation` is true. | no | _(empty)_ |
+| environment-name | GitHub environment to reference in the release summary. | no | `pypi` |
+| uv-index | Optional uv index name to publish to (e.g. `testpypi`). Must exist in `tool.uv.index`. | no | _(empty)_ |
+| toml-glob | Glob used to discover `pyproject.toml` files for version validation. | no | `**/pyproject.toml` |
+| skip-directories | Comma- or newline-separated directory names to skip during discovery. | no | _(empty)_ |
+| fail-on-dynamic-version | Fail when a project declares a dynamic PEP 621 version instead of a literal string. | no | `false` |
+| fail-on-empty | Fail when no `pyproject.toml` files match the discovery glob. | no | `false` |
+| python-version | Python version to install and use for all uv commands. | no | `3.13` |
+
+The composite action installs the interpreter requested through `python-version`
+before invoking any uv commands, ensuring builds run against the expected
+runtime. Set `fail-on-empty: true` when your repository must always contain at
+least one `pyproject.toml`. This turns the default warning into a failing error
+so misconfigured globs surface early.
+
+Directories named `.git`, `.venv`, `venv`, `.direnv`, `.mypy_cache`,
+`.pytest_cache`, `.cache`, `htmlcov`, `node_modules`, `dist`, and `build` are
+skipped during TOML discovery. Provide a comma- or newline-separated list via
+`skip-directories` when your repository uses additional transient paths.
+
+## Outputs
+
+| Name | Description |
+| --- | --- |
+| tag | Resolved release tag. |
+| version | Resolved release version (tag without the leading `v`). |
+
+> **Required permissions**: grant the job `contents: read` and `id-token: write` permissions so uv Trusted Publishing can exchange an OIDC token with PyPI.
+> The composite action forwards the workflow's `GITHUB_TOKEN` to its scripts as `GH_TOKEN`, so you do not need to add an extra `env` block.
+
+## Usage
+
+```yaml
+name: Release
+on:
+ push:
+ tags:
+ - "v*"
+
+jobs:
+ publish:
+ concurrency:
+ group: release-pypi-${{ github.repository }}-${{ github.ref_name }}
+ cancel-in-progress: true
+ runs-on: ubuntu-latest
+ permissions:
+ contents: read # required for trusted publishing
+ id-token: write # required for trusted publishing
+ steps:
+ - uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+
+ - name: Build and publish
+ uses: ./.github/actions/release-to-pypi-uv
+ with:
+ python-version: '3.12'
+ require-confirmation: true
+ confirm: release ${{ github.ref_name }}
+```
+
+Release history is available in [CHANGELOG](CHANGELOG.md).
diff --git a/.github/actions/release-to-pypi-uv/action.yml b/.github/actions/release-to-pypi-uv/action.yml
new file mode 100644
index 00000000..1fbe9afb
--- /dev/null
+++ b/.github/actions/release-to-pypi-uv/action.yml
@@ -0,0 +1,112 @@
+name: Release to PyPI (uv)
+description: Build and publish Python distributions to PyPI using uv.
+# Required job permissions for OIDC trusted publishing:
+# contents: read
+# id-token: write
+inputs:
+ tag:
+ description: Tag to release (e.g. v1.2.3). Required when the workflow is not running on a tag ref.
+ required: false
+ require-confirmation:
+ description: Require a manual confirmation string before publishing.
+ required: false
+ default: 'false'
+ confirm:
+    description: Confirmation string. Must equal `release <tag>` (e.g. `release v1.2.3`) when `require-confirmation` is true.
+ required: false
+ environment-name:
+ description: GitHub environment to target (informational only, surfaced in the summary output).
+ required: false
+ default: pypi
+ uv-index:
+ description: Optional uv index name to publish to (e.g. testpypi). Must exist in pyproject tool.uv.index.
+ required: false
+ default: ''
+ toml-glob:
+ description: Glob used to discover pyproject.toml files for version validation.
+ required: false
+ default: "**/pyproject.toml"
+ skip-directories:
+ description: Comma- or newline-separated directory names to skip during TOML discovery.
+ required: false
+ default: ''
+ fail-on-dynamic-version:
+ description: Fail if any project declares a dynamic version instead of a literal string.
+ required: false
+ default: 'false'
+ fail-on-empty:
+ description: Fail when no pyproject.toml files match the configured glob.
+ required: false
+ default: 'false'
+ python-version:
+ description: Python version to install and use with uv commands.
+ required: false
+ default: '3.13'
+outputs:
+ tag:
+ description: Resolved release tag.
+ value: ${{ steps.resolve.outputs.tag }}
+ version:
+ description: Resolved release version (tag without the leading v).
+ value: ${{ steps.resolve.outputs.version }}
+runs:
+ using: composite
+ steps:
+ - name: Setup uv
+ # v6.4.3
+ uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc
+ with:
+ python-version: ${{ inputs.python-version }}
+ cache-dependency-glob: |
+ **/pyproject.toml
+ **/uv.lock
+ cache-suffix: action-${{ github.action_ref || github.sha }}
+ - name: Install Python
+ run: uv python install "${{ inputs.python-version }}"
+ shell: bash
+ - name: Determine tag and version
+ id: resolve
+ run: uv run --script "${{ github.action_path }}/scripts/determine_release.py"
+ shell: bash
+ env:
+ INPUT_TAG: ${{ inputs.tag }}
+ - name: Optional manual confirmation
+ if: inputs.require-confirmation == 'true'
+ run: uv run --script "${{ github.action_path }}/scripts/confirm_release.py"
+ shell: bash
+ env:
+ EXPECTED: release ${{ steps.resolve.outputs.tag }}
+ INPUT_CONFIRM: ${{ inputs.confirm }}
+ - name: Ensure GitHub Release exists and is published
+ run: uv run --script "${{ github.action_path }}/scripts/check_github_release.py"
+ shell: bash
+ env:
+ RELEASE_TAG: ${{ steps.resolve.outputs.tag }}
+ GH_TOKEN: ${{ github.token }}
+ GITHUB_TOKEN: ${{ github.token }}
+ GITHUB_REPOSITORY: ${{ github.repository }}
+ - name: Validate TOML files
+ run: uv run --script "${{ github.action_path }}/scripts/validate_toml_versions.py"
+ shell: bash
+ env:
+ RESOLVED_VERSION: ${{ steps.resolve.outputs.version }}
+ INPUT_TOML_GLOB: ${{ inputs.toml-glob }}
+ INPUT_FAIL_ON_DYNAMIC_VERSION: ${{ inputs.fail-on-dynamic-version }}
+ INPUT_FAIL_ON_EMPTY: ${{ inputs.fail-on-empty }}
+ INPUT_SKIP_DIRECTORIES: ${{ inputs.skip-directories }}
+ - name: Build distributions
+ run: uv build
+ shell: bash
+ - name: Publish to PyPI (Trusted Publishing)
+ run: uv run --script "${{ github.action_path }}/scripts/publish_release.py"
+ shell: bash
+ env:
+ INPUT_UV_INDEX: ${{ inputs.uv-index }}
+ - name: Summary
+ if: always()
+ run: uv run --script "${{ github.action_path }}/scripts/write_summary.py"
+ shell: bash
+ env:
+ RESOLVED_TAG: ${{ steps.resolve.outputs.tag }}
+ INPUT_UV_INDEX: ${{ inputs.uv-index }}
+ INPUT_ENVIRONMENT_NAME: ${{ inputs.environment-name }}
diff --git a/.github/actions/release-to-pypi-uv/scripts/check_github_release.py b/.github/actions/release-to-pypi-uv/scripts/check_github_release.py
new file mode 100644
index 00000000..410f2827
--- /dev/null
+++ b/.github/actions/release-to-pypi-uv/scripts/check_github_release.py
@@ -0,0 +1,198 @@
+#!/usr/bin/env -S uv run --script
+# /// script
+# requires-python = ">=3.13"
+# dependencies = ["typer>=0.17,<0.18"]
+# ///
+"""Verify that the GitHub Release for the provided tag exists and is published."""
+
+from __future__ import annotations
+
+import contextlib
+import json
+import random
+import time
+import typing as typ
+import urllib.error
+import urllib.parse
+import urllib.request
+
+import typer
+
+TAG_OPTION = typer.Option(..., envvar="RELEASE_TAG")
+TOKEN_OPTION = typer.Option(..., envvar="GH_TOKEN")
+REPO_OPTION = typer.Option(..., envvar="GITHUB_REPOSITORY")
+
+
+class _UniformGenerator(typ.Protocol):
+ """Protocol describing RNG objects that provide ``uniform``."""
+
+ def uniform(self, a: float, b: float) -> float:
+ """Return a random floating point number N such that ``a <= N <= b``."""
+
+
+SleepFn = typ.Callable[[float], None]
+
+_JITTER = random.SystemRandom()
+
+
+def _sleep_with_jitter(
+ delay: float,
+ *,
+ jitter: _UniformGenerator | None = None,
+ sleep: SleepFn | None = None,
+) -> None:
+ """Sleep for ``delay`` seconds with a deterministic jitter hook for tests."""
+ sleep_base = max(delay, 0.0)
+ jitter_source = _JITTER if jitter is None else jitter
+ sleep_fn = time.sleep if sleep is None else sleep
+ jitter_amount = sleep_base * jitter_source.uniform(0.0, 0.1)
+ sleep_fn(sleep_base + jitter_amount)
+
+
+class GithubReleaseError(RuntimeError):
+ """Raised when the GitHub release is not ready for publishing."""
+
+
+def _fetch_release(repo: str, tag: str, token: str) -> dict[str, object]:
+ api = f"https://api.github.com/repos/{repo}/releases/tags/{tag}"
+ parsed = urllib.parse.urlsplit(api)
+ if parsed.scheme != "https": # pragma: no cover - defensive guard
+ message = f"Unsupported URL scheme '{parsed.scheme}' for GitHub API request."
+ raise GithubReleaseError(message)
+ request = urllib.request.Request( # noqa: S310 - https scheme enforced above
+ api,
+ headers={
+ "Authorization": f"Bearer {token}",
+ "Accept": "application/vnd.github+json",
+ "X-GitHub-Api-Version": "2022-11-28",
+ "User-Agent": "release-to-pypi-action",
+ },
+ )
+ max_attempts = 5
+ backoff_factor = 1.5
+ delay = 1.0
+ payload: str | None = None
+
+ for attempt in range(1, max_attempts + 1):
+ try:
+ with urllib.request.urlopen(request, timeout=30) as response: # noqa: S310
+ payload = response.read().decode("utf-8")
+ break
+ except urllib.error.HTTPError as exc: # pragma: no cover - network failure path
+ detail = (
+ exc.read().decode("utf-8", errors="ignore")
+ if hasattr(exc, "read")
+ else ""
+ )
+ match exc.code:
+ case 401:
+ context = detail or exc.reason
+ message = (
+ "GitHub rejected the token (401 Unauthorized). "
+ "Verify that GH_TOKEN is correct and has not expired."
+ )
+ if context:
+ message = f"{message} ({context})"
+ raise GithubReleaseError(message) from exc
+ case 403:
+ permission_message = (
+ "GitHub token lacks permission to read releases "
+ "or has expired. "
+ "Use a token with contents:read scope."
+ )
+ context = detail or exc.reason
+ message = f"{permission_message} ({context})"
+ raise GithubReleaseError(message) from exc
+ case 404:
+ message = (
+ "No GitHub release found for tag "
+ f"{tag}. Create and publish the release first."
+ )
+ raise GithubReleaseError(message) from exc
+ case _ if attempt == max_attempts:
+ failure_reason = detail or exc.reason
+ message = (
+ "GitHub API request failed with status "
+ f"{exc.code}: {failure_reason}"
+ )
+ raise GithubReleaseError(message) from exc
+ case _:
+ retry_after = None
+ if hasattr(exc, "headers") and exc.headers is not None:
+ retry_after = exc.headers.get("Retry-After")
+ if retry_after:
+ with contextlib.suppress(Exception):
+ delay = float(retry_after)
+ _sleep_with_jitter(delay)
+ delay *= backoff_factor
+ except urllib.error.URLError as exc: # pragma: no cover - network failure path
+ if attempt == max_attempts:
+ message = f"Failed to reach GitHub API: {exc.reason}"
+ raise GithubReleaseError(message) from exc
+ _sleep_with_jitter(delay)
+ delay *= backoff_factor
+ else: # pragma: no cover - loop exhausted without break
+ message = "GitHub API request failed after retries."
+ raise GithubReleaseError(message)
+
+ try:
+ return json.loads(payload or "")
+ except json.JSONDecodeError as exc: # pragma: no cover - unexpected payload
+ message = "GitHub API returned invalid JSON"
+ raise GithubReleaseError(message) from exc
+
+
+def _validate_release(tag: str, data: dict[str, object]) -> str:
+ draft = data.get("draft")
+ prerelease = data.get("prerelease")
+ name = data.get("name") or tag
+
+ if draft:
+ message = (
+ f"Release '{name}' for {tag} is still a draft. "
+ "Publish it before running this action."
+ )
+ raise GithubReleaseError(message)
+ if prerelease:
+ message = (
+ f"Release '{name}' for {tag} is marked as prerelease. "
+ "Publish a normal release first."
+ )
+ raise GithubReleaseError(message)
+
+ return str(name)
+
+
+def main(
+ tag: str = TAG_OPTION,
+ token: str = TOKEN_OPTION,
+ repo: str = REPO_OPTION,
+) -> None:
+ """Check that the GitHub release for ``tag`` is published.
+
+ Parameters
+ ----------
+ tag : str
+ Release tag to validate.
+ token : str
+ Token used to authenticate the GitHub API request.
+ repo : str
+ Repository slug in ``owner/name`` form where the release should exist.
+
+ Raises
+ ------
+ typer.Exit
+ Raised when the release is missing or not ready for publication.
+ """
+ try:
+ data = _fetch_release(repo, tag, token)
+ name = _validate_release(tag, data)
+ except GithubReleaseError as exc:
+ typer.echo(f"::error::{exc}", err=True)
+ raise typer.Exit(1) from exc
+
+ typer.echo(f"GitHub Release '{name}' is published.")
+
+
+if __name__ == "__main__":
+ typer.run(main)
diff --git a/.github/actions/release-to-pypi-uv/scripts/confirm_release.py b/.github/actions/release-to-pypi-uv/scripts/confirm_release.py
new file mode 100644
index 00000000..6c022a57
--- /dev/null
+++ b/.github/actions/release-to-pypi-uv/scripts/confirm_release.py
@@ -0,0 +1,46 @@
+#!/usr/bin/env -S uv run --script
+# /// script
+# requires-python = ">=3.13"
+# dependencies = ["typer>=0.17,<0.18"]
+# ///
+"""Validate that the caller supplied the expected confirmation string."""
+
+from __future__ import annotations
+
+import typer
+
+EXPECTED_OPTION = typer.Option(..., envvar="EXPECTED")
+CONFIRM_OPTION = typer.Option("", envvar="INPUT_CONFIRM")
+
+
+def main(expected: str = EXPECTED_OPTION, confirm: str = CONFIRM_OPTION) -> None:
+ """Validate that the provided confirmation string matches ``expected``.
+
+ Parameters
+ ----------
+ expected : str
+ Confirmation phrase that must be entered to proceed.
+ confirm : str
+ User-supplied confirmation string collected from workflow input.
+
+ Raises
+ ------
+ typer.Exit
+ Raised when the supplied confirmation does not match ``expected``.
+ """
+ # Normalise whitespace in both inputs before comparison.
+ expected = expected.strip()
+ confirm = confirm.strip()
+ if confirm != expected:
+ typer.echo(
+ "::error::Confirmation failed. "
+ "Set the 'confirm' input to the expected phrase.",
+ err=True,
+ )
+ raise typer.Exit(1)
+
+ typer.echo("Manual confirmation OK.")
+
+
+if __name__ == "__main__":
+ typer.run(main)
diff --git a/.github/actions/release-to-pypi-uv/scripts/determine_release.py b/.github/actions/release-to-pypi-uv/scripts/determine_release.py
new file mode 100644
index 00000000..4a529c97
--- /dev/null
+++ b/.github/actions/release-to-pypi-uv/scripts/determine_release.py
@@ -0,0 +1,78 @@
+#!/usr/bin/env -S uv run --script
+# /// script
+# requires-python = ">=3.13"
+# dependencies = ["typer>=0.17,<0.18"]
+# ///
+"""Resolve the release tag and semantic version for the current run."""
+
+from __future__ import annotations
+
+import os
+import re
+from pathlib import Path # noqa: TC003 # used at runtime for Typer CLI types
+
+import typer
+
+TAG_OPTION = typer.Option(None, envvar="INPUT_TAG")
+GITHUB_OUTPUT_OPTION = typer.Option(..., envvar="GITHUB_OUTPUT")
+
+
+def _emit_outputs(dest: Path, tag: str, version: str) -> None:
+ with dest.open("a", encoding="utf-8") as fh:
+ for key, value in (("tag", tag), ("version", version)):
+ fh.write(f"{key}<<__EOF__\n{value}\n__EOF__\n")
+
+
+def main(
+ tag: str | None = TAG_OPTION, github_output: Path = GITHUB_OUTPUT_OPTION
+) -> None:
+ """Resolve the release tag for the workflow execution.
+
+ Parameters
+ ----------
+ tag : str or None
+ Optional release tag supplied via the action input or CLI argument.
+ github_output : Path
+ Destination file used to communicate outputs to GitHub Actions.
+
+ Raises
+ ------
+ typer.Exit
+ If a tag cannot be resolved or does not follow the ``vMAJOR.MINOR.PATCH``
+ semantic versioning format.
+ """
+ ref_type = os.getenv("GITHUB_REF_TYPE", "")
+ ref_name = os.getenv("GITHUB_REF_NAME", "")
+
+ resolved_tag: str | None = None
+ candidate_tag = (tag or "").strip()
+ ref_name = ref_name.strip()
+ if candidate_tag:
+ resolved_tag = candidate_tag
+ elif ref_type == "tag" and ref_name:
+ resolved_tag = ref_name
+
+ if not resolved_tag:
+ typer.echo(
+ "::error::No tag was provided and this run is not on a tag ref.",
+ err=True,
+ )
+ raise typer.Exit(1)
+
+ semver_pattern = r"v\d+\.\d+\.\d+(?:-[0-9A-Za-z.-]+)?(?:\+[0-9A-Za-z.-]+)?"
+ if not re.fullmatch(semver_pattern, resolved_tag):
+ typer.echo(
+ "::error::Tag must be a valid semantic version (e.g. v1.2.3), "
+ f"got '{resolved_tag}'.",
+ err=True,
+ )
+ raise typer.Exit(1)
+
+ version = resolved_tag.removeprefix("v")
+
+ _emit_outputs(github_output, resolved_tag, version)
+ typer.echo(f"Resolved release tag: {resolved_tag} (version: {version})")
+
+
+if __name__ == "__main__":
+ typer.run(main)
diff --git a/.github/actions/release-to-pypi-uv/scripts/publish_release.py b/.github/actions/release-to-pypi-uv/scripts/publish_release.py
new file mode 100644
index 00000000..17ee216d
--- /dev/null
+++ b/.github/actions/release-to-pypi-uv/scripts/publish_release.py
@@ -0,0 +1,89 @@
+#!/usr/bin/env -S uv run --script
+# /// script
+# requires-python = ">=3.13"
+# dependencies = ["typer>=0.17,<0.18"]
+# ///
+"""Publish the built distributions using uv."""
+
+from __future__ import annotations
+
+import contextlib
+import os
+import shutil
+import sys
+from pathlib import Path
+
+import typer
+
+
+def _ensure_python_runtime() -> None:
+ """Fail fast when Python 3.13+ or uv provisioning is unavailable."""
+ if sys.version_info >= (3, 13):
+ return
+ if shutil.which("uv") is not None:
+ return
+ typer.echo(
+ "::error::Python >= 3.13 or uv must be available before publishing.",
+ err=True,
+ )
+ raise typer.Exit(1)
+
+
+def _extend_sys_path() -> None:
+ candidates: list[Path] = []
+ action_path_env = os.getenv("GITHUB_ACTION_PATH")
+ if action_path_env:
+ action_path = Path(action_path_env).resolve()
+ candidates.append(action_path / "scripts")
+ with contextlib.suppress(IndexError):
+ candidates.append(action_path.parents[2])
+ else:
+ script_path = Path(__file__).resolve()
+ scripts_dir = script_path.parent
+ candidates.append(scripts_dir)
+ with contextlib.suppress(IndexError):
+ candidates.append(scripts_dir.parents[3])
+
+ for candidate in candidates:
+ if not candidate.exists():
+ continue
+ path_str = str(candidate)
+ if path_str not in sys.path:
+ sys.path.insert(0, path_str)
+
+
+_ensure_python_runtime()
+_extend_sys_path()
+
+from cmd_utils import run_cmd # noqa: E402
+
+INDEX_OPTION = typer.Option(
+ "",
+ envvar="INPUT_UV_INDEX",
+ help="Optional index name or URL for uv publish.",
+)
+
+
+def main(index: str = "") -> None:
+ """Publish the built distributions with uv.
+
+ Parameters
+ ----------
+ index : str
+ Optional package index name or URL to pass to ``uv publish``.
+ """
+ if index := index.strip():
+ typer.echo(f"Publishing with uv to index '{index}'")
+ run_cmd(["uv", "publish", "--index", index])
+ else:
+ typer.echo("Publishing with uv to default index (PyPI)")
+ run_cmd(["uv", "publish"])
+
+
+def cli(index: str = INDEX_OPTION) -> None:
+ """CLI entrypoint."""
+ main(index=index)
+
+
+if __name__ == "__main__":
+ typer.run(cli)
diff --git a/.github/actions/release-to-pypi-uv/scripts/validate_toml_versions.py b/.github/actions/release-to-pypi-uv/scripts/validate_toml_versions.py
new file mode 100644
index 00000000..bb493869
--- /dev/null
+++ b/.github/actions/release-to-pypi-uv/scripts/validate_toml_versions.py
@@ -0,0 +1,203 @@
+#!/usr/bin/env -S uv run --script
+# /// script
+# requires-python = ">=3.13"
+# dependencies = ["typer>=0.17,<0.18"]
+# ///
+"""Validate that project versions in pyproject.toml files match the release version."""
+
+from __future__ import annotations
+
+import typing as typ
+from pathlib import Path
+
+import typer
+
+VERSION_OPTION = typer.Option(..., envvar="RESOLVED_VERSION")
+PATTERN_OPTION = typer.Option("**/pyproject.toml", envvar="INPUT_TOML_GLOB")
+FAIL_ON_DYNAMIC_OPTION = typer.Option(
+ "false",
+ envvar="INPUT_FAIL_ON_DYNAMIC_VERSION",
+)
+FAIL_ON_EMPTY_OPTION = typer.Option(
+ "false",
+ envvar="INPUT_FAIL_ON_EMPTY",
+)
+SKIP_DIRECTORIES_OPTION = typer.Option(
+ "",
+ envvar="INPUT_SKIP_DIRECTORIES",
+)
+
+# Common transient directories created by tooling (virtualenvs, caches,
+# pytest artefacts such as ``.pytest_cache``/``.cache`` and coverage reports
+# under ``htmlcov``) that should be ignored when searching for
+# ``pyproject.toml`` files to validate.
+DEFAULT_SKIP_PARTS = {
+ ".git",
+ ".venv",
+ "venv",
+ "node_modules",
+ "dist",
+ "build",
+ ".direnv",
+ ".mypy_cache",
+ ".pytest_cache",
+ ".cache",
+ "htmlcov",
+}
+
+SKIP_PARTS = frozenset(DEFAULT_SKIP_PARTS)
+
+TRUTHY_STRINGS = {"true", "1", "yes", "y", "on"}
+
+
def _iter_files(
    pattern: str, *, skip_parts: typ.Collection[str] | None = None
) -> typ.Iterable[Path]:
    """Yield regular files under the CWD matching ``pattern``.

    Matches are produced in a deterministic order (sorted by path
    components). A match is skipped when any of its path components appears
    in ``skip_parts`` (``SKIP_PARTS`` when ``None``).
    """
    skip = set(SKIP_PARTS) if skip_parts is None else set(skip_parts)
    matches = sorted(Path().glob(pattern), key=lambda entry: tuple(entry.parts))
    for entry in matches:
        # Directories and anything inside an excluded component are ignored.
        if entry.is_file() and not (set(entry.parts) & skip):
            yield entry
+
+
def _parse_skip_directories(raw: str | None) -> set[str]:
    """Split a comma- or newline-separated list into directory names.

    Empty input (``None`` or ``""``) yields an empty set; blank entries are
    discarded and surrounding whitespace is stripped.
    """
    if not raw:
        return set()
    # Normalize commas to newlines so both separators are accepted.
    entries = raw.replace(",", "\n").splitlines()
    names: set[str] = set()
    for entry in entries:
        cleaned = entry.strip()
        if cleaned:
            names.add(cleaned)
    return names
+
+
def _parse_bool(value: str | None) -> bool:
    """Interpret a workflow input string as a boolean flag.

    ``None``, empty, and whitespace-only values are false; otherwise the
    trimmed, lowercased value is tested against ``TRUTHY_STRINGS``.
    """
    if value is None:
        return False
    token = value.strip().lower()
    return bool(token) and token in TRUTHY_STRINGS
+
+
def _load_toml(path: Path) -> dict[str, object]:
    """Parse ``path`` as TOML, wrapping all failures in ``RuntimeError``.

    Raises
    ------
    RuntimeError
        If the file cannot be read, ``tomllib`` is unavailable, or the
        content is not valid TOML.
    """
    try:
        text = path.read_text(encoding="utf-8")
    except OSError as exc:
        raise RuntimeError(f"{path}: failed to read: {exc}") from exc

    # Imported lazily so a read failure above is reported first, even on
    # interpreters that lack tomllib.
    try:
        import tomllib
    except ModuleNotFoundError as exc:  # pragma: no cover - python < 3.11
        raise RuntimeError("tomllib module is unavailable") from exc

    try:
        return tomllib.loads(text)
    except tomllib.TOMLDecodeError as exc:  # type: ignore[attr-defined]
        raise RuntimeError(f"{path}: failed to parse: {exc}") from exc
+
+
def main(
    version: str = VERSION_OPTION,
    pattern: str = PATTERN_OPTION,
    fail_on_dynamic: str = FAIL_ON_DYNAMIC_OPTION,
    fail_on_empty: str = FAIL_ON_EMPTY_OPTION,
    skip_directories: str = SKIP_DIRECTORIES_OPTION,
) -> None:
    """Confirm that project versions in TOML files match the release version.

    Parameters
    ----------
    version : str
        Semantic version resolved for the release tag.
    pattern : str
        Glob pattern used to discover ``pyproject.toml`` files to inspect.
    fail_on_dynamic : str
        String flag that controls whether dynamic versions should raise an
        error.
    fail_on_empty : str
        String flag that controls whether missing matches should raise an
        error instead of logging a warning.
    skip_directories : str
        Comma- or newline-separated list of directory name components to ignore
        when matching ``pyproject.toml`` files.

    Raises
    ------
    typer.Exit
        Raised when TOML files cannot be read or contain mismatched versions.
    """
    # User-supplied skip names extend (never replace) the built-in set.
    skip_parts = set(SKIP_PARTS) | _parse_skip_directories(skip_directories)
    files = list(_iter_files(pattern, skip_parts=skip_parts))
    if not files:
        # No matches is fatal only when fail-on-empty is truthy.
        if _parse_bool(fail_on_empty):
            typer.echo(
                f"::error::No TOML files matched pattern {pattern}",
                err=True,
            )
            raise typer.Exit(1)
        typer.echo(f"::warning::No TOML files matched pattern {pattern}")
        return

    literal_version_errors: list[str] = []
    dynamic_errors: list[str] = []
    checked = 0
    fail_dynamic = _parse_bool(fail_on_dynamic)

    for path in files:
        try:
            data = _load_toml(path)
        except RuntimeError as exc:
            # Unreadable or unparseable TOML aborts the whole run.
            typer.echo(f"::error::{exc}", err=True)
            raise typer.Exit(1) from exc

        # Only PEP 621 files (those with a [project] table) are checked.
        project = data.get("project")
        if not isinstance(project, dict):
            continue
        checked += 1

        # Normalize [project].dynamic to a set of field names.
        dynamic = project.get("dynamic")
        dynamic_set = (
            {str(item) for item in dynamic}
            if isinstance(dynamic, list | tuple)
            else set()
        )
        if "version" in dynamic_set:
            message = f"{path}: uses dynamic 'version' (PEP 621)."
            if fail_dynamic:
                dynamic_errors.append(
                    f"{message} Set fail-on-dynamic-version=false to allow."
                )
            else:
                typer.echo(f"::notice::{message} Skipping version check.")
            continue

        toml_version = project.get("version")
        if toml_version is None:
            literal_version_errors.append(
                f"{path}: missing [project].version and not marked dynamic"
            )
            continue

        if str(toml_version) != version:
            literal_version_errors.append(
                f"{path}: [project].version '{toml_version}' != tag version '{version}'"
            )

    # Every collected problem is reported before exiting non-zero so a single
    # run surfaces all mismatches at once.
    if dynamic_errors or literal_version_errors:
        for error in (*dynamic_errors, *literal_version_errors):
            typer.echo(f"::error::{error}", err=True)
        raise typer.Exit(1)

    typer.echo(
        f"Checked {checked} PEP 621 project file(s); all versions match {version}."
    )
+
+
+if __name__ == "__main__":
+ typer.run(main)
diff --git a/.github/actions/release-to-pypi-uv/scripts/write_summary.py b/.github/actions/release-to-pypi-uv/scripts/write_summary.py
new file mode 100644
index 00000000..a938e378
--- /dev/null
+++ b/.github/actions/release-to-pypi-uv/scripts/write_summary.py
@@ -0,0 +1,55 @@
+#!/usr/bin/env -S uv run --script
+# /// script
+# requires-python = ">=3.13"
+# dependencies = ["typer>=0.17,<0.18"]
+# ///
+"""Append a short release summary for the workflow run."""
+
+from __future__ import annotations
+
+from pathlib import Path # noqa: TC003 # used at runtime for Typer CLI types
+
+import typer
+
+TAG_OPTION = typer.Option(..., envvar="RESOLVED_TAG")
+INDEX_OPTION = typer.Option("", envvar="INPUT_UV_INDEX")
+ENV_OPTION = typer.Option("pypi", envvar="INPUT_ENVIRONMENT_NAME")
+SUMMARY_OPTION = typer.Option(..., envvar="GITHUB_STEP_SUMMARY")
+
+
def main(
    tag: str = TAG_OPTION,
    index: str = INDEX_OPTION,
    environment_name: str = ENV_OPTION,
    summary_path: Path = SUMMARY_OPTION,
) -> None:
    """Append release details to the GitHub step summary file.

    Parameters
    ----------
    tag : str
        Resolved release tag to report.
    index : str
        Optional package index identifier provided to the publish step.
    environment_name : str
        Name of the deployment environment associated with the release.
    summary_path : Path
        File path to ``GITHUB_STEP_SUMMARY`` that should receive the content.
    """
    index_label = index or "pypi (default)"
    content = [
        "## Release summary\n",
        f"- Released tag: {tag}\n",
        f"- Publish index: {index_label}\n",
        f"- Environment: {environment_name}\n",
    ]

    # Separate our section from pre-existing summary content with one blank line.
    needs_gap = summary_path.exists() and summary_path.stat().st_size > 0
    with summary_path.open("a", encoding="utf-8") as handle:
        if needs_gap:
            handle.write("\n")
        handle.writelines(content)
+
+
+if __name__ == "__main__":
+ typer.run(main)
diff --git a/.github/actions/release-to-pypi-uv/tests/__init__.py b/.github/actions/release-to-pypi-uv/tests/__init__.py
new file mode 100644
index 00000000..406af808
--- /dev/null
+++ b/.github/actions/release-to-pypi-uv/tests/__init__.py
@@ -0,0 +1 @@
+"""Test helpers for the release-to-pypi-uv action."""
diff --git a/.github/actions/release-to-pypi-uv/tests/_helpers.py b/.github/actions/release-to-pypi-uv/tests/_helpers.py
new file mode 100644
index 00000000..b7057f7e
--- /dev/null
+++ b/.github/actions/release-to-pypi-uv/tests/_helpers.py
@@ -0,0 +1,41 @@
+"""Test helpers for release-to-pypi-uv action scripts."""
+
+from __future__ import annotations
+
+import importlib.util
+import os
+import sys
+import typing as typ
+from pathlib import Path
+
+if typ.TYPE_CHECKING: # pragma: no cover - imported for annotations only
+ from types import ModuleType
+
+_ACTION_PATH = os.environ.get("GITHUB_ACTION_PATH")
+
+if _ACTION_PATH:
+ _action_root = Path(_ACTION_PATH).resolve()
+ SCRIPTS_DIR = _action_root / "scripts"
+ REPO_ROOT = _action_root.parents[2]
+else:
+ SCRIPTS_DIR = Path(__file__).resolve().parents[1] / "scripts"
+ REPO_ROOT = SCRIPTS_DIR.parents[3]
+
+
def load_script_module(name: str) -> ModuleType:
    """Load a script module by *name* from the action's scripts directory."""
    script_path = SCRIPTS_DIR / f"{name}.py"
    spec = importlib.util.spec_from_file_location(
        f"release_to_pypi_uv_{name}", script_path
    )
    if spec is None or spec.loader is None:  # pragma: no cover - import failure
        raise RuntimeError(f"Unable to load script module {name} from {script_path}")
    module = importlib.util.module_from_spec(spec)
    # Register in sys.modules *before* executing so importlib.reload works in
    # tests that want a fresh copy of the module.
    sys.modules[spec.name] = module
    spec.loader.exec_module(module)
    return module
+
+
+__all__ = ["REPO_ROOT", "SCRIPTS_DIR", "load_script_module"]
diff --git a/.github/actions/release-to-pypi-uv/tests/test_action_python_version.py b/.github/actions/release-to-pypi-uv/tests/test_action_python_version.py
new file mode 100644
index 00000000..7461f7f5
--- /dev/null
+++ b/.github/actions/release-to-pypi-uv/tests/test_action_python_version.py
@@ -0,0 +1,59 @@
+"""Tests covering the python-version input in action.yml."""
+
+from __future__ import annotations
+
+import typing as typ
+from pathlib import Path
+
+import yaml
+
+
def _load_action() -> dict[str, typ.Any]:
    """Parse the action's ``action.yml`` metadata into a dictionary."""
    # action.yml lives one level above this tests/ directory.
    action_path = Path(__file__).resolve().parents[1] / "action.yml"
    return yaml.safe_load(action_path.read_text(encoding="utf-8"))
+
+
+def test_action_exposes_python_version_input() -> None:
+ """Unit test: ensure metadata defines python-version with the expected default."""
+ data = _load_action()
+ python_version = data["inputs"]["python-version"]
+ assert python_version["default"] == "3.13"
+ assert "Python version" in python_version["description"]
+
+
+def test_setup_step_forwards_python_version_input() -> None:
+ """Behavioral test: ensure setup-uv installs the requested interpreter."""
+ data = _load_action()
+ steps = data["runs"]["steps"]
+ setup_step = next(step for step in steps if step["name"] == "Setup uv")
+ assert setup_step["with"]["python-version"] == "${{ inputs.python-version }}"
+
+
+def test_install_step_uses_python_version_input() -> None:
+ """Behavioral test: ensure uv python install receives the requested version."""
+ data = _load_action()
+ steps = data["runs"]["steps"]
+ install_step = next(step for step in steps if step["name"] == "Install Python")
+ assert 'uv python install "${{ inputs.python-version }}"' in install_step["run"]
+
+
+def test_validate_step_passes_fail_on_empty_flag() -> None:
+ """Ensure the validation step forwards the fail-on-empty input."""
+ data = _load_action()
+ steps = data["runs"]["steps"]
+ validate_step = next(
+ step for step in steps if step["name"] == "Validate TOML files"
+ )
+ env = validate_step["env"]
+ assert env["INPUT_FAIL_ON_EMPTY"] == "${{ inputs.fail-on-empty }}"
+
+
+def test_validate_step_passes_skip_directories_input() -> None:
+ """Ensure the validation step forwards the skip-directories input."""
+ data = _load_action()
+ steps = data["runs"]["steps"]
+ validate_step = next(
+ step for step in steps if step["name"] == "Validate TOML files"
+ )
+ env = validate_step["env"]
+ assert env["INPUT_SKIP_DIRECTORIES"] == "${{ inputs.skip-directories }}"
diff --git a/.github/actions/release-to-pypi-uv/tests/test_check_github_release.py b/.github/actions/release-to-pypi-uv/tests/test_check_github_release.py
new file mode 100644
index 00000000..c2ca9250
--- /dev/null
+++ b/.github/actions/release-to-pypi-uv/tests/test_check_github_release.py
@@ -0,0 +1,262 @@
+"""Tests for check_github_release.py."""
+
+from __future__ import annotations
+
+import io
+import json
+import typing as typ
+import uuid
+
+import pytest
+
+if typ.TYPE_CHECKING: # pragma: no cover - imported for annotations only
+ from types import ModuleType
+
+from ._helpers import load_script_module
+
+
class DummyResponse:
    """In-memory substitute for an ``urllib`` HTTP response."""

    def __init__(self, payload: dict[str, typ.Any]) -> None:
        """Serialize *payload* to the JSON bytes served by :meth:`read`."""
        self._payload = json.dumps(payload).encode("utf-8")

    def __enter__(self) -> DummyResponse:
        """Support ``with`` blocks by returning the response itself."""
        return self

    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc: BaseException | None,
        traceback: object | None,
    ) -> None:
        """Do nothing on exit; any exception propagates to the caller."""

    def read(self) -> bytes:
        """Return the serialized JSON payload."""
        return self._payload
+
+
+@pytest.fixture(name="module")
+def fixture_module() -> ModuleType:
+ """Load the ``check_github_release`` script module under test."""
+ return load_script_module("check_github_release")
+
+
+@pytest.fixture(name="fake_token")
+def fixture_fake_token() -> str:
+ """Generate a unique but fake token for GitHub API requests."""
+ return f"test-token-{uuid.uuid4().hex}"
+
+
+def test_sleep_with_jitter_allows_custom_rng(module: ModuleType) -> None:
+ """Allow tests to provide deterministic jitter and sleep functions."""
+ calls: list[float] = []
+
+ class FixedRandom:
+ """Stub RNG that always returns a fixed jitter fraction."""
+
+ def uniform(self, a: float, b: float) -> float:
+ assert a == 0.0
+ assert b == 0.1
+ return 0.05
+
+ module._sleep_with_jitter(4.0, jitter=FixedRandom(), sleep=calls.append)
+
+ assert calls == [4.2]
+
+
+def test_success(
+ monkeypatch: pytest.MonkeyPatch,
+ capsys: pytest.CaptureFixture[str],
+ module: ModuleType,
+ fake_token: str,
+) -> None:
+ """Print a success message when GitHub marks the release as published."""
+
+ def fake_urlopen(request: typ.Any, timeout: float = 30) -> DummyResponse: # noqa: ANN401
+ return DummyResponse({"draft": False, "prerelease": False, "name": "1.2.3"})
+
+ monkeypatch.setattr(module.urllib.request, "urlopen", fake_urlopen)
+
+ module.main(tag="v1.2.3", token=fake_token, repo="owner/repo")
+
+ captured = capsys.readouterr()
+ assert "GitHub Release '1.2.3' is published." in captured.out
+
+
+def test_draft_release(
+ monkeypatch: pytest.MonkeyPatch,
+ module: ModuleType,
+ capsys: pytest.CaptureFixture[str],
+ fake_token: str,
+) -> None:
+ """Exit with an error when GitHub reports the release as a draft."""
+
+ def fake_urlopen(request: typ.Any, timeout: float = 30) -> DummyResponse: # noqa: ANN401
+ return DummyResponse({"draft": True, "prerelease": False, "name": "draft"})
+
+ monkeypatch.setattr(module.urllib.request, "urlopen", fake_urlopen)
+
+ with pytest.raises(module.typer.Exit):
+ module.main(tag="v1.0.0", token=fake_token, repo="owner/repo")
+
+ captured = capsys.readouterr()
+ assert "still a draft" in captured.err
+
+
+def test_prerelease(
+ monkeypatch: pytest.MonkeyPatch,
+ module: ModuleType,
+ capsys: pytest.CaptureFixture[str],
+ fake_token: str,
+) -> None:
+ """Exit with an error when GitHub flags the release as a prerelease."""
+
+ def fake_urlopen(request: typ.Any, timeout: float = 30) -> DummyResponse: # noqa: ANN401
+ return DummyResponse({"draft": False, "prerelease": True, "name": "pre"})
+
+ monkeypatch.setattr(module.urllib.request, "urlopen", fake_urlopen)
+
+ with pytest.raises(module.typer.Exit):
+ module.main(tag="v1.0.0", token=fake_token, repo="owner/repo")
+
+ captured = capsys.readouterr()
+ assert "prerelease" in captured.err
+
+
+def test_missing_release(
+ monkeypatch: pytest.MonkeyPatch,
+ module: ModuleType,
+ capsys: pytest.CaptureFixture[str],
+ fake_token: str,
+) -> None:
+ """Raise an error when the GitHub API cannot find the release."""
+
+ def fake_urlopen(request: typ.Any, timeout: float = 30) -> typ.Any: # noqa: ANN401
+ raise module.urllib.error.HTTPError(
+ url=str(request.full_url),
+ code=404,
+ msg="Not Found",
+ hdrs=None,
+ fp=io.BytesIO(b""),
+ )
+
+ monkeypatch.setattr(module.urllib.request, "urlopen", fake_urlopen)
+
+ with pytest.raises(module.typer.Exit):
+ module.main(tag="v1.0.0", token=fake_token, repo="owner/repo")
+
+ captured = capsys.readouterr()
+ assert "No GitHub release found" in captured.err
+
+
+def test_authentication_failure(
+ monkeypatch: pytest.MonkeyPatch,
+ module: ModuleType,
+ capsys: pytest.CaptureFixture[str],
+ fake_token: str,
+) -> None:
+ """Exit with guidance when GitHub rejects the authentication token."""
+ detail = b"Bad credentials"
+ error = module.urllib.error.HTTPError(
+ url="https://api.github.com",
+ code=401,
+ msg="Unauthorized",
+ hdrs=None,
+ fp=io.BytesIO(detail),
+ )
+
+ def raising_urlopen(request: typ.Any, timeout: float = 30) -> typ.Any: # noqa: ANN401
+ _ = request, timeout
+ raise error
+
+ monkeypatch.setattr(module.urllib.request, "urlopen", raising_urlopen)
+
+ with pytest.raises(module.typer.Exit):
+ module.main(tag="v1.0.0", token=fake_token, repo="owner/repo")
+
+ captured = capsys.readouterr()
+ assert "Verify that GH_TOKEN" in captured.err
+ assert "Unauthorized" in captured.err
+
+
+def test_permission_denied(
+ monkeypatch: pytest.MonkeyPatch,
+ module: ModuleType,
+ capsys: pytest.CaptureFixture[str],
+ fake_token: str,
+) -> None:
+ """Exit with a helpful error when GitHub responds with 403 Forbidden."""
+ detail = b"forbidden"
+ error = module.urllib.error.HTTPError(
+ url="https://api.github.com",
+ code=403,
+ msg="Forbidden",
+ hdrs=None,
+ fp=io.BytesIO(detail),
+ )
+
+ def raising_urlopen(request: typ.Any, timeout: float = 30) -> typ.Any: # noqa: ANN401
+ raise error
+
+ monkeypatch.setattr(module.urllib.request, "urlopen", raising_urlopen)
+
+ with pytest.raises(module.typer.Exit):
+ module.main(tag="v1.0.0", token=fake_token, repo="owner/repo")
+
+ captured = capsys.readouterr()
+ assert "GitHub token lacks permission" in captured.err
+
+
+def test_retries_then_success(
+ monkeypatch: pytest.MonkeyPatch,
+ module: ModuleType,
+ capsys: pytest.CaptureFixture[str],
+ fake_token: str,
+) -> None:
+ """Retry transient HTTP failures until GitHub releases the metadata."""
+ attempts: list[int] = []
+
+ def fake_urlopen(request: typ.Any, timeout: float = 30) -> DummyResponse: # noqa: ANN401
+ attempts.append(1)
+ if len(attempts) < 3:
+ message = "temporary"
+ raise module.urllib.error.URLError(message)
+ return DummyResponse({"draft": False, "prerelease": False, "name": "ok"})
+
+ monkeypatch.setattr(module.urllib.request, "urlopen", fake_urlopen)
+ monkeypatch.setattr(module.time, "sleep", lambda _: None)
+
+ module.main(tag="v1.0.0", token=fake_token, repo="owner/repo")
+
+ assert len(attempts) == 3
+ captured = capsys.readouterr()
+ assert "GitHub Release 'ok' is published." in captured.out
+
+
+def test_retries_then_fail(
+ monkeypatch: pytest.MonkeyPatch,
+ module: ModuleType,
+ capsys: pytest.CaptureFixture[str],
+ fake_token: str,
+) -> None:
+ """Abort after exhausting retries when transient errors persist."""
+
+ def failing_urlopen(request: typ.Any, timeout: float = 30) -> typ.Any: # noqa: ANN401
+ _ = request, timeout
+ message = "temporary"
+ raise module.urllib.error.URLError(message)
+
+ monkeypatch.setattr(module.urllib.request, "urlopen", failing_urlopen)
+ monkeypatch.setattr(module.time, "sleep", lambda _: None)
+
+ with pytest.raises(module.typer.Exit) as exc_info:
+ module.main(tag="v1.0.0", token=fake_token, repo="owner/repo")
+
+ captured = capsys.readouterr()
+ assert exc_info.value.exit_code == 1
+ assert "temporary" in captured.err or "fetch" in captured.err
diff --git a/.github/actions/release-to-pypi-uv/tests/test_confirm_release.py b/.github/actions/release-to-pypi-uv/tests/test_confirm_release.py
new file mode 100644
index 00000000..a7a5d2ba
--- /dev/null
+++ b/.github/actions/release-to-pypi-uv/tests/test_confirm_release.py
@@ -0,0 +1,48 @@
+"""Tests for confirm_release.py."""
+
+from __future__ import annotations
+
+import subprocess
+from pathlib import Path
+
+from shared_actions_conftest import REQUIRES_UV
+
+from .test_determine_release import base_env
+
+pytestmark = REQUIRES_UV
+
+
def run_confirm(
    tmp_path: Path, expected: str, confirm: str
) -> subprocess.CompletedProcess[str]:
    """Run the ``confirm_release`` script with explicit confirmation inputs.

    ``expected`` and ``confirm`` are passed via the EXPECTED and
    INPUT_CONFIRM environment variables that the script reads.
    """
    env = base_env(tmp_path)
    env["EXPECTED"] = expected
    env["INPUT_CONFIRM"] = confirm
    script = Path(__file__).resolve().parents[1] / "scripts" / "confirm_release.py"
    # ``uv run --script`` resolves the script's inline dependency metadata.
    cmd = ["uv", "run", "--script", str(script)]
    return subprocess.run(  # noqa: S603
        cmd,
        capture_output=True,
        encoding="utf-8",
        errors="replace",
        env=env,
        check=False,
        # base_env sets PWD to the pytest tmp dir, so the script runs there.
        cwd=env.get("PWD"),
    )
+
+
+def test_confirmation_success(tmp_path: Path) -> None:
+ """Accept matching confirmation phrases."""
+ result = run_confirm(tmp_path, expected="release v1.2.3", confirm="release v1.2.3")
+
+ assert result.returncode == 0, result.stderr
+ assert "Manual confirmation OK." in result.stdout
+
+
+def test_confirmation_failure(tmp_path: Path) -> None:
+ """Reject confirmation attempts with mismatched input."""
+ result = run_confirm(tmp_path, expected="release v1.2.3", confirm="nope")
+
+ assert result.returncode == 1
+ assert "Confirmation failed" in result.stderr
diff --git a/.github/actions/release-to-pypi-uv/tests/test_determine_release.py b/.github/actions/release-to-pypi-uv/tests/test_determine_release.py
new file mode 100644
index 00000000..f97876b0
--- /dev/null
+++ b/.github/actions/release-to-pypi-uv/tests/test_determine_release.py
@@ -0,0 +1,221 @@
+"""Tests for determine_release.py."""
+
+from __future__ import annotations
+
+import os
+import subprocess
+from pathlib import Path
+
+from shared_actions_conftest import REQUIRES_UV
+
+pytestmark = REQUIRES_UV
+
+
def run_script(
    script: Path, *, env: dict[str, str]
) -> subprocess.CompletedProcess[str]:
    """Execute ``determine_release`` with a controlled environment.

    Output is captured as UTF-8 text and no exception is raised on a
    non-zero exit code; callers inspect ``returncode`` themselves.
    """
    # ``uv run --script`` resolves the script's inline dependency metadata.
    cmd = ["uv", "run", "--script", str(script)]
    return subprocess.run(  # noqa: S603
        cmd,
        capture_output=True,
        encoding="utf-8",
        errors="replace",
        env=env,
        check=False,
        # base_env sets PWD to the pytest tmp dir, so the script runs there.
        cwd=env.get("PWD"),
    )
+
+
def base_env(tmp_path: Path) -> dict[str, str]:
    """Construct the base environment shared by the release script tests."""
    merged = {**os.environ}
    # parents[4] of this test file is the repository root
    # (.github/actions/release-to-pypi-uv/tests/ -> repo); expose it on
    # PYTHONPATH so the scripts under test can import shared helpers.
    root = str(Path(__file__).resolve().parents[4])
    prev = os.environ.get("PYTHONPATH", "")
    merged["PYTHONPATH"] = root + (os.pathsep + prev if prev else "")
    merged["PYTHONIOENCODING"] = "utf-8"
    # Scripts append GitHub outputs here; tests read the file back afterwards.
    merged["GITHUB_OUTPUT"] = str(tmp_path / "out.txt")
    merged["PWD"] = str(tmp_path)
    return merged
+
+
def read_outputs(tmp_path: Path) -> dict[str, str]:
    """Return ``GITHUB_OUTPUT`` key/value pairs emitted by the script.

    Supports both the simple ``key=value`` form and the heredoc form
    (``key<<__EOF__`` ... ``__EOF__``) recommended for multiline values.
    Returns an empty mapping when no output file was written.
    """
    parsed: dict[str, str] = {}
    output_file = tmp_path / "out.txt"
    if not output_file.exists():
        return parsed
    remaining = iter(output_file.read_text(encoding="utf-8").splitlines())
    for raw in remaining:
        if raw.endswith("<<__EOF__"):
            # Heredoc form: consume lines until the terminator.
            name = raw.split("<<", 1)[0]
            collected: list[str] = []
            for body_line in remaining:
                if body_line == "__EOF__":
                    break
                collected.append(body_line)
            parsed[name] = "\n".join(collected)
        elif "=" in raw:
            name, _, simple = raw.partition("=")
            parsed[name] = simple
    return parsed
+
+
+def test_outputs_use_multiline_format(tmp_path: Path) -> None:
+ """Write GitHub Action outputs using the recommended heredoc syntax."""
+ env = base_env(tmp_path)
+ env["INPUT_TAG"] = "v3.1.4"
+
+ script = Path(__file__).resolve().parents[1] / "scripts" / "determine_release.py"
+ result = run_script(script, env=env)
+
+ assert result.returncode == 0, result.stderr
+ lines = (tmp_path / "out.txt").read_text(encoding="utf-8").splitlines()
+ assert lines == [
+ "tag<<__EOF__",
+ "v3.1.4",
+ "__EOF__",
+ "version<<__EOF__",
+ "3.1.4",
+ "__EOF__",
+ ]
+
+
+def test_resolves_tag_from_ref(tmp_path: Path) -> None:
+ """Derive the release tag from Git reference metadata."""
+ env = base_env(tmp_path)
+ env["GITHUB_REF_TYPE"] = "tag"
+ env["GITHUB_REF_NAME"] = "v1.2.3"
+
+ script = Path(__file__).resolve().parents[1] / "scripts" / "determine_release.py"
+ result = run_script(script, env=env)
+
+ assert result.returncode == 0, result.stderr
+ outputs = read_outputs(tmp_path)
+ assert outputs["tag"] == "v1.2.3"
+ assert outputs["version"] == "1.2.3"
+
+
+def test_resolves_tag_from_input(tmp_path: Path) -> None:
+ """Derive the release tag from the workflow input when present."""
+ env = base_env(tmp_path)
+ env["INPUT_TAG"] = "v2.0.0"
+
+ script = Path(__file__).resolve().parents[1] / "scripts" / "determine_release.py"
+ result = run_script(script, env=env)
+
+ assert result.returncode == 0, result.stderr
+ outputs = read_outputs(tmp_path)
+ assert outputs["tag"] == "v2.0.0"
+ assert outputs["version"] == "2.0.0"
+
+
+def test_input_tag_overrides_ref(tmp_path: Path) -> None:
+ """Prefer the workflow input tag when both sources are present."""
+ env = base_env(tmp_path)
+ env["GITHUB_REF_TYPE"] = "tag"
+ env["GITHUB_REF_NAME"] = "v0.9.9"
+ env["INPUT_TAG"] = "v2.3.4"
+
+ script = Path(__file__).resolve().parents[1] / "scripts" / "determine_release.py"
+ result = run_script(script, env=env)
+
+ assert result.returncode == 0, result.stderr
+ outputs = read_outputs(tmp_path)
+ assert outputs["tag"] == "v2.3.4"
+ assert outputs["version"] == "2.3.4"
+
+
+def test_accepts_prerelease_and_build_tags(tmp_path: Path) -> None:
+ """Allow SemVer pre-release and build metadata components."""
+ env = base_env(tmp_path)
+ env["INPUT_TAG"] = "v1.2.3-rc.1+build.5"
+
+ script = Path(__file__).resolve().parents[1] / "scripts" / "determine_release.py"
+ result = run_script(script, env=env)
+
+ assert result.returncode == 0, result.stderr
+ outputs = read_outputs(tmp_path)
+ assert outputs["tag"] == "v1.2.3-rc.1+build.5"
+ assert outputs["version"] == "1.2.3-rc.1+build.5"
+
+
+def test_rejects_invalid_tag(tmp_path: Path) -> None:
+ """Reject release tags that do not follow the expected SemVer format."""
+ env = base_env(tmp_path)
+ env["GITHUB_REF_TYPE"] = "tag"
+ env["GITHUB_REF_NAME"] = "release-1.0.0"
+
+ script = Path(__file__).resolve().parents[1] / "scripts" / "determine_release.py"
+ result = run_script(script, env=env)
+
+ assert result.returncode == 1
+ assert "Tag must be a valid semantic version" in result.stderr
+
+
+def test_errors_when_no_tag_and_not_on_tag_ref(tmp_path: Path) -> None:
+ """Fail when neither Git metadata nor inputs provide a tag."""
+ env = base_env(tmp_path)
+ env.pop("GITHUB_REF_TYPE", None)
+ env.pop("GITHUB_REF_NAME", None)
+ env.pop("INPUT_TAG", None)
+
+ script = Path(__file__).resolve().parents[1] / "scripts" / "determine_release.py"
+ result = run_script(script, env=env)
+
+ assert result.returncode == 1
+ assert "No tag was provided" in result.stderr
+
+
+def test_errors_when_ref_type_missing(tmp_path: Path) -> None:
+ """Fail when ``GITHUB_REF_TYPE`` is absent."""
+ env = base_env(tmp_path)
+ env.pop("GITHUB_REF_TYPE", None)
+ env["GITHUB_REF_NAME"] = "v1.2.3"
+
+ script = Path(__file__).resolve().parents[1] / "scripts" / "determine_release.py"
+ result = run_script(script, env=env)
+
+ assert result.returncode == 1
+ assert "No tag was provided" in result.stderr
+
+
+def test_errors_when_ref_name_missing(tmp_path: Path) -> None:
+ """Fail when ``GITHUB_REF_NAME`` is not provided."""
+ env = base_env(tmp_path)
+ env["GITHUB_REF_TYPE"] = "tag"
+ env.pop("GITHUB_REF_NAME", None)
+
+ script = Path(__file__).resolve().parents[1] / "scripts" / "determine_release.py"
+ result = run_script(script, env=env)
+
+ assert result.returncode == 1
+ assert "No tag was provided" in result.stderr
+
+
+def test_errors_when_ref_name_empty(tmp_path: Path) -> None:
+ """Fail when ``GITHUB_REF_NAME`` is empty."""
+ env = base_env(tmp_path)
+ env["GITHUB_REF_TYPE"] = "tag"
+ env["GITHUB_REF_NAME"] = ""
+
+ script = Path(__file__).resolve().parents[1] / "scripts" / "determine_release.py"
+ result = run_script(script, env=env)
+
+ assert result.returncode == 1
+ assert "No tag was provided" in result.stderr
+
+
+def test_errors_on_malformed_version_tag(tmp_path: Path) -> None:
+ """Fail when the release tag omits version components."""
+ env = base_env(tmp_path)
+ env["GITHUB_REF_TYPE"] = "tag"
+ env["GITHUB_REF_NAME"] = "v1.2"
+
+ script = Path(__file__).resolve().parents[1] / "scripts" / "determine_release.py"
+ result = run_script(script, env=env)
+
+ assert result.returncode == 1
+ assert "Tag must be a valid semantic version" in result.stderr
diff --git a/.github/actions/release-to-pypi-uv/tests/test_publish_release.py b/.github/actions/release-to-pypi-uv/tests/test_publish_release.py
new file mode 100644
index 00000000..9bbe873e
--- /dev/null
+++ b/.github/actions/release-to-pypi-uv/tests/test_publish_release.py
@@ -0,0 +1,151 @@
+"""Tests for publish_release.py."""
+
+from __future__ import annotations
+
+import types
+import typing as typ
+
+if typ.TYPE_CHECKING: # pragma: no cover - imported for annotations only
+ from types import ModuleType
+
+import pytest
+from typer.testing import CliRunner
+
+from ._helpers import SCRIPTS_DIR, load_script_module
+
+
+@pytest.fixture(name="publish_module")
+def fixture_publish_module() -> ModuleType:
+ """Load the ``publish_release`` script module with repository paths set."""
+ module = load_script_module("publish_release")
+ scripts_dir = str(SCRIPTS_DIR)
+ if scripts_dir not in module.sys.path: # type: ignore[attr-defined]
+ module.sys.path.insert(0, scripts_dir) # type: ignore[attr-defined]
+ return module
+
+
+@pytest.mark.parametrize(
+ ("index", "expected_calls", "expected_message"),
+ [
+ ("", [["uv", "publish"]], "Publishing with uv to default index (PyPI)"),
+ (
+ " testpypi ",
+ [["uv", "publish", "--index", "testpypi"]],
+ "Publishing with uv to index 'testpypi'",
+ ),
+ ],
+)
+def test_publish_index_behaviour(
+ monkeypatch: pytest.MonkeyPatch,
+ capsys: pytest.CaptureFixture[str],
+ publish_module: ModuleType,
+ index: str,
+ expected_calls: list[list[str]],
+ expected_message: str,
+) -> None:
+ """Exercise publishing for both default and custom index inputs."""
+ calls: list[list[str]] = []
+
+ def fake_run_cmd(args: list[str], **_: object) -> None:
+ calls.append(args)
+
+ monkeypatch.setattr(publish_module, "run_cmd", fake_run_cmd)
+
+ publish_module.main(index=index)
+
+ assert calls == expected_calls
+ captured = capsys.readouterr()
+ assert expected_message in captured.out
+
+
+def test_ensure_python_runtime_errors_without_uv(
+ monkeypatch: pytest.MonkeyPatch,
+ capsys: pytest.CaptureFixture[str],
+ publish_module: ModuleType,
+) -> None:
+ """Guard the fail-fast check when Python < 3.13 and uv is unavailable."""
+ stub_sys = types.SimpleNamespace(version_info=(3, 12, 0))
+ monkeypatch.setattr(publish_module, "sys", stub_sys)
+ monkeypatch.setattr(publish_module.shutil, "which", lambda name: None)
+
+ with pytest.raises(publish_module.typer.Exit):
+ publish_module._ensure_python_runtime()
+
+ err = capsys.readouterr().err
+ assert "Python >= 3.13" in err
+
+
+def test_publish_run_cmd_error(
+ monkeypatch: pytest.MonkeyPatch, publish_module: ModuleType
+) -> None:
+ """Propagate errors raised by ``run_cmd`` during publishing."""
+
+ class DummyError(Exception):
+ pass
+
+ def fake_run_cmd(_: list[str], **__: object) -> None:
+ message = "uv publish failed"
+ raise DummyError(message)
+
+ monkeypatch.setattr(publish_module, "run_cmd", fake_run_cmd)
+
+ with pytest.raises(DummyError, match="uv publish failed"):
+ publish_module.main(index="")
+
+
+def test_cli_proxies_to_main(
+ monkeypatch: pytest.MonkeyPatch, publish_module: ModuleType
+) -> None:
+ """Ensure the CLI entrypoint forwards arguments to ``main``."""
+ received: dict[str, str] = {}
+
+ def fake_main(*, index: str) -> None:
+ received["index"] = index
+
+ monkeypatch.setattr(publish_module, "main", fake_main)
+
+ publish_module.cli(index="mirror")
+
+ assert received == {"index": "mirror"}
+
+
+def test_cli_runner_default_index(
+ monkeypatch: pytest.MonkeyPatch, publish_module: ModuleType
+) -> None:
+ """Exercise the CLI behaviour when no index is provided."""
+ calls: list[list[str]] = []
+
+ def fake_run_cmd(args: list[str], **_: object) -> None:
+ calls.append(args)
+
+ monkeypatch.setattr(publish_module, "run_cmd", fake_run_cmd)
+
+ runner = CliRunner()
+ app = publish_module.typer.Typer()
+ app.command()(publish_module.cli)
+ result = runner.invoke(app, [])
+
+ assert result.exit_code == 0
+ assert calls == [["uv", "publish"]]
+ assert "Publishing with uv to default index (PyPI)" in result.output
+
+
+def test_cli_runner_respects_env_index(
+ monkeypatch: pytest.MonkeyPatch, publish_module: ModuleType
+) -> None:
+ """Accept the index from the GitHub Action input environment variable."""
+ calls: list[list[str]] = []
+
+ def fake_run_cmd(args: list[str], **_: object) -> None:
+ calls.append(args)
+
+ monkeypatch.setattr(publish_module, "run_cmd", fake_run_cmd)
+
+ runner = CliRunner()
+ app = publish_module.typer.Typer()
+ app.command()(publish_module.cli)
+ result = runner.invoke(app, [], env={"INPUT_UV_INDEX": "testpypi"})
+
+ assert result.exit_code == 0
+ assert calls == [["uv", "publish", "--index", "testpypi"]]
+ assert "Publishing with uv to index 'testpypi'" in result.output
diff --git a/.github/actions/release-to-pypi-uv/tests/test_validate_toml_versions.py b/.github/actions/release-to-pypi-uv/tests/test_validate_toml_versions.py
new file mode 100644
index 00000000..f57f6b76
--- /dev/null
+++ b/.github/actions/release-to-pypi-uv/tests/test_validate_toml_versions.py
@@ -0,0 +1,506 @@
+"""Tests for validate_toml_versions.py."""
+
+from __future__ import annotations
+
+import typing as typ
+
+if typ.TYPE_CHECKING: # pragma: no cover - type hints only
+ from pathlib import Path
+ from types import ModuleType
+
+import pytest
+from typer.testing import CliRunner
+
+from ._helpers import load_script_module
+
+MODULE: ModuleType = load_script_module("validate_toml_versions")
+SKIP_PARTS = tuple(sorted(MODULE.SKIP_PARTS))
+
+
+@pytest.fixture(name="module")
+def fixture_module() -> ModuleType:
+ """Reload the ``validate_toml_versions`` script for a clean state."""
+ return load_script_module("validate_toml_versions")
+
+
+@pytest.fixture
+def project_root(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> Path:
+ """Use a temporary directory as the working tree for each test."""
+ monkeypatch.chdir(tmp_path)
+ return tmp_path
+
+
+def _write_pyproject(base: Path, content: str) -> None:
+ """Create a ``pyproject.toml`` file populated with the provided content."""
+ base.mkdir(parents=True, exist_ok=True)
+ (base / "pyproject.toml").write_text(content.strip(), encoding="utf-8")
+
+
+def _invoke_main(module: ModuleType, **kwargs: object) -> None:
+ """Invoke ``module.main`` with defaults tailored for the tests."""
+ kwargs.setdefault("pattern", "**/pyproject.toml")
+ kwargs.setdefault("fail_on_dynamic", "false")
+ kwargs.setdefault("fail_on_empty", "false")
+ kwargs.setdefault("skip_directories", "")
+ module.main(**kwargs)
+
+
+def test_passes_when_versions_match(
+ project_root: Path, module: ModuleType, capsys: pytest.CaptureFixture[str]
+) -> None:
+ """Succeed when all discovered packages match the expected version."""
+ _write_pyproject(
+ project_root / "pkg",
+ """
+[project]
+name = "demo"
+version = "1.0.0"
+""",
+ )
+
+ _invoke_main(module, version="1.0.0", fail_on_dynamic=None)
+
+ captured = capsys.readouterr()
+ assert (
+ captured.out.strip()
+ == "Checked 1 PEP 621 project file(s); all versions match 1.0.0."
+ )
+
+
+def test_cli_defaults_when_optional_parameters_omitted(
+ project_root: Path, module: ModuleType
+) -> None:
+ """Use default CLI values when optional flags are not provided."""
+ _write_pyproject(
+ project_root / "pkg",
+ """
+[project]
+name = "demo"
+version = "1.0.0"
+""",
+ )
+
+ runner = CliRunner()
+ app = module.typer.Typer()
+ app.command()(module.main)
+ result = runner.invoke(app, ["--version", "1.0.0"])
+
+ assert result.exit_code == 0
+ assert "all versions match 1.0.0" in result.output
+
+
+def test_fails_on_mismatch(
+ project_root: Path, module: ModuleType, capsys: pytest.CaptureFixture[str]
+) -> None:
+ """Fail when a package declares a version that differs from the tag."""
+ _write_pyproject(
+ project_root / "pkg",
+ """
+[project]
+name = "demo"
+version = "1.0.1"
+""",
+ )
+
+ with pytest.raises(module.typer.Exit):
+ _invoke_main(module, version="1.0.0")
+
+ captured = capsys.readouterr()
+ assert "version '1.0.1' != tag version '1.0.0'" in captured.err
+
+
+def test_dynamic_version_failure(
+ project_root: Path, module: ModuleType, capsys: pytest.CaptureFixture[str]
+) -> None:
+ """Fail when dynamic versions are disallowed but present in metadata."""
+ _write_pyproject(
+ project_root / "pkg",
+ """
+[project]
+name = "demo"
+dynamic = ["version"]
+""",
+ )
+
+ with pytest.raises(module.typer.Exit):
+ _invoke_main(module, version="1.0.0", fail_on_dynamic="true")
+
+ captured = capsys.readouterr()
+ assert "dynamic 'version'" in captured.err
+
+
+@pytest.mark.parametrize("truthy", ["true", "TRUE", "Yes", " y ", "1", "On"])
+def test_dynamic_version_failure_for_truthy_variants(
+ project_root: Path,
+ module: ModuleType,
+ capsys: pytest.CaptureFixture[str],
+ truthy: str,
+) -> None:
+ """Fail whenever dynamic versions are disallowed with truthy inputs."""
+ _write_pyproject(
+ project_root / "pkg",
+ """
+[project]
+name = "demo"
+dynamic = ["version"]
+""",
+ )
+
+ with pytest.raises(module.typer.Exit):
+ _invoke_main(module, version="1.0.0", fail_on_dynamic=truthy)
+
+ captured = capsys.readouterr()
+ assert "dynamic 'version'" in captured.err
+
+
+def test_fails_on_parse_error(
+ project_root: Path, module: ModuleType, capsys: pytest.CaptureFixture[str]
+) -> None:
+ """Fail gracefully when the TOML configuration cannot be parsed."""
+ target = project_root / "pkg"
+ target.mkdir()
+ (target / "pyproject.toml").write_text("this is not TOML")
+
+ with pytest.raises(module.typer.Exit):
+ _invoke_main(module, version="1.0.0")
+
+ captured = capsys.readouterr()
+ assert "failed to parse" in captured.err
+
+
+def test_dynamic_version_allowed_when_flag_false(
+ project_root: Path,
+ module: ModuleType,
+ capsys: pytest.CaptureFixture[str],
+) -> None:
+ """Allow dynamic versions when the flag explicitly disables failures."""
+ _write_pyproject(
+ project_root / "pkg",
+ """
+[project]
+name = "demo"
+dynamic = ["version"]
+""",
+ )
+
+ _invoke_main(module, version="1.0.0", fail_on_dynamic="false")
+
+ captured = capsys.readouterr()
+ assert "uses dynamic 'version'" in captured.out
+
+
+@pytest.mark.parametrize("falsey", ["false", "", "no", "0", "off", "n", "False"])
+def test_dynamic_version_allowed_for_falsey_variants(
+ project_root: Path,
+ module: ModuleType,
+ capsys: pytest.CaptureFixture[str],
+ falsey: str,
+) -> None:
+ """Allow dynamic versions for all supported falsey flag values."""
+ _write_pyproject(
+ project_root / "pkg",
+ """
+[project]
+name = "demo"
+dynamic = ["version"]
+""",
+ )
+
+ _invoke_main(module, version="1.0.0", fail_on_dynamic=falsey)
+
+ captured = capsys.readouterr()
+ assert "uses dynamic 'version'" in captured.out
+
+
+@pytest.mark.parametrize("skip_part", SKIP_PARTS, ids=lambda part: part)
+def test_skips_files_in_ignored_directory(
+ project_root: Path,
+ module: ModuleType,
+ capsys: pytest.CaptureFixture[str],
+ skip_part: str,
+) -> None:
+    """Warn and succeed when matches appear solely under ignored directories."""
+ assert skip_part in module.SKIP_PARTS
+ _write_pyproject(
+ project_root / skip_part / "pkg",
+ """
+[project]
+name = "ignored"
+version = "9.9.9"
+""",
+ )
+ _write_pyproject(
+ project_root / "nested" / skip_part / "pkg",
+ """
+[project]
+name = "nested-ignored"
+version = "9.9.9"
+""",
+ )
+
+ discovered = list(module._iter_files("**/pyproject.toml"))
+ assert not discovered
+
+ _invoke_main(module, version="1.0.0")
+ captured = capsys.readouterr()
+ assert "::warning::No TOML files matched pattern" in captured.out
+
+
+def test_iter_files_skips_virtualenv_and_mypy_cache(
+ project_root: Path,
+ module: ModuleType,
+ capsys: pytest.CaptureFixture[str],
+) -> None:
+ """Ignore matches located under virtualenv and mypy cache directories."""
+ _write_pyproject(
+ project_root / ".venv" / "pkg",
+ """
+[project]
+name = "ignored-venv"
+version = "0.1.0"
+""",
+ )
+ _write_pyproject(
+ project_root / "src" / ".mypy_cache" / "pkg",
+ """
+[project]
+name = "ignored-mypy"
+version = "0.2.0"
+""",
+ )
+
+ discovered = list(module._iter_files("**/pyproject.toml"))
+ assert not discovered
+
+ _invoke_main(module, version="1.0.0")
+ captured = capsys.readouterr()
+ assert "::warning::No TOML files matched pattern" in captured.out
+
+
+def test_custom_skip_directories_filter_matches(
+ project_root: Path,
+ module: ModuleType,
+ capsys: pytest.CaptureFixture[str],
+) -> None:
+ """Allow repositories to skip additional transient directory names."""
+ _write_pyproject(
+ project_root / "cache_dir" / "pkg",
+ """
+[project]
+name = "ignored-cache"
+version = "0.3.0"
+""",
+ )
+ _write_pyproject(
+ project_root / "alt-dir" / "pkg",
+ """
+[project]
+name = "ignored-alt"
+version = "0.4.0"
+""",
+ )
+
+ discovered = list(module._iter_files("**/pyproject.toml"))
+ assert discovered
+ assert "cache_dir" not in module.SKIP_PARTS
+
+ _invoke_main(
+ module,
+ version="1.0.0",
+ skip_directories="cache_dir\nalt-dir",
+ )
+
+ captured = capsys.readouterr()
+ assert "::warning::No TOML files matched pattern" in captured.out
+ assert "cache_dir" not in module.SKIP_PARTS
+
+
+def test_fail_on_empty_errors_when_enabled(
+ project_root: Path,
+ module: ModuleType,
+ capsys: pytest.CaptureFixture[str],
+) -> None:
+ """Raise an error instead of a warning when ``fail_on_empty`` is truthy."""
+ with pytest.raises(module.typer.Exit):
+ _invoke_main(module, version="1.0.0", fail_on_empty="true")
+
+ captured = capsys.readouterr()
+ assert "::error::No TOML files matched pattern" in captured.err
+
+
+def test_skip_parts_cover_transient_tooling_dirs(module: ModuleType) -> None:
+ """Ensure tooling artefact directories remain excluded from discovery."""
+ expected = {
+ ".venv",
+ "venv",
+ ".direnv",
+ ".mypy_cache",
+ ".pytest_cache",
+ ".cache",
+ "htmlcov",
+ }
+ assert expected <= module.SKIP_PARTS
+
+
+def test_dynamic_version_allowed_when_flag_unset(
+ project_root: Path,
+ module: ModuleType,
+ capsys: pytest.CaptureFixture[str],
+) -> None:
+ """Allow dynamic versions when the flag is omitted entirely."""
+ _write_pyproject(
+ project_root / "pkg",
+ """
+[project]
+name = "demo"
+dynamic = ["version"]
+""",
+ )
+
+ _invoke_main(module, version="1.0.0")
+
+ captured = capsys.readouterr()
+ assert "uses dynamic 'version'" in captured.out
+
+
+def test_missing_project_section_is_ignored(
+ project_root: Path,
+ module: ModuleType,
+ capsys: pytest.CaptureFixture[str],
+) -> None:
+ """Ignore files lacking a ``[project]`` table when validating versions."""
+ _write_pyproject(
+ project_root / "pkg",
+ """
+[tool.poetry]
+name = "demo"
+version = "1.0.0"
+""",
+ )
+
+ _invoke_main(module, version="1.0.0")
+
+ captured = capsys.readouterr()
+ assert captured.err == ""
+
+
+def test_fails_when_project_version_missing(
+ project_root: Path,
+ module: ModuleType,
+ capsys: pytest.CaptureFixture[str],
+) -> None:
+ """Error when a project lacks a version and is not marked dynamic."""
+ _write_pyproject(
+ project_root / "pkg",
+ """
+[project]
+name = "demo"
+""",
+ )
+ with pytest.raises(module.typer.Exit):
+ _invoke_main(module, version="1.0.0")
+ captured = capsys.readouterr()
+ assert "missing [project].version" in captured.err
+
+
+def test_multiple_toml_files_mixed_validity(
+ project_root: Path,
+ module: ModuleType,
+ capsys: pytest.CaptureFixture[str],
+) -> None:
+ """Fail when any discovered TOML file contains a mismatched version."""
+ _write_pyproject(
+ project_root / "pkg_valid",
+ """
+[project]
+name = "demo"
+version = "1.0.0"
+""",
+ )
+ _write_pyproject(
+ project_root / "pkg_invalid",
+ """
+[project]
+name = "demo"
+version = "2.0.0"
+""",
+ )
+
+ with pytest.raises(module.typer.Exit):
+ _invoke_main(module, version="1.0.0")
+
+ captured = capsys.readouterr()
+ assert "!= tag version" in captured.err
+
+
+def test_iter_files_discovers_paths_in_deterministic_order(
+ project_root: Path,
+ module: ModuleType,
+ monkeypatch: pytest.MonkeyPatch,
+) -> None:
+ """Ensure TOML discovery yields paths in a stable, sorted order."""
+ _write_pyproject(
+ project_root / "pkg_b",
+ """
+[project]
+name = "pkg-b"
+version = "1.0.0"
+""",
+ )
+ _write_pyproject(
+ project_root / "pkg_a",
+ """
+[project]
+name = "pkg-a"
+version = "1.0.0"
+""",
+ )
+
+ first = project_root / "pkg_a" / "pyproject.toml"
+ second = project_root / "pkg_b" / "pyproject.toml"
+
+ def fake_glob(
+ self: Path,
+ pattern: str,
+ ) -> typ.Iterator[Path]:
+ _ = self
+ assert pattern == "**/pyproject.toml"
+ return iter((second, first))
+
+ monkeypatch.setattr(module.Path, "glob", fake_glob, raising=False)
+
+ discovered = list(module._iter_files("**/pyproject.toml"))
+
+ assert discovered == [first, second]
+
+
+def test_iter_files_discovers_paths_in_sorted_order(
+ project_root: Path,
+ module: ModuleType,
+) -> None:
+ """Ensure discovery order remains deterministic for reproducible output."""
+ for name in ("pkg_c", "pkg_a", "pkg_b"):
+ _write_pyproject(
+ project_root / name,
+ """
+[project]
+name = "demo"
+version = "1.0.0"
+""",
+ )
+
+ discovered = list(module._iter_files("**/pyproject.toml"))
+ relative = [path.as_posix() for path in discovered]
+ assert relative == sorted(relative)
+
+
+@pytest.mark.parametrize("value", ["true", "TRUE", "Yes", "1", "on"])
+def test_parse_bool_truthy_values(module: ModuleType, value: str) -> None:
+ """Treat recognised truthy values as ``True`` for configuration flags."""
+ assert module._parse_bool(value) is True
+
+
+@pytest.mark.parametrize("value", [None, "", "false", "no", "0", "off", "n"])
+def test_parse_bool_falsey_values(module: ModuleType, value: str | None) -> None:
+ """Treat recognised falsey values as ``False`` for configuration flags."""
+ assert module._parse_bool(value) is False
diff --git a/.github/actions/release-to-pypi-uv/tests/test_write_summary.py b/.github/actions/release-to-pypi-uv/tests/test_write_summary.py
new file mode 100644
index 00000000..f0932cc6
--- /dev/null
+++ b/.github/actions/release-to-pypi-uv/tests/test_write_summary.py
@@ -0,0 +1,71 @@
+"""Tests for write_summary.py."""
+
+from __future__ import annotations
+
+import typing as typ
+from pathlib import Path
+
+if typ.TYPE_CHECKING: # pragma: no cover - imported for annotations only
+ from types import ModuleType
+
+import pytest
+
+from ._helpers import load_script_module
+
+
+@pytest.fixture(name="write_module")
+def fixture_write_module() -> ModuleType:
+ """Load the ``write_summary`` script module under test."""
+ return load_script_module("write_summary")
+
+
+def test_write_summary_appends_markdown(
+ tmp_path: Path, write_module: ModuleType
+) -> None:
+ """Append a fresh summary block when the summary file is empty."""
+ summary_path = tmp_path / "summary.md"
+
+ write_module.main(
+ tag="v1.2.3",
+ index="",
+ environment_name="pypi",
+ summary_path=summary_path,
+ )
+
+ content = summary_path.read_text(encoding="utf-8")
+ assert "## Release summary" in content
+ assert "- Released tag: v1.2.3" in content
+ assert "- Publish index: pypi (default)" in content
+ assert "- Environment: pypi" in content
+
+
+def test_write_summary_handles_existing_content(
+ tmp_path: Path, write_module: ModuleType
+) -> None:
+ """Preserve existing content while appending the release summary."""
+ summary_path = tmp_path / "summary.md"
+ summary_path.write_text("Existing\n", encoding="utf-8")
+
+ write_module.main(
+ tag="v1.2.3",
+ index="custom",
+ environment_name="prod",
+ summary_path=summary_path,
+ )
+
+ content = summary_path.read_text(encoding="utf-8")
+ assert content.endswith("- Environment: prod\n")
+ assert content.count("## Release summary") == 1
+
+
+def test_write_summary_raises_on_io_error(write_module: ModuleType) -> None:
+ """Surface file-system errors when the summary path is invalid."""
+ summary_path = Path("/nonexistent/path/summary.md")
+
+ with pytest.raises(FileNotFoundError):
+ write_module.main(
+ tag="v1.0.0",
+ index="",
+ environment_name="pypi",
+ summary_path=summary_path,
+ )
diff --git a/.github/actions/rust-build-release/action.yml b/.github/actions/rust-build-release/action.yml
index b8f17ef0..3f4bce39 100644
--- a/.github/actions/rust-build-release/action.yml
+++ b/.github/actions/rust-build-release/action.yml
@@ -104,7 +104,7 @@ runs:
echo "::error:: binary not found at ${bin_src}"
exit 1
fi
- mapfile -d '' -t man_matches < <(
+ mapfile -d $'\0' -t man_matches < <(
find "target/${{ inputs.target }}/release/build" \
-path "*/out/${{ inputs.bin-name }}.1" \
-type f -print0
diff --git a/.github/actions/rust-build-release/src/main.py b/.github/actions/rust-build-release/src/main.py
index 75940b15..5fb0cbfe 100755
--- a/.github/actions/rust-build-release/src/main.py
+++ b/.github/actions/rust-build-release/src/main.py
@@ -124,6 +124,21 @@ def _toolchain_channel(toolchain_name: str) -> str:
return toolchain_name
+def _probe_runtime(name: str) -> bool:
+ """Return True when *name* runtime is available, tolerating probe timeouts."""
+ try:
+ return runtime_available(name)
+ except subprocess.TimeoutExpired as exc:
+ timeout = getattr(exc, "timeout", None)
+ duration = f" after {timeout}s" if timeout else ""
+ message = (
+ f"::warning::{name} runtime probe timed out{duration}; "
+ "treating runtime as unavailable"
+ )
+ typer.echo(message, err=True)
+ return False
+
+
@app.command()
def main(
target: str = typer.Argument("", help="Target triple to build"),
@@ -226,8 +241,8 @@ def main(
docker_present = False
podman_present = False
if should_probe_container(sys.platform, target):
- docker_present = runtime_available("docker")
- podman_present = runtime_available("podman")
+ docker_present = _probe_runtime("docker")
+ podman_present = _probe_runtime("podman")
has_container = docker_present or podman_present
use_cross = cross_path is not None and has_container
diff --git a/.github/actions/rust-build-release/src/runtime.py b/.github/actions/rust-build-release/src/runtime.py
index 09f5b2f0..e19708f2 100644
--- a/.github/actions/rust-build-release/src/runtime.py
+++ b/.github/actions/rust-build-release/src/runtime.py
@@ -18,60 +18,103 @@
CROSS_CONTAINER_ERROR_CODES = {125, 126, 127}
+_ARCH_TO_WINDOWS_DEFAULT = {
+ "amd64": "x86_64-pc-windows-msvc",
+ "x86_64": "x86_64-pc-windows-msvc",
+ "arm64": "aarch64-pc-windows-msvc",
+ "aarch64": "aarch64-pc-windows-msvc",
+}
+
+_ARCH_TO_DARWIN_DEFAULT = {
+ "x86_64": "x86_64-apple-darwin",
+ "amd64": "x86_64-apple-darwin",
+ "arm64": "aarch64-apple-darwin",
+ "aarch64": "aarch64-apple-darwin",
+}
+
+
+def _platform_default_host_target() -> str:
+ """Return a platform-specific fallback host triple."""
+ machine = (
+ platform.machine().lower()
+ or os.environ.get("PROCESSOR_ARCHITECTURE", "").lower()
+ )
+ if sys_platform := sys.platform:
+ if sys_platform == "win32":
+ return _ARCH_TO_WINDOWS_DEFAULT.get(machine, "x86_64-pc-windows-msvc")
+ if sys_platform == "darwin":
+ return _ARCH_TO_DARWIN_DEFAULT.get(machine, "x86_64-apple-darwin")
+ return "x86_64-unknown-linux-gnu"
+
+
+DEFAULT_HOST_TARGET = _platform_default_host_target()
+_DEFAULT_PROBE_TIMEOUT = 10
+_MAX_PROBE_TIMEOUT = 300
+
+
+def _run_probe(
+ exec_path: str | Path,
+ name: str,
+ probe: str,
+ args: list[str],
+ *,
+ cwd: str | Path | None = None,
+ **kwargs: object,
+) -> subprocess.CompletedProcess[str] | None:
+ """Execute a runtime probe and handle common failure modes."""
+ try:
+ return run_validated(
+ exec_path,
+ args,
+ allowed_names=(name, f"{name}.exe"),
+ timeout=PROBE_TIMEOUT,
+ cwd=cwd,
+ **kwargs,
+ )
+ except subprocess.TimeoutExpired:
+ typer.echo(
+ "::warning:: "
+ f"{name} {probe} probe exceeded {PROBE_TIMEOUT}s timeout; "
+ "treating runtime as unavailable",
+ err=True,
+ )
+ except (OSError, subprocess.CalledProcessError):
+ pass
+ return None
+
+
+def _get_probe_timeout() -> int:
+ """Return the sanitized probe timeout for runtime detection."""
+ raw = os.environ.get("RUNTIME_PROBE_TIMEOUT")
+ if raw is None:
+ return _DEFAULT_PROBE_TIMEOUT
+ try:
+ value = int(raw)
+ except ValueError:
+ typer.echo(
+ "::warning:: Invalid RUNTIME_PROBE_TIMEOUT value"
+ f" {raw!r}; using {_DEFAULT_PROBE_TIMEOUT}s fallback",
+ err=True,
+ )
+ return _DEFAULT_PROBE_TIMEOUT
+ if value <= 0:
+ typer.echo(
+ "::warning:: "
+ f"RUNTIME_PROBE_TIMEOUT={value}s raised to {_DEFAULT_PROBE_TIMEOUT}s",
+ err=True,
+ )
+ return _DEFAULT_PROBE_TIMEOUT
+ if value > _MAX_PROBE_TIMEOUT:
+ typer.echo(
+ "::warning:: "
+ f"RUNTIME_PROBE_TIMEOUT={value}s capped to {_MAX_PROBE_TIMEOUT}s",
+ err=True,
+ )
+ return _MAX_PROBE_TIMEOUT
+ return value
+
-def _normalize_arch(machine: str) -> str:
- mapping = {
- "amd64": "x86_64",
- "x64": "x86_64",
- "x86_64": "x86_64",
- "i386": "i686",
- "i486": "i686",
- "i586": "i686",
- "i686": "i686",
- "x86": "i686",
- "arm64": "aarch64",
- "aarch64": "aarch64",
- "armv8": "aarch64",
- "armv8a": "aarch64",
- "armv8l": "aarch64",
- "armv7": "armv7",
- "armv7a": "armv7",
- "armv7hl": "armv7",
- "armv7l": "armv7",
- "armv6": "armv6",
- "armv6l": "armv6",
- "ppc64": "ppc64",
- "ppc64le": "ppc64le",
- "powerpc64": "ppc64",
- "powerpc64le": "ppc64le",
- "s390x": "s390x",
- "riscv64": "riscv64",
- "loongarch64": "loongarch64",
- }
- if not machine:
- return "x86_64"
- machine_lower = machine.lower()
- return mapping.get(machine_lower, machine_lower)
-
-
-def _default_host_target_for_current_platform() -> str:
- arch = _normalize_arch(platform.machine()) or "x86_64"
- system_name = platform.system().lower()
- platform_id = sys.platform.lower()
- if system_name == "windows":
- return f"{arch}-pc-windows-msvc"
- if system_name.startswith(("cygwin", "msys")) or platform_id in {"cygwin", "msys"}:
- return f"{arch}-pc-windows-gnu"
- if system_name == "darwin":
- return f"{arch}-apple-darwin"
- if system_name.startswith("linux"):
- return f"{arch}-unknown-linux-gnu"
- identifier = system_name or platform_id or "linux"
- return f"{arch}-unknown-{identifier}"
-
-
-DEFAULT_HOST_TARGET = _default_host_target_for_current_platform()
-PROBE_TIMEOUT = int(os.environ.get("RUNTIME_PROBE_TIMEOUT", "10"))
+PROBE_TIMEOUT = _get_probe_timeout()
def runtime_available(name: str, *, cwd: str | Path | None = None) -> bool:
@@ -83,35 +126,33 @@ def runtime_available(name: str, *, cwd: str | Path | None = None) -> bool:
exec_path = ensure_allowed_executable(path, (name, f"{name}.exe"))
except UnexpectedExecutableError:
return False
- try:
- result = run_validated(
- exec_path,
- ["info"],
- allowed_names=(name, f"{name}.exe"),
- stdout=subprocess.DEVNULL,
- stderr=subprocess.DEVNULL,
- timeout=PROBE_TIMEOUT,
- cwd=cwd,
- )
- except (OSError, subprocess.TimeoutExpired):
+ result = _run_probe(
+ exec_path,
+ name,
+ "info",
+ ["info"],
+ cwd=cwd,
+ stdout=subprocess.DEVNULL,
+ stderr=subprocess.DEVNULL,
+ )
+ if result is None:
return False
if result.returncode != 0:
return False
if name == "podman":
- try:
- security_info = run_validated(
- exec_path,
- ["info", "--format", "{{json .Host.Security}}"],
- allowed_names=(name, f"{name}.exe"),
- capture_output=True,
- text=True,
- check=True,
- timeout=PROBE_TIMEOUT,
- cwd=cwd,
- )
- except (OSError, subprocess.CalledProcessError, subprocess.TimeoutExpired):
+ security_info = _run_probe(
+ exec_path,
+ name,
+ "security",
+ ["info", "--format", "{{json .Host.Security}}"],
+ cwd=cwd,
+ capture_output=True,
+ text=True,
+ check=True,
+ )
+ if security_info is None:
return False
try:
diff --git a/.github/actions/rust-build-release/tests/conftest.py b/.github/actions/rust-build-release/tests/conftest.py
index 88bacbc5..6161513e 100644
--- a/.github/actions/rust-build-release/tests/conftest.py
+++ b/.github/actions/rust-build-release/tests/conftest.py
@@ -135,13 +135,6 @@ def patch_shutil_which(self, func: cabc.Callable[[str], str | None]) -> None:
"""Patch ``shutil.which`` for the wrapped module."""
self.monkeypatch.setattr(self.module.shutil, "which", func)
- def patch_subprocess_run(self, func: cabc.Callable[..., object]) -> None:
- """Patch ``subprocess.run`` for the wrapped module."""
- if hasattr(self.module, "run_validated"):
- self.monkeypatch.setattr(self.module, "run_validated", func)
- if hasattr(self.module, "subprocess"):
- self.monkeypatch.setattr(self.module.subprocess, "run", func)
-
def patch_platform(self, platform: str) -> None:
"""Force ``sys.platform`` to ``platform`` within the module."""
self.monkeypatch.setattr(self.module.sys, "platform", platform)
@@ -154,6 +147,24 @@ def patch_attr(self, name: str, value: object) -> None:
HarnessFactory = cabc.Callable[[ModuleType], ModuleHarness]
+@pytest.fixture
+def echo_recorder(
+ monkeypatch: pytest.MonkeyPatch,
+) -> cabc.Callable[[ModuleType], list[tuple[str, bool]]]:
+ """Return a helper that patches ``typer.echo`` and records messages."""
+
+ def install(module: ModuleType) -> list[tuple[str, bool]]:
+ messages: list[tuple[str, bool]] = []
+
+ def fake_echo(message: str, *, err: bool = False) -> None:
+ messages.append((message, err))
+
+ monkeypatch.setattr(module.typer, "echo", fake_echo)
+ return messages
+
+ return install
+
+
@pytest.fixture
def module_harness(monkeypatch: pytest.MonkeyPatch) -> HarnessFactory:
"""Return a factory that wraps a module with a harness and recorder."""
@@ -333,22 +344,14 @@ def pytest_collection_modifyitems(
nodeid = getattr(item, "nodeid", "")
if WINDOWS_SMOKE_TEST not in nodeid or "-pc-windows-" not in nodeid:
continue
- xfail_marks = [
- mark for mark in item.iter_markers(name="xfail") if mark in item.own_markers
- ]
- if not xfail_marks:
- continue
- drop_marks = [
+ original_count = len(item.own_markers)
+ filtered_markers = [
mark
- for mark in xfail_marks
- if (
- isinstance(reason := mark.kwargs.get("reason"), str)
- and reason.strip() == WINDOWS_XFAIL_REASON
+ for mark in item.own_markers
+ if not (
+ mark.name == "xfail"
+ and mark.kwargs.get("reason") == WINDOWS_XFAIL_REASON
)
]
- if not drop_marks:
- continue
- keep_marks = [mark for mark in xfail_marks if mark not in drop_marks]
- item.remove_marker("xfail")
- for mark in keep_marks:
- item.add_marker(pytest.mark.xfail(*mark.args, **mark.kwargs))
+ if len(filtered_markers) != original_count:
+ item.own_markers[:] = filtered_markers
diff --git a/.github/actions/rust-build-release/tests/test_cross_install.py b/.github/actions/rust-build-release/tests/test_cross_install.py
index 077a8dc3..481630eb 100644
--- a/.github/actions/rust-build-release/tests/test_cross_install.py
+++ b/.github/actions/rust-build-release/tests/test_cross_install.py
@@ -9,50 +9,46 @@
import zipfile
import pytest
+from shared_actions_conftest import (
+ CMD_MOX_UNSUPPORTED,
+ _register_cross_version_stub,
+ _register_docker_info_stub,
+ _register_podman_info_stub,
+ _register_rustup_toolchain_stub,
+)
if typ.TYPE_CHECKING:
from pathlib import Path
from types import ModuleType
- from .conftest import HarnessFactory
-
-
-def _constant_run(stdout: str) -> typ.Callable[..., subprocess.CompletedProcess[str]]:
- """Return a ``run_validated`` stub emitting *stdout*."""
-
- def fake_run(
- executable: str,
- args: list[str],
- *,
- allowed_names: tuple[str, ...],
- capture_output: bool = False,
- check: bool = False,
- text: bool = False,
- **_: object,
- ) -> subprocess.CompletedProcess[str]:
- _ = allowed_names
- cmd = [executable, *args]
- return subprocess.CompletedProcess(cmd, 0, stdout=stdout)
+ from shared_actions_conftest import CmdMox
- return fake_run
+ from .conftest import HarnessFactory
+@CMD_MOX_UNSUPPORTED
def test_installs_cross_when_missing(
- cross_module: ModuleType, module_harness: HarnessFactory
+ cross_module: ModuleType,
+ module_harness: HarnessFactory,
+ cmd_mox: CmdMox,
) -> None:
"""Installs cross when it is missing."""
harness = module_harness(cross_module)
- cross_checks = [None, "/usr/bin/cross"]
+ cross_path = _register_cross_version_stub(cmd_mox)
+ cross_checks = [None, cross_path]
def fake_which(name: str) -> str | None:
- return cross_checks.pop(0) if name == "cross" else None
+ if name == "cross":
+ return cross_checks.pop(0) if cross_checks else cross_path
+ return None
harness.patch_shutil_which(fake_which)
- harness.patch_subprocess_run(_constant_run("cross 0.2.5\n"))
+ cmd_mox.replay()
path, ver = cross_module.ensure_cross("0.2.5")
+ cmd_mox.verify()
- assert path == "/usr/bin/cross"
+ assert path == cross_path
assert ver == "0.2.5"
install = next(
cmd for cmd in harness.calls if cmd[:3] == ["cargo", "install", "cross"]
@@ -60,6 +56,9 @@ def fake_which(name: str) -> str | None:
assert "--locked" in install
idx = install.index("--version")
assert install[idx + 1] == "0.2.5"
+ # Prove we did not take the git fallback path
+ assert "--git" not in install
+ assert "--tag" not in install
def test_cross_install_failure_non_windows(
@@ -85,36 +84,29 @@ def fail_install(cmd: list[str]) -> None:
assert exc_info.value.output == "install failed"
+@CMD_MOX_UNSUPPORTED
def test_upgrades_outdated_cross(
- cross_module: ModuleType, module_harness: HarnessFactory
+ cross_module: ModuleType,
+ module_harness: HarnessFactory,
+ cmd_mox: CmdMox,
) -> None:
"""Upgrades cross when an older version is installed."""
harness = module_harness(cross_module)
- versions = ["cross 0.2.4\n", "cross 0.2.5\n"]
+ cross_path = _register_cross_version_stub(
+ cmd_mox, ["cross 0.2.4\n", "cross 0.2.5\n"]
+ )
- def fake_run(
- executable: str,
- args: list[str],
- *,
- allowed_names: tuple[str, ...],
- capture_output: bool = False,
- check: bool = False,
- text: bool = False,
- **_: object,
- ) -> subprocess.CompletedProcess[str]:
- _ = allowed_names
- cmd = [executable, *args]
- return subprocess.CompletedProcess(cmd, 0, stdout=versions.pop(0))
+ def fake_which(name: str) -> str | None:
+ return cross_path if name == "cross" else None
- harness.patch_shutil_which(
- lambda name: "/usr/bin/cross" if name == "cross" else None
- )
- harness.patch_subprocess_run(fake_run)
+ harness.patch_shutil_which(fake_which)
+ cmd_mox.replay()
path, ver = cross_module.ensure_cross("0.2.5")
+ cmd_mox.verify()
- assert path == "/usr/bin/cross"
+ assert path == cross_path
assert ver == "0.2.5"
install = next(
cmd for cmd in harness.calls if cmd[:3] == ["cargo", "install", "cross"]
@@ -122,51 +114,68 @@ def fake_run(
assert "--locked" in install
idx = install.index("--version")
assert install[idx + 1] == "0.2.5"
+ # Ensure upgrade used crates.io, not the git fallback
+ assert "--git" not in install
+ assert "--tag" not in install
+@CMD_MOX_UNSUPPORTED
def test_uses_cached_cross(
- cross_module: ModuleType, module_harness: HarnessFactory
+ cross_module: ModuleType,
+ module_harness: HarnessFactory,
+ cmd_mox: CmdMox,
) -> None:
"""Uses cached cross when version is sufficient."""
harness = module_harness(cross_module)
- harness.patch_shutil_which(
- lambda name: "/usr/bin/cross" if name == "cross" else None
- )
- harness.patch_subprocess_run(_constant_run("cross 0.2.5\n"))
+ cross_path = _register_cross_version_stub(cmd_mox)
+
+ def fake_which(name: str) -> str | None:
+ return cross_path if name == "cross" else None
+ harness.patch_shutil_which(fake_which)
+
+ cmd_mox.replay()
path, ver = cross_module.ensure_cross("0.2.5")
+ cmd_mox.verify()
- assert path == "/usr/bin/cross"
+ assert path == cross_path
assert ver == "0.2.5"
assert not harness.calls
+@CMD_MOX_UNSUPPORTED
def test_installs_prebuilt_cross_on_windows(
- cross_module: ModuleType, module_harness: HarnessFactory
+ cross_module: ModuleType,
+ module_harness: HarnessFactory,
+ cmd_mox: CmdMox,
) -> None:
"""Uses the prebuilt cross binary on Windows hosts."""
harness = module_harness(cross_module)
- cross_checks = [None, "C:/cross.exe"]
+ cross_path = _register_cross_version_stub(cmd_mox)
+ cross_checks = [None, cross_path]
def fake_which(name: str) -> str | None:
- return cross_checks.pop(0) if name == "cross" else None
+ if name == "cross":
+ return cross_checks.pop(0) if cross_checks else cross_path
+ return None
harness.patch_shutil_which(fake_which)
- harness.patch_subprocess_run(_constant_run("cross 0.2.5\n"))
harness.patch_platform("win32")
- release_called = {"value": False}
+ release_call_args: list[str] = []
def fake_release(version: str) -> bool:
- release_called["value"] = True
+ release_call_args.append(version)
return True
harness.patch_attr("install_cross_release", fake_release)
+ cmd_mox.replay()
path, ver = cross_module.ensure_cross("0.2.5")
+ cmd_mox.verify()
- assert release_called["value"] is True
- assert path == "C:/cross.exe"
+ assert release_call_args == ["0.2.5"]
+ assert path == cross_path
assert ver == "0.2.5"
assert all(cmd[:2] != ["cargo", "install"] for cmd in harness.calls)
@@ -174,6 +183,7 @@ def fake_release(version: str) -> bool:
def test_install_cross_release_validates_binary(
cross_module: ModuleType,
module_harness: HarnessFactory,
+ echo_recorder: typ.Callable[[ModuleType], list[tuple[str, bool]]],
tmp_path: Path,
) -> None:
"""Cross release installer verifies the downloaded binary executes."""
@@ -252,10 +262,7 @@ def fake_run(
run_calls.append(cmd)
return subprocess.CompletedProcess(cmd, 0, stdout="cross 0.2.5\n")
- messages: list[tuple[str, bool]] = []
-
- def fake_echo(message: str, *, err: bool = False) -> None:
- messages.append((message, err))
+ messages = echo_recorder(module)
home_dir = tmp_path / "home"
@@ -264,7 +271,6 @@ def fake_echo(message: str, *, err: bool = False) -> None:
module.tempfile, "TemporaryDirectory", lambda: FakeTempDir()
)
harness.monkeypatch.setattr(module, "run_validated", fake_run)
- harness.monkeypatch.setattr(module.typer, "echo", fake_echo)
harness.monkeypatch.setattr(module.Path, "home", lambda: home_dir)
assert module.install_cross_release("0.2.5") is True
@@ -348,48 +354,36 @@ def fake_urlopen(url: str) -> FakeBinaryResponse | FakeTextResponse:
assert cross_module.install_cross_release("0.2.5") is False
+@CMD_MOX_UNSUPPORTED
def test_installs_cross_without_container_runtime(
main_module: ModuleType,
cross_module: ModuleType,
module_harness: HarnessFactory,
+ cmd_mox: CmdMox,
) -> None:
"""Installs cross even when no container runtime is available."""
cross_env = module_harness(cross_module)
app_env = module_harness(main_module)
- cross_checks = [None, "/usr/bin/cross"]
+ default_toolchain = main_module.DEFAULT_TOOLCHAIN
+ rustup_stdout = f"{default_toolchain}-x86_64-unknown-linux-gnu\n"
+ cross_path = _register_cross_version_stub(cmd_mox)
+ rustup_path = _register_rustup_toolchain_stub(cmd_mox, rustup_stdout)
+ cross_checks = [None, cross_path]
def fake_which(name: str) -> str | None:
if name == "cross":
- return cross_checks.pop(0)
- return None if name in {"docker", "podman"} else "/usr/bin/rustup"
+ return cross_checks.pop(0) if cross_checks else cross_path
+ if name in {"docker", "podman"}:
+ return None
+ return rustup_path if name == "rustup" else None
cross_env.patch_shutil_which(fake_which)
app_env.patch_shutil_which(fake_which)
- default_toolchain = main_module.DEFAULT_TOOLCHAIN
-
- def fake_run(
- executable: str,
- args: list[str],
- *,
- allowed_names: tuple[str, ...],
- capture_output: bool = False,
- check: bool = False,
- text: bool = False,
- **_: object,
- ) -> subprocess.CompletedProcess[str]:
- _ = allowed_names
- cmd = [executable, *args]
- if len(cmd) > 1 and cmd[1] == "toolchain":
- output = f"{default_toolchain}-x86_64-unknown-linux-gnu\n"
- return subprocess.CompletedProcess(cmd, 0, stdout=output)
- return subprocess.CompletedProcess(cmd, 0, stdout="cross 0.2.5\n")
-
- cross_env.patch_subprocess_run(fake_run)
- app_env.patch_subprocess_run(fake_run)
-
+ cmd_mox.replay()
main_module.main("x86_64-unknown-linux-gnu", default_toolchain)
+ cmd_mox.verify()
install = next(
cmd for cmd in cross_env.calls if cmd[:3] == ["cargo", "install", "cross"]
@@ -400,78 +394,119 @@ def fake_run(
build_cmd = app_env.calls[-1]
assert build_cmd[0] == "cargo"
assert build_cmd[1] == f"+{default_toolchain}-x86_64-unknown-linux-gnu"
+ # Ensure no container runtime calls were attempted
+ assert all(cmd[0] not in {"docker", "podman"} for cmd in app_env.calls)
+ assert all(cmd[0] not in {"docker", "podman"} for cmd in cross_env.calls)
+@CMD_MOX_UNSUPPORTED
def test_falls_back_to_git_when_crates_io_unavailable(
- cross_module: ModuleType, module_harness: HarnessFactory
+ cross_module: ModuleType,
+ module_harness: HarnessFactory,
+ cmd_mox: CmdMox,
) -> None:
"""Falls back to git install when crates.io is unavailable."""
harness = module_harness(cross_module)
- cross_checks = [None, "/usr/bin/cross"]
+ cross_path = _register_cross_version_stub(cmd_mox)
+ cross_checks = [None, cross_path]
def run_cmd_side_effect(cmd: list[str]) -> None:
if len(harness.calls) == 1:
raise subprocess.CalledProcessError(1, cmd)
return
+ def fake_which(name: str) -> str | None:
+ if name == "cross":
+ return cross_checks.pop(0) if cross_checks else cross_path
+ return None
+
harness.patch_run_cmd(run_cmd_side_effect)
- harness.patch_shutil_which(
- lambda name: cross_checks.pop(0) if name == "cross" else None
- )
- harness.patch_subprocess_run(_constant_run("cross 0.2.5\n"))
+ harness.patch_shutil_which(fake_which)
+ cmd_mox.replay()
path, ver = cross_module.ensure_cross("0.2.5")
+ cmd_mox.verify()
assert len(harness.calls) == 2
- assert "--git" in harness.calls[1]
- assert "--tag" in harness.calls[1]
- assert "v0.2.5" in harness.calls[1]
- assert path == "/usr/bin/cross"
+ first, second = harness.calls
+ # First attempt was crates.io
+ assert "--git" not in first
+ assert "--tag" not in first
+ # Second attempt is the git fallback with a tag
+ assert "--git" in second
+ assert "--tag" in second
+ assert "v0.2.5" in second
+ assert path == cross_path
assert ver == "0.2.5"
+@CMD_MOX_UNSUPPORTED
def test_falls_back_to_cargo_when_runtime_unusable(
main_module: ModuleType,
cross_module: ModuleType,
module_harness: HarnessFactory,
+ cmd_mox: CmdMox,
) -> None:
"""Falls back to cargo when docker exists but is unusable."""
cross_env = module_harness(cross_module)
app_env = module_harness(main_module)
+ default_toolchain = main_module.DEFAULT_TOOLCHAIN
+ rustup_stdout = f"{default_toolchain}-x86_64-unknown-linux-gnu\n"
+ cross_path = _register_cross_version_stub(cmd_mox)
+ rustup_path = _register_rustup_toolchain_stub(cmd_mox, rustup_stdout)
+ docker_path = _register_docker_info_stub(cmd_mox, exit_code=1)
+
def fake_which(name: str) -> str | None:
if name == "docker":
- return "/usr/bin/docker"
- return "/usr/bin/cross" if name == "cross" else "/usr/bin/rustup"
+ return docker_path
+ if name == "cross":
+ return cross_path
+ return rustup_path if name == "rustup" else None
cross_env.patch_shutil_which(fake_which)
app_env.patch_shutil_which(fake_which)
+ cmd_mox.replay()
+ main_module.main("x86_64-unknown-linux-gnu", default_toolchain)
+ cmd_mox.verify()
+
+ assert any(cmd[0] == "cargo" for cmd in app_env.calls)
+ assert all(cmd[0] != "cross" for cmd in app_env.calls)
+
+
+@CMD_MOX_UNSUPPORTED
+def test_falls_back_to_cargo_when_podman_unusable(
+ main_module: ModuleType,
+ cross_module: ModuleType,
+ module_harness: HarnessFactory,
+ cmd_mox: CmdMox,
+) -> None:
+ """Falls back to cargo when podman exists but is unusable."""
+ cross_env = module_harness(cross_module)
+ app_env = module_harness(main_module)
+
default_toolchain = main_module.DEFAULT_TOOLCHAIN
+ rustup_stdout = f"{default_toolchain}-x86_64-unknown-linux-gnu\n"
+ cross_path = _register_cross_version_stub(cmd_mox)
+ rustup_path = _register_rustup_toolchain_stub(cmd_mox, rustup_stdout)
+ podman_path = _register_podman_info_stub(cmd_mox, exit_code=1)
- def fake_run(
- executable: str,
- args: list[str],
- *,
- allowed_names: tuple[str, ...],
- capture_output: bool = False,
- check: bool = False,
- text: bool = False,
- **_: object,
- ) -> subprocess.CompletedProcess[str]:
- _ = allowed_names
- cmd = [executable, *args]
- if executable == "/usr/bin/docker":
- return subprocess.CompletedProcess(cmd, 1, stdout="")
- if len(cmd) > 1 and cmd[1] == "toolchain":
- output = f"{default_toolchain}-x86_64-unknown-linux-gnu\n"
- return subprocess.CompletedProcess(cmd, 0, stdout=output)
- return subprocess.CompletedProcess(cmd, 0, stdout="cross 0.2.5\n")
+ def fake_which(name: str) -> str | None:
+ if name == "podman":
+ return podman_path
+ if name == "cross":
+ return cross_path
+ if name == "rustup":
+ return rustup_path
+ return None
- cross_env.patch_subprocess_run(fake_run)
- app_env.patch_subprocess_run(fake_run)
+ cross_env.patch_shutil_which(fake_which)
+ app_env.patch_shutil_which(fake_which)
+ cmd_mox.replay()
main_module.main("x86_64-unknown-linux-gnu", default_toolchain)
+ cmd_mox.verify()
assert any(cmd[0] == "cargo" for cmd in app_env.calls)
assert all(cmd[0] != "cross" for cmd in app_env.calls)
@@ -498,5 +533,7 @@ def failing_run_cmd(cmd: list[str]) -> None:
assert len(harness.calls) == 2
assert path is None
assert ver is None
- out = capsys.readouterr().out.lower()
- assert "warning" in out
+ io = capsys.readouterr()
+ msg = io.err.lower()
+ assert "warning" in msg
+ assert "cross install failed; continuing without cross" in msg
diff --git a/.github/actions/rust-build-release/tests/test_runtime.py b/.github/actions/rust-build-release/tests/test_runtime.py
index fd0afc69..64045bdf 100644
--- a/.github/actions/rust-build-release/tests/test_runtime.py
+++ b/.github/actions/rust-build-release/tests/test_runtime.py
@@ -2,16 +2,63 @@
from __future__ import annotations
+import importlib.util
import json
import subprocess
+import sys
import typing as typ
+from types import ModuleType, SimpleNamespace
import pytest
if typ.TYPE_CHECKING:
- from types import ModuleType
+ from .conftest import HarnessFactory, ModuleHarness
- from .conftest import HarnessFactory
+
+def _patch_run_validated_timeout(
+ runtime_module: ModuleType,
+ harness: ModuleHarness,
+ *,
+ predicate: typ.Callable[[list[str]], bool] | None = None,
+ success_factory: typ.Callable[[list[str]], subprocess.CompletedProcess[str]]
+ | None = None,
+) -> None:
+ """Patch ``run_validated`` to raise ``TimeoutExpired`` when *predicate* matches."""
+
+ def fake_run(
+ executable: str,
+ args: list[str],
+ *,
+ allowed_names: tuple[str, ...],
+ **kwargs: object,
+ ) -> subprocess.CompletedProcess[str]:
+ _ = (allowed_names, kwargs)
+ cmd = [executable, *args]
+ should_timeout = predicate(args) if predicate is not None else True
+ if should_timeout:
+ raise subprocess.TimeoutExpired(cmd, runtime_module.PROBE_TIMEOUT)
+ if success_factory is not None:
+ return success_factory(cmd)
+ return subprocess.CompletedProcess(cmd, 0, stdout="")
+
+ harness.monkeypatch.setattr(runtime_module, "run_validated", fake_run)
+
+
+def _reload_runtime_module(runtime_module: ModuleType, module_name: str) -> ModuleType:
+ """Reload the runtime module under a new name for environment-specific tests."""
+ module_path = getattr(runtime_module, "__file__", None)
+ if module_path is None:
+ pytest.fail("runtime module does not expose a __file__ path")
+ module_spec = importlib.util.spec_from_file_location(module_name, module_path)
+ if module_spec is None or module_spec.loader is None:
+ pytest.fail("failed to load runtime module specification")
+ module = importlib.util.module_from_spec(module_spec)
+ sys.modules[module_name] = module
+ try:
+ module_spec.loader.exec_module(module)
+ finally:
+ sys.modules.pop(module_name, None)
+ return module
def test_runtime_available_false_when_missing(
@@ -38,31 +85,51 @@ def fake_ensure(path: str, allowed: tuple[str, ...]) -> str:
def test_runtime_available_returns_false_on_timeout(
- runtime_module: ModuleType, module_harness: HarnessFactory
+ runtime_module: ModuleType,
+ module_harness: HarnessFactory,
+ echo_recorder: typ.Callable[[ModuleType], list[tuple[str, bool]]],
) -> None:
"""Treats runtimes that hang during discovery as unavailable."""
harness = module_harness(runtime_module)
harness.patch_shutil_which(lambda name: "/usr/bin/docker")
harness.patch_attr("ensure_allowed_executable", lambda path, allowed: path)
+ messages = echo_recorder(runtime_module)
+ _patch_run_validated_timeout(runtime_module, harness)
- def fake_run(
- executable: str,
- args: list[str],
- *,
- allowed_names: tuple[str, ...],
- **_: object,
- ) -> subprocess.CompletedProcess[str]:
- _ = allowed_names
- cmd = [executable, *args]
- raise subprocess.TimeoutExpired(cmd, runtime_module.PROBE_TIMEOUT)
+ assert runtime_module.runtime_available("docker") is False
+ assert any(err for _, err in messages), "expected stderr warning to be emitted"
+ assert any(
+ "docker info probe exceeded" in msg and str(runtime_module.PROBE_TIMEOUT) in msg
+ for msg, err in messages
+ if err
+ ), "docker info probe timeout warning missing"
+
+
+def test_runtime_available_oserror_does_not_warn(
+ runtime_module: ModuleType,
+ module_harness: HarnessFactory,
+ echo_recorder: typ.Callable[[ModuleType], list[tuple[str, bool]]],
+) -> None:
+ """OSError during runtime detection should not emit warnings."""
+ harness = module_harness(runtime_module)
+ harness.patch_shutil_which(lambda name: "/usr/bin/docker")
+ harness.patch_attr("ensure_allowed_executable", lambda path, allowed: path)
+ messages = echo_recorder(runtime_module)
+
+ def fake_run(*_: object, **__: object) -> subprocess.CompletedProcess[str]:
+ message = "simulated OSError"
+ raise OSError(message)
harness.monkeypatch.setattr(runtime_module, "run_validated", fake_run)
assert runtime_module.runtime_available("docker") is False
+ assert not any(err for _, err in messages), "unexpected warning for OSError"
def test_podman_without_cap_sys_admin_is_unavailable(
- runtime_module: ModuleType, module_harness: HarnessFactory
+ runtime_module: ModuleType,
+ module_harness: HarnessFactory,
+ echo_recorder: typ.Callable[[ModuleType], list[tuple[str, bool]]],
) -> None:
"""Podman runtimes lacking CAP_SYS_ADMIN are reported as unavailable."""
harness = module_harness(runtime_module)
@@ -86,13 +153,8 @@ def fake_run(
return subprocess.CompletedProcess(cmd, 0, stdout=data)
return subprocess.CompletedProcess(cmd, 0, stdout="")
- messages: list[tuple[str, bool]] = []
-
- def fake_echo(message: str, *, err: bool = False) -> None:
- messages.append((message, err))
-
+ messages = echo_recorder(runtime_module)
harness.monkeypatch.setattr(runtime_module, "run_validated", fake_run)
- harness.monkeypatch.setattr(runtime_module.typer, "echo", fake_echo)
assert runtime_module.runtime_available("podman") is False
assert any("CAP_SYS_ADMIN" in msg for msg, err in messages if err)
@@ -128,32 +190,29 @@ def fake_run(
def test_podman_security_timeout_treated_as_unavailable(
- runtime_module: ModuleType, module_harness: HarnessFactory
+ runtime_module: ModuleType,
+ module_harness: HarnessFactory,
+ echo_recorder: typ.Callable[[ModuleType], list[tuple[str, bool]]],
) -> None:
"""If podman security inspection times out the runtime is skipped."""
harness = module_harness(runtime_module)
harness.patch_shutil_which(lambda name: "/usr/bin/podman")
harness.patch_attr("ensure_allowed_executable", lambda path, allowed: path)
-
- def fake_run(
- executable: str,
- args: list[str],
- *,
- allowed_names: tuple[str, ...],
- capture_output: bool = False,
- check: bool = False,
- text: bool = False,
- **_: object,
- ) -> subprocess.CompletedProcess[str]:
- _ = (allowed_names, capture_output, check, text)
- cmd = [executable, *args]
- if "--format" in args:
- raise subprocess.TimeoutExpired(cmd, runtime_module.PROBE_TIMEOUT)
- return subprocess.CompletedProcess(cmd, 0, stdout="")
-
- harness.monkeypatch.setattr(runtime_module, "run_validated", fake_run)
+ messages = echo_recorder(runtime_module)
+ _patch_run_validated_timeout(
+ runtime_module,
+ harness,
+ predicate=lambda args: "--format" in args,
+ )
assert runtime_module.runtime_available("podman") is False
+ assert any(err for _, err in messages), "expected stderr warning to be emitted"
+ assert any(
+ "podman security probe exceeded" in msg
+ and str(runtime_module.PROBE_TIMEOUT) in msg
+ for msg, err in messages
+ if err
+ ), "podman security timeout warning missing"
def test_detect_host_target_returns_default_when_rustc_missing(
@@ -202,20 +261,7 @@ def test_detect_host_target_returns_default_on_timeout(
lambda name: "/usr/bin/rustc" if name == "rustc" else None
)
harness.patch_attr("ensure_allowed_executable", lambda path, allowed: path)
-
- def fake_run(
- executable: str,
- args: list[str],
- *,
- allowed_names: tuple[str, ...],
- **_: object,
- ) -> subprocess.CompletedProcess[str]:
- _ = (executable, args, allowed_names)
- raise subprocess.TimeoutExpired(
- [executable, *args], runtime_module.PROBE_TIMEOUT
- )
-
- harness.monkeypatch.setattr(runtime_module, "run_validated", fake_run)
+ _patch_run_validated_timeout(runtime_module, harness)
assert (
runtime_module.detect_host_target(default="fallback-triple")
@@ -223,6 +269,28 @@ def fake_run(
)
+def test_platform_default_host_target_windows(
+ runtime_module: ModuleType, module_harness: HarnessFactory
+) -> None:
+ """Windows fallbacks prefer the MSVC triple for common architectures."""
+ harness = module_harness(runtime_module)
+ harness.patch_attr("platform", SimpleNamespace(machine=lambda: "AMD64"))
+ harness.monkeypatch.setattr(runtime_module.sys, "platform", "win32")
+
+ assert runtime_module._platform_default_host_target() == "x86_64-pc-windows-msvc"
+
+
+def test_platform_default_host_target_darwin_arm(
+ runtime_module: ModuleType, module_harness: HarnessFactory
+) -> None:
+ """Ensure macOS ARM platforms fall back to the aarch64 Apple triple."""
+ harness = module_harness(runtime_module)
+ harness.patch_attr("platform", SimpleNamespace(machine=lambda: "arm64"))
+ harness.monkeypatch.setattr(runtime_module.sys, "platform", "darwin")
+
+ assert runtime_module._platform_default_host_target() == "aarch64-apple-darwin"
+
+
def test_detect_host_target_passes_timeout_to_run_validated(
runtime_module: ModuleType, module_harness: HarnessFactory
) -> None:
@@ -257,78 +325,127 @@ def fake_run(
assert call_kwargs.get("text") is True
assert call_kwargs.get("check") is True
assert call_kwargs.get("allowed_names") == ("rustc", "rustc.exe")
-@pytest.mark.parametrize(
- ("machine", "expected"),
- [
- ("AMD64", "x86_64"),
- ("x64", "x86_64"),
- ("i386", "i686"),
- ("I586", "i686"),
- ("ARM64", "aarch64"),
- ("armv8l", "aarch64"),
- ("ARMV7L", "armv7"),
- ("armv6l", "armv6"),
- ("PPC64LE", "ppc64le"),
- ("PowerPC64", "ppc64"),
- ("sparc64", "sparc64"),
- ],
-)
-def test_normalize_arch_unit_mappings(
- runtime_module: ModuleType, machine: str, expected: str
+
+
+def test_probe_timeout_env_override(
+ runtime_module: ModuleType,
+ module_harness: HarnessFactory,
+ monkeypatch: pytest.MonkeyPatch,
) -> None:
- """Unit test: known architecture identifiers normalize correctly."""
+ """Respect RUNTIME_PROBE_TIMEOUT when importing the module."""
+ monkeypatch.setenv("RUNTIME_PROBE_TIMEOUT", "2")
+ module = _reload_runtime_module(runtime_module, "rbr_runtime_reloaded")
+ harness = module_harness(module)
- assert runtime_module._normalize_arch(machine) == expected
+ harness.patch_shutil_which(lambda name: "/usr/bin/rustc")
+ harness.patch_attr("ensure_allowed_executable", lambda path, allowed: path)
+ captured: dict[str, object] = {}
-def test_normalize_arch_behavioral_fallbacks(runtime_module: ModuleType) -> None:
- """Behavioural test: unknown and missing machine names are handled."""
+ def fake_run(
+ executable: str,
+ args: list[str],
+ *,
+ allowed_names: tuple[str, ...],
+ **kwargs: object,
+ ) -> subprocess.CompletedProcess[str]:
+ captured.update(kwargs)
+ return subprocess.CompletedProcess(
+ [executable, *args], 0, stdout="host: x86_64-unknown-linux-gnu\n"
+ )
- assert runtime_module._normalize_arch("") == "x86_64"
- # Unknown identifiers are normalized to lowercase for stability.
- assert runtime_module._normalize_arch("Loongson") == "loongson"
+ harness.monkeypatch.setattr(module, "run_validated", fake_run)
+ module.detect_host_target()
+ assert captured.get("timeout") == 2
@pytest.mark.parametrize(
- ("system_name", "machine", "sys_platform", "expected"),
+ ("env_value", "expected_kind", "message_fragment"),
[
- ("Windows", "AMD64", "win32", "x86_64-pc-windows-msvc"),
- ("CYGWIN_NT-10.0", "x86_64", "cygwin", "x86_64-pc-windows-gnu"),
- ("MSYS_NT-10.0", "x86_64", "msys", "x86_64-pc-windows-gnu"),
- ("Darwin", "arm64", "darwin", "aarch64-apple-darwin"),
- ("Linux", "ppc64le", "linux", "ppc64le-unknown-linux-gnu"),
- ("Linux-gnu", "armv7l", "linux-gnu", "armv7-unknown-linux-gnu"),
- ("FreeBSD", "sparc64", "freebsd13", "sparc64-unknown-freebsd"),
+ pytest.param(
+ "not-a-number",
+ "default",
+ "Invalid RUNTIME_PROBE_TIMEOUT value",
+ id="invalid",
+ ),
+ pytest.param("0", "default", "0s raised to", id="zero"),
+ pytest.param("-5", "default", "-5s raised to", id="negative"),
+ pytest.param("999", "max", "999s capped to", id="capped"),
],
)
-def test_default_host_target_for_current_platform_unit(
+def test_probe_timeout_sanitization_warnings(
runtime_module: ModuleType,
monkeypatch: pytest.MonkeyPatch,
- system_name: str,
- machine: str,
- sys_platform: str,
- expected: str,
+ echo_recorder: typ.Callable[[ModuleType], list[tuple[str, bool]]],
+ request: pytest.FixtureRequest,
+ env_value: str,
+ expected_kind: str,
+ message_fragment: str,
) -> None:
- """Unit test: platform/architecture combinations map to expected triples."""
-
- monkeypatch.setattr(runtime_module.platform, "system", lambda: system_name)
- monkeypatch.setattr(runtime_module.platform, "machine", lambda: machine)
- monkeypatch.setattr(runtime_module.sys, "platform", sys_platform)
-
- assert runtime_module._default_host_target_for_current_platform() == expected
+ """Probe timeout overrides produce warnings when sanitized."""
+ messages = echo_recorder(runtime_module)
+ monkeypatch.setenv("RUNTIME_PROBE_TIMEOUT", env_value)
+ module = _reload_runtime_module(
+ runtime_module,
+ f"rbr_runtime_timeout_{request.node.callspec.id}",
+ )
+ if expected_kind == "default":
+ expected = module._DEFAULT_PROBE_TIMEOUT
+ elif expected_kind == "max":
+ expected = module._MAX_PROBE_TIMEOUT
+ else:
+ expected = int(env_value)
+ assert expected == module.PROBE_TIMEOUT
+ assert any(err for _, err in messages), "expected stderr warning for timeout"
+ assert any(
+ message_fragment in msg and str(expected) in msg for msg, err in messages if err
+ )
-def test_default_host_target_for_current_platform_behavioral_fallback(
- runtime_module: ModuleType, monkeypatch: pytest.MonkeyPatch
+@pytest.mark.parametrize(
+ ("env_value", "expected_kind", "message_fragment"),
+ [
+ pytest.param(None, "default", None, id="unset"),
+ pytest.param("5", "value", None, id="custom-value"),
+ pytest.param(
+ "not-a-number",
+ "default",
+ "Invalid RUNTIME_PROBE_TIMEOUT value",
+ id="invalid",
+ ),
+ pytest.param("0", "default", "0s raised to", id="zero"),
+ pytest.param("-5", "default", "-5s raised to", id="negative"),
+ pytest.param("999", "max", "999s capped to", id="capped"),
+ ],
+)
+def test_get_probe_timeout_sanitizes_values(
+ runtime_module: ModuleType,
+ monkeypatch: pytest.MonkeyPatch,
+ echo_recorder: typ.Callable[[ModuleType], list[tuple[str, bool]]],
+ env_value: str | None,
+ expected_kind: str,
+ message_fragment: str | None,
) -> None:
- """Behavioural test: fallbacks cover missing identifiers."""
-
- monkeypatch.setattr(runtime_module.platform, "system", lambda: "")
- monkeypatch.setattr(runtime_module.platform, "machine", lambda: "")
- monkeypatch.setattr(runtime_module.sys, "platform", "customos")
-
- assert (
- runtime_module._default_host_target_for_current_platform()
- == "x86_64-unknown-customos"
- )
-
+ """Unit tests for probe timeout sanitization helper."""
+ messages = echo_recorder(runtime_module)
+ if env_value is None:
+ monkeypatch.delenv("RUNTIME_PROBE_TIMEOUT", raising=False)
+ else:
+ monkeypatch.setenv("RUNTIME_PROBE_TIMEOUT", env_value)
+ result = runtime_module._get_probe_timeout()
+ if expected_kind == "default":
+ expected = runtime_module._DEFAULT_PROBE_TIMEOUT
+ elif expected_kind == "max":
+ expected = runtime_module._MAX_PROBE_TIMEOUT
+ else:
+ expected = int(env_value or runtime_module._DEFAULT_PROBE_TIMEOUT)
+ assert result == expected
+ if message_fragment is None:
+ assert not any(err for _, err in messages), "unexpected stderr warning"
+ else:
+ assert any(err for _, err in messages), "expected stderr warning"
+ assert any(
+ message_fragment in msg and str(expected) in msg
+ for msg, err in messages
+ if err
+ )
diff --git a/.github/actions/rust-build-release/tests/test_smoke.py b/.github/actions/rust-build-release/tests/test_smoke.py
index c7639f80..a669b5f6 100644
--- a/.github/actions/rust-build-release/tests/test_smoke.py
+++ b/.github/actions/rust-build-release/tests/test_smoke.py
@@ -29,6 +29,14 @@
WINDOWS_ONLY = pytest.mark.skipif(sys.platform != "win32", reason="requires Windows")
LINUX_ONLY = pytest.mark.skipif(sys.platform == "win32", reason="requires Linux")
+WINDOWS_KNOWN_FAILURE = pytest.mark.xfail(
+ sys.platform == "win32",
+ reason=(
+ "Known failure on Windows; see "
+ "https://github.com/leynos/shared-actions/issues/93"
+ ),
+ strict=True,
+)
HOST_TARGET = detect_host_target()
@@ -54,8 +62,8 @@ def _param_for_target(target: str) -> object:
marks: list[pytest.MarkDecorator] = []
if target != HOST_TARGET and target.endswith("-unknown-linux-gnu"):
marks.append(LINUX_ONLY)
- if target.endswith("-pc-windows-gnu"):
- marks.append(WINDOWS_ONLY)
+ if target.endswith(("-pc-windows-gnu", "-pc-windows-msvc")):
+ marks.extend((WINDOWS_ONLY, WINDOWS_KNOWN_FAILURE))
if marks:
return pytest.param(target, marks=tuple(marks))
return pytest.param(target)
diff --git a/.github/actions/rust-build-release/tests/test_target_install.py b/.github/actions/rust-build-release/tests/test_target_install.py
index b9b2aa09..d661e7be 100644
--- a/.github/actions/rust-build-release/tests/test_target_install.py
+++ b/.github/actions/rust-build-release/tests/test_target_install.py
@@ -1,4 +1,4 @@
-"""Tests target installation fallback behavior."""
+"""Tests for the target installation helpers in the rust-build-release action."""
from __future__ import annotations
@@ -7,18 +7,34 @@
import typing as typ
import pytest
+from shared_actions_conftest import (
+ CMD_MOX_UNSUPPORTED,
+ _register_cross_version_stub,
+ _register_docker_info_stub,
+ _register_podman_info_stub,
+ _register_rustup_toolchain_stub,
+)
if typ.TYPE_CHECKING:
from pathlib import Path
from types import ModuleType
+ from shared_actions_conftest import CmdMox
+
from .conftest import HarnessFactory
+def _assert_no_timeout_trace(output: str) -> None:
+ """Ensure TimeoutExpired tracebacks do not leak into CLI output."""
+ assert "TimeoutExpired" not in output, output
+
+
+@CMD_MOX_UNSUPPORTED
def test_skips_target_install_when_cross_available(
main_module: ModuleType,
cross_module: ModuleType,
module_harness: HarnessFactory,
+ cmd_mox: CmdMox,
) -> None:
"""Continues when target addition fails but cross is available."""
cross_env = module_harness(cross_module)
@@ -30,85 +46,54 @@ def run_cmd_side_effect(cmd: list[str]) -> None:
app_env.patch_run_cmd(run_cmd_side_effect)
+ default_toolchain = main_module.DEFAULT_TOOLCHAIN
+ rustup_stdout = f"{default_toolchain}-x86_64-unknown-linux-gnu\n"
+ rustup_path = _register_rustup_toolchain_stub(cmd_mox, rustup_stdout)
+ cross_path = _register_cross_version_stub(cmd_mox)
+ docker_path = _register_docker_info_stub(cmd_mox)
+
def fake_which(name: str) -> str | None:
mapping = {
- "cross": "/usr/bin/cross",
- "docker": "/usr/bin/docker",
- "rustup": "/usr/bin/rustup",
+ "cross": cross_path,
+ "docker": docker_path,
+ "rustup": rustup_path,
}
return mapping.get(name)
cross_env.patch_shutil_which(fake_which)
app_env.patch_shutil_which(fake_which)
- default_toolchain = main_module.DEFAULT_TOOLCHAIN
-
- def fake_run(
- executable: str,
- args: list[str],
- *,
- allowed_names: tuple[str, ...],
- capture_output: bool = False,
- check: bool = False,
- text: bool = False,
- **_: object,
- ) -> subprocess.CompletedProcess[str]:
- _ = allowed_names
- cmd = [executable, *args]
- if executable == "/usr/bin/docker":
- return subprocess.CompletedProcess(cmd, 0, stdout="")
- if len(cmd) > 1 and cmd[1] == "toolchain":
- stdout = f"{default_toolchain}-x86_64-unknown-linux-gnu\n"
- return subprocess.CompletedProcess(cmd, 0, stdout=stdout)
- return subprocess.CompletedProcess(cmd, 0, stdout="cross 0.2.5\n")
-
- cross_env.patch_subprocess_run(fake_run)
- app_env.patch_subprocess_run(fake_run)
+ cmd_mox.replay()
main_module.main("aarch64-pc-windows-gnu", default_toolchain)
+ cmd_mox.verify()
build_cmd = app_env.calls[-1]
assert build_cmd[0] == "cross"
assert build_cmd[1] == f"+{default_toolchain}"
+@CMD_MOX_UNSUPPORTED
def test_errors_when_target_unsupported_without_cross(
main_module: ModuleType,
cross_module: ModuleType,
module_harness: HarnessFactory,
+ cmd_mox: CmdMox,
capsys: pytest.CaptureFixture[str],
) -> None:
"""Emits an error when the toolchain lacks the requested target."""
cross_env = module_harness(cross_module)
app_env = module_harness(main_module)
+ default_toolchain = main_module.DEFAULT_TOOLCHAIN
+ rustup_stdout = f"{default_toolchain}-x86_64-unknown-linux-gnu\n"
+ rustup_path = _register_rustup_toolchain_stub(cmd_mox, rustup_stdout)
+
def fake_which(name: str) -> str | None:
- return "/usr/bin/rustup" if name == "rustup" else None
+ return rustup_path if name == "rustup" else None
cross_env.patch_shutil_which(fake_which)
app_env.patch_shutil_which(fake_which)
- default_toolchain = main_module.DEFAULT_TOOLCHAIN
-
- def fake_run(
- executable: str,
- args: list[str],
- *,
- allowed_names: tuple[str, ...],
- capture_output: bool = False,
- check: bool = False,
- text: bool = False,
- **_: object,
- ) -> subprocess.CompletedProcess[str]:
- _ = allowed_names
- cmd = [executable, *args]
- if len(cmd) > 1 and cmd[1] == "toolchain":
- stdout = f"{default_toolchain}-x86_64-unknown-linux-gnu\n"
- return subprocess.CompletedProcess(cmd, 0, stdout=stdout)
- return subprocess.CompletedProcess(cmd, 0, stdout="")
-
- cross_env.patch_subprocess_run(fake_run)
- app_env.patch_subprocess_run(fake_run)
-
def run_cmd_side_effect(cmd: list[str]) -> None:
if cmd[:3] == ["rustup", "target", "add"]:
raise subprocess.CalledProcessError(1, cmd)
@@ -117,17 +102,21 @@ def run_cmd_side_effect(cmd: list[str]) -> None:
app_env.patch_attr("ensure_cross", lambda *_: (None, None))
app_env.patch_attr("runtime_available", lambda name: False)
+ cmd_mox.replay()
with pytest.raises(main_module.typer.Exit):
main_module.main("thumbv7em-none-eabihf", default_toolchain)
+ cmd_mox.verify()
err = capsys.readouterr().err
assert "does not support target 'thumbv7em-none-eabihf'" in err
+@CMD_MOX_UNSUPPORTED
def test_falls_back_to_cargo_when_cross_container_fails(
main_module: ModuleType,
cross_module: ModuleType,
module_harness: HarnessFactory,
+ cmd_mox: CmdMox,
) -> None:
"""Falls back to cargo when cross exits with a container error."""
cross_env = module_harness(cross_module)
@@ -139,45 +128,76 @@ def run_cmd_side_effect(cmd: list[str]) -> None:
app_env.patch_run_cmd(run_cmd_side_effect)
+ default_toolchain = main_module.DEFAULT_TOOLCHAIN
+ rustup_stdout = f"{default_toolchain}-x86_64-unknown-linux-gnu\n"
+ rustup_path = _register_rustup_toolchain_stub(cmd_mox, rustup_stdout)
+ cross_path = _register_cross_version_stub(cmd_mox)
+
def fake_which(name: str) -> str | None:
- mapping = {
- "rustup": "/usr/bin/rustup",
- }
- return mapping.get(name)
+ return rustup_path if name == "rustup" else None
cross_env.patch_shutil_which(fake_which)
app_env.patch_shutil_which(fake_which)
- default_toolchain = main_module.DEFAULT_TOOLCHAIN
-
- def fake_run(
- executable: str,
- args: list[str],
- *,
- allowed_names: tuple[str, ...],
- capture_output: bool = False,
- check: bool = False,
- text: bool = False,
- **_: object,
- ) -> subprocess.CompletedProcess[str]:
- _ = allowed_names
- cmd = [executable, *args]
- if len(cmd) > 1 and cmd[1] == "toolchain":
- stdout = f"{default_toolchain}-x86_64-unknown-linux-gnu\n"
- return subprocess.CompletedProcess(cmd, 0, stdout=stdout)
- return subprocess.CompletedProcess(cmd, 0, stdout="")
-
- cross_env.patch_subprocess_run(fake_run)
- app_env.patch_subprocess_run(fake_run)
- app_env.patch_attr("ensure_cross", lambda required: ("/usr/bin/cross", required))
+ app_env.patch_attr("ensure_cross", lambda required: (cross_path, required))
app_env.patch_attr("runtime_available", lambda name: True)
+ cmd_mox.replay()
main_module.main("x86_64-unknown-linux-gnu", default_toolchain)
+ cmd_mox.verify()
build_cmd = app_env.calls[-1]
assert build_cmd[0] == "cargo"
assert build_cmd[1] == f"+{default_toolchain}-x86_64-unknown-linux-gnu"
+@CMD_MOX_UNSUPPORTED
+def test_falls_back_to_cargo_when_podman_unusable(
+ main_module: ModuleType,
+ cross_module: ModuleType,
+ runtime_module: ModuleType,
+ module_harness: HarnessFactory,
+ cmd_mox: CmdMox,
+ capsys: pytest.CaptureFixture[str],
+) -> None:
+ """Fallback to cargo when podman runtime detection fails quickly (issue #97)."""
+ cross_env = module_harness(cross_module)
+ runtime_env = module_harness(runtime_module)
+ app_env = module_harness(main_module)
+
+ default_toolchain = main_module.DEFAULT_TOOLCHAIN
+ rustup_stdout = f"{default_toolchain}-x86_64-unknown-linux-gnu\n"
+ cross_path = _register_cross_version_stub(cmd_mox)
+ rustup_path = _register_rustup_toolchain_stub(cmd_mox, rustup_stdout)
+ podman_path = _register_podman_info_stub(cmd_mox, exit_code=1)
+
+ def fake_which(name: str) -> str | None:
+ if name == "podman":
+ return podman_path
+ if name == "cross":
+ return cross_path
+ return rustup_path if name == "rustup" else None
+
+ cross_env.patch_shutil_which(fake_which)
+ runtime_env.patch_shutil_which(fake_which)
+ app_env.patch_shutil_which(fake_which)
+
+ app_env.patch_attr("ensure_cross", lambda required: (cross_path, required))
+ app_env.patch_attr("runtime_available", runtime_module.runtime_available)
+
+ cmd_mox.replay()
+ main_module.main("x86_64-unknown-linux-gnu", default_toolchain)
+ cmd_mox.verify()
+
+ assert any(cmd[0] == "cargo" for cmd in app_env.calls)
+ assert all(cmd[0] != "cross" for cmd in app_env.calls)
+ captured = capsys.readouterr()
+ assert (
+ "cross (0.2.5) requires a container runtime; "
+ "using cargo (docker=False, podman=False)" in captured.out
+ )
+ _assert_no_timeout_trace(captured.err)
+
+
@pytest.mark.parametrize(
"target",
[
@@ -311,6 +331,157 @@ def test_should_probe_container_handles_windows_targets(
assert main_module.should_probe_container(host_platform, target) is should_probe
+def test_probe_runtime_returns_runtime_available(
+ main_module: ModuleType,
+ module_harness: HarnessFactory,
+ capsys: pytest.CaptureFixture[str],
+) -> None:
+ """_probe_runtime returns the underlying runtime availability flag."""
+ harness = module_harness(main_module)
+ harness.patch_attr("runtime_available", lambda name: name == "docker")
+
+ assert main_module._probe_runtime("docker") is True
+ assert main_module._probe_runtime("podman") is False
+
+ captured = capsys.readouterr()
+ assert captured.err == ""
+
+
+def test_probe_runtime_warns_on_timeout(
+ main_module: ModuleType,
+ module_harness: HarnessFactory,
+ capsys: pytest.CaptureFixture[str],
+) -> None:
+ """Timeouts are converted into warnings and treated as unavailable."""
+ harness = module_harness(main_module)
+
+ def raise_timeout(name: str) -> bool:
+ raise subprocess.TimeoutExpired(cmd=f"{name} info", timeout=5)
+
+ harness.patch_attr("runtime_available", raise_timeout)
+
+ assert main_module._probe_runtime("podman") is False
+
+ err = capsys.readouterr().err
+ expected = (
+ "::warning::podman runtime probe timed out after 5s; "
+ "treating runtime as unavailable"
+ )
+ assert expected in err
+
+
+def test_probe_runtime_warns_on_timeout_without_duration(
+ main_module: ModuleType,
+ module_harness: HarnessFactory,
+ capsys: pytest.CaptureFixture[str],
+) -> None:
+ """Timeout warnings omit duration when the exception lacks a timeout."""
+ harness = module_harness(main_module)
+
+ def raise_timeout(name: str) -> bool:
+ _ = name
+ raise subprocess.TimeoutExpired(cmd="docker info", timeout=None)
+
+ harness.patch_attr("runtime_available", raise_timeout)
+
+ assert main_module._probe_runtime("docker") is False
+
+ err = capsys.readouterr().err
+ expected = (
+ "::warning::docker runtime probe timed out; treating runtime as unavailable"
+ )
+ assert expected in err
+
+
+def test_probe_runtime_propagates_unexpected_error(
+ main_module: ModuleType,
+ module_harness: HarnessFactory,
+ capsys: pytest.CaptureFixture[str],
+) -> None:
+ """Errors other than timeouts propagate to the caller."""
+ harness = module_harness(main_module)
+
+ class ProbeError(RuntimeError):
+ """Sentinel error for runtime probe tests."""
+
+ def raise_error(name: str) -> bool:
+ raise ProbeError
+
+ harness.patch_attr("runtime_available", raise_error)
+
+ with pytest.raises(ProbeError):
+ main_module._probe_runtime("docker")
+
+ captured = capsys.readouterr()
+ assert captured.err == ""
+
+
+def test_runtime_available_handles_timeout(
+ main_module: ModuleType,
+ module_harness: HarnessFactory,
+ capsys: pytest.CaptureFixture[str],
+) -> None:
+ """Treat runtime probe timeouts as unavailable while still completing the build."""
+ harness = module_harness(main_module)
+ default_toolchain = main_module.DEFAULT_TOOLCHAIN
+
+ harness.patch_shutil_which(
+ lambda name: "/usr/bin/rustup" if name == "rustup" else None
+ )
+
+ def fake_run_validated(
+ executable: str, args: list[str], **_: object
+ ) -> subprocess.CompletedProcess[str]:
+ if executable == "/usr/bin/rustup" and args[:2] == ["toolchain", "list"]:
+ stdout = f"{default_toolchain}-x86_64-unknown-linux-gnu\n"
+ return subprocess.CompletedProcess([executable, *args], 0, stdout=stdout)
+ pytest.fail(f"unexpected run_validated call: {executable} {args}")
+
+ harness.patch_attr("run_validated", fake_run_validated)
+
+ commands: list[list[str]] = []
+
+ def record_run_cmd(cmd: list[str]) -> None:
+ commands.append(cmd)
+ if cmd[:3] == ["/usr/bin/rustup", "target", "add"]:
+ return
+ if cmd and cmd[0] == "cargo":
+ return
+ pytest.fail(f"unexpected run_cmd call: {cmd}")
+
+ harness.patch_run_cmd(record_run_cmd)
+ harness.patch_attr("configure_windows_linkers", lambda *_: None)
+
+ def timeout_runtime(_name: str, *, cwd: object | None = None) -> bool:
+ _ = cwd
+ raise subprocess.TimeoutExpired(cmd="podman info", timeout=10)
+
+ harness.patch_attr("runtime_available", timeout_runtime)
+ harness.patch_attr("ensure_cross", lambda *_: (None, None))
+
+ main_module.main("thumbv7em-none-eabihf", default_toolchain)
+
+ out, err = capsys.readouterr()
+ expected_docker = (
+ "::warning::docker runtime probe timed out after 10s; "
+ "treating runtime as unavailable"
+ )
+ expected_podman = (
+ "::warning::podman runtime probe timed out after 10s; "
+ "treating runtime as unavailable"
+ )
+ assert expected_docker in err
+ assert expected_podman in err
+ assert "cross missing; using cargo" in out
+ _assert_no_timeout_trace(err)
+
+ assert len(commands) >= 2
+ assert commands[0][:3] == ["/usr/bin/rustup", "target", "add"]
+ assert commands[1][0] == "cargo"
+ assert commands[1][1].startswith("+")
+ assert commands[1][-1] == "thumbv7em-none-eabihf"
+
+
def test_configure_windows_linkers_prefers_toolchain_gcc(
main_module: ModuleType,
module_harness: HarnessFactory,
diff --git a/.github/actions/rust-build-release/tests/test_utils.py b/.github/actions/rust-build-release/tests/test_utils.py
index f49a8076..8dad3577 100644
--- a/.github/actions/rust-build-release/tests/test_utils.py
+++ b/.github/actions/rust-build-release/tests/test_utils.py
@@ -7,10 +7,13 @@
from pathlib import Path
import pytest
+from shared_actions_conftest import CMD_MOX_UNSUPPORTED
if typ.TYPE_CHECKING:
from types import ModuleType
+ from shared_actions_conftest import CmdMox
+
def test_ensure_allowed_executable_accepts_valid_name(
utils_module: ModuleType, tmp_path: Path
@@ -34,21 +37,18 @@ def test_ensure_allowed_executable_rejects_unknown(
utils_module.ensure_allowed_executable(exe_path, ("rustup", "rustup.exe"))
+@CMD_MOX_UNSUPPORTED
def test_run_validated_invokes_subprocess_with_validated_path(
- utils_module: ModuleType, tmp_path: Path, monkeypatch: pytest.MonkeyPatch
+ utils_module: ModuleType,
+ cmd_mox: CmdMox,
) -> None:
"""run_validated executes subprocess.run with the validated executable."""
- exe_path = tmp_path / "docker.exe"
- exe_path.write_text("", encoding="utf-8")
-
- recorded: dict[str, list[str]] = {}
-
- def fake_run(cmd: list[str], **_: object) -> subprocess.CompletedProcess[str]:
- recorded["cmd"] = cmd
- return subprocess.CompletedProcess(cmd, 0, stdout="ok")
-
- monkeypatch.setattr(utils_module.subprocess, "run", fake_run)
+ shim_dir = cmd_mox.environment.shim_dir
+ assert shim_dir is not None
+ exe_path = shim_dir / "docker.exe"
+ spy = cmd_mox.spy("docker.exe").with_args("info").returns(stdout="ok")
+ cmd_mox.replay()
result = utils_module.run_validated(
exe_path,
["info"],
@@ -57,11 +57,16 @@ def fake_run(cmd: list[str], **_: object) -> subprocess.CompletedProcess[str]:
capture_output=True,
text=True,
)
+ cmd_mox.verify()
- assert recorded["cmd"][0] == str(exe_path)
- assert recorded["cmd"][1:] == ["info"]
- assert isinstance(result, subprocess.CompletedProcess)
- assert result.stdout == "ok"
+ assert isinstance(result, subprocess.CompletedProcess), (
+ "run_validated should return subprocess.CompletedProcess"
+ )
+ assert result.args[0] == str(exe_path), (
+ "subprocess should be invoked with the validated executable path"
+ )
+ assert result.stdout == "ok", "stdout should propagate from the command double"
+ assert spy.call_count == 1, "command double should be invoked exactly once"
def test_run_validated_raises_for_unexpected_executable(
diff --git a/.github/actions/setup-windows-gnu/action.yml b/.github/actions/setup-windows-gnu/action.yml
index 4c50f4f1..5f04f2f4 100644
--- a/.github/actions/setup-windows-gnu/action.yml
+++ b/.github/actions/setup-windows-gnu/action.yml
@@ -10,9 +10,37 @@ inputs:
description: SHA-256 checksum for the llvm-mingw archive matching the selected version
required: false
default: "bd88084d7a3b95906fa295453399015a1fdd7b90a38baa8f78244bd234303737"
+ require-aarch64:
+ description: Fail if no aarch64 MinGW compiler is available
+ required: false
+ default: "true"
+ download-retry-attempts:
+ description: Maximum number of attempts when downloading the llvm-mingw archive
+ required: false
+ default: "5"
+ download-retry-initial-delay:
+ description: Initial retry delay in seconds before applying exponential backoff
+ required: false
+ default: "2"
+ download-retry-max-delay:
+ description: Upper bound in seconds for retry delays when downloading the archive
+ required: false
+ default: "30"
+ download-retry-jitter:
+ description: Maximum random jitter in seconds added to retry delays
+ required: false
+ default: "0"
runs:
using: composite
steps:
+ - name: "Guard: Windows runner only"
+ shell: bash
+ run: |
+ set -euo pipefail
+ if [ "${RUNNER_OS}" != "Windows" ]; then
+ echo "::error::setup-windows-gnu action requires a Windows runner" >&2
+ exit 1
+ fi
- name: Install MinGW toolchains
# Consumers can add extra cross linkers in their workflows if they
# require GCC-based aarch64 binaries. llvm-mingw provides clang by default.
@@ -23,6 +51,7 @@ runs:
install: |
mingw-w64-x86_64-toolchain
mingw-w64-x86_64-gcc-libs
+ mingw-w64-x86_64-pkg-config
- name: Install llvm-mingw
shell: pwsh
run: |
@@ -37,11 +66,81 @@ runs:
$asset = "llvm-mingw-$version-ucrt-x86_64.zip"
$url = "https://github.com/mstorsjo/llvm-mingw/releases/download/$version/$asset"
$archive = Join-Path $Env:RUNNER_TEMP "llvm-mingw.zip"
- try {
- Invoke-WebRequest -Uri $url -OutFile $archive -ErrorAction Stop
- } catch {
- Write-Error "Failed to download llvm-mingw archive from $url"
- throw
+ $culture = [System.Globalization.CultureInfo]::InvariantCulture
+ $numberStyles = [System.Globalization.NumberStyles]::Float
+
+ $attempts = 5
+ $attemptsParsed = 0
+ $attemptsRaw = "${{ inputs['download-retry-attempts'] }}"
+ if ([int]::TryParse($attemptsRaw, [ref]$attemptsParsed) -and $attemptsParsed -ge 1) {
+ $attempts = $attemptsParsed
+ }
+
+ $initialDelay = 2.0
+ $initialDelayParsed = 0.0
+ $initialDelayRaw = "${{ inputs['download-retry-initial-delay'] }}"
+ if (
+ [double]::TryParse(
+ $initialDelayRaw,
+ $numberStyles,
+ $culture,
+ [ref]$initialDelayParsed
+ ) -and $initialDelayParsed -gt 0
+ ) {
+ $initialDelay = $initialDelayParsed
+ }
+
+ $maxDelay = 30.0
+ $maxDelayParsed = 0.0
+ $maxDelayRaw = "${{ inputs['download-retry-max-delay'] }}"
+ if (
+ [double]::TryParse(
+ $maxDelayRaw,
+ $numberStyles,
+ $culture,
+ [ref]$maxDelayParsed
+ ) -and $maxDelayParsed -gt 0
+ ) {
+ $maxDelay = $maxDelayParsed
+ }
+ if ($maxDelay -lt $initialDelay) {
+ $maxDelay = $initialDelay
+ }
+
+ $jitter = 0.0
+ $jitterParsed = 0.0
+ $jitterRaw = "${{ inputs['download-retry-jitter'] }}"
+ if (
+ [double]::TryParse(
+ $jitterRaw,
+ $numberStyles,
+ $culture,
+ [ref]$jitterParsed
+ ) -and $jitterParsed -ge 0
+ ) {
+ $jitter = $jitterParsed
+ }
+ $retryRandom = if ($jitter -gt 0) { [System.Random]::new() } else { $null }
+
+ for ($i = 1; $i -le $attempts; $i++) {
+ try {
+ Invoke-WebRequest -Uri $url -OutFile $archive -ErrorAction Stop
+ break
+ } catch {
+ if ($i -eq $attempts) {
+ Write-Error "Failed to download llvm-mingw archive from $url after $attempts attempts"
+ throw
+ }
+ $backoffExponent = $i - 1
+ $backoff = [math]::Min($initialDelay * [math]::Pow(2, $backoffExponent), $maxDelay)
+ $sleepSeconds = $backoff
+ if ($retryRandom) {
+ $sleepSeconds = [math]::Min($backoff + ($retryRandom.NextDouble() * $jitter), $maxDelay)
+ }
+ $displayDelay = [math]::Round($sleepSeconds, 2, [System.MidpointRounding]::AwayFromZero)
+ Write-Warning "Download failed (attempt $i). Retrying in ${displayDelay}s..."
+ Start-Sleep -Seconds ([int][math]::Ceiling($sleepSeconds))
+ }
}
$actualHash = (Get-FileHash -Path $archive -Algorithm SHA256).Hash
if (-not [string]::Equals($actualHash, $expectedHash, [System.StringComparison]::OrdinalIgnoreCase)) {
@@ -59,6 +158,11 @@ runs:
throw "llvm-mingw bin directory not found at $binPath"
}
$binPath | Out-File -FilePath $Env:GITHUB_PATH -Encoding utf8 -Append
+ Add-Content -Path $Env:GITHUB_ENV -Value "LLVM_MINGW_ROOT=$toolRoot"
+ Add-Content -Path $Env:GITHUB_ENV -Value "LLVM_MINGW_BIN=$binPath"
+ # Toolchain hints (useful for Autotools/CMake; override as needed)
+ Add-Content -Path $Env:GITHUB_ENV -Value "CC_aarch64_pc_windows_gnu=aarch64-w64-mingw32-clang"
+ Add-Content -Path $Env:GITHUB_ENV -Value "CXX_aarch64_pc_windows_gnu=aarch64-w64-mingw32-clang++"
- name: Verify GNU toolchains
shell: bash
run: |
@@ -66,8 +170,13 @@ runs:
if ! command -v x86_64-w64-mingw32-gcc >/dev/null 2>&1; then
echo "::warning::x86_64 MinGW GCC not found" >&2
fi
+ require_a64="${{ inputs.require-aarch64 }}"
if ! command -v aarch64-w64-mingw32-gcc >/dev/null 2>&1 \
&& ! command -v aarch64-w64-mingw32-clang >/dev/null 2>&1; then
- echo "::error::No aarch64 MinGW compiler found (neither GCC nor clang)" >&2
- exit 1
+ if [ "${require_a64}" = "true" ]; then
+ echo "::error::No aarch64 MinGW compiler found (neither GCC nor clang)" >&2
+ exit 1
+ else
+ echo "::warning::No aarch64 MinGW compiler found; proceeding because require-aarch64=false" >&2
+ fi
fi
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index ca8e7a82..9dfe351a 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -35,6 +35,18 @@ jobs:
- name: Install nfpm
if: matrix.os == 'ubuntu-latest'
uses: ./.github/actions/install-nfpm
+ - name: Setup Bun
+ if: matrix.os == 'ubuntu-latest'
+ # v2.0.2
+ uses: oven-sh/setup-bun@735343b667d3e6f658f44d0eca948eb6282f2b76
+ - name: Check formatting
+ if: matrix.os == 'ubuntu-latest'
+ run: make check-fmt
+ shell: bash
+ - name: Run lint checks
+ if: matrix.os == 'ubuntu-latest'
+ run: make lint
+ shell: bash
- name: Run tests
run: |
uv sync --group dev
diff --git a/.gitignore b/.gitignore
index eb7b2ad3..646ad25f 100644
--- a/.gitignore
+++ b/.gitignore
@@ -153,5 +153,8 @@ venv/
target/
# uv cache and lockfile
-uv.lock
-.uv/
+/uv.lock
+/.uv/
+
+# Crush AI agent
+.crush/
diff --git a/Makefile b/Makefile
index e699f688..b38510b2 100644
--- a/Makefile
+++ b/Makefile
@@ -8,6 +8,7 @@ clean: ## Remove transient artefacts
BUILD_JOBS ?=
MDLINT ?= markdownlint
NIXIE ?= nixie
+RUFF_FIX_RULES ?= D202,I001
test: .venv ## Run tests
uv run --with typer --with packaging --with plumbum --with pyyaml pytest -v
@@ -19,7 +20,7 @@ test: .venv ## Run tests
lint: ## Check test scripts and actions
uvx ruff check
find .github/actions -type f \( -name 'action.yml' -o -name 'action.yaml' \) -print0 \
- | xargs -r -0 -n1 ${HOME}/.bun/bin/action-validator
+ | xargs -r -0 -n1 bunx -y @action-validator/cli
typecheck: .venv ## Run static type checking with Ty
./.venv/bin/ty check \
@@ -41,11 +42,13 @@ typecheck: .venv ## Run static type checking with Ty
--extra-search-path .github/actions/macos-package/scripts \
.github/actions/macos-package/scripts
uvx pyright
-fmt: ## Apply formatting to Python files
+fmt: ## Format Python files and auto-fix selected lint rules
uvx ruff format
+ uvx ruff check --select $(RUFF_FIX_RULES) --fix
check-fmt: ## Check Python formatting without modifying files
uvx ruff format --check
+ uvx ruff check --select $(RUFF_FIX_RULES)
markdownlint: ## Lint Markdown files
find . -type f -name '*.md' -not -path './target/*' -print0 | xargs -0 -- $(MDLINT)
diff --git a/README.md b/README.md
index c38a9120..bfae89ed 100644
--- a/README.md
+++ b/README.md
@@ -9,6 +9,7 @@ GitHub Actions
| Export Postgres URL | `.github/actions/export-postgres-url` | v1 |
| Generate coverage | `.github/actions/generate-coverage` | v1 |
| Setup Rust | `.github/actions/setup-rust` | v1 |
+| Release to PyPI (uv) | `.github/actions/release-to-pypi-uv` | v1 |
| Upload CodeScene Coverage | `.github/actions/upload-codescene-coverage` | v1 |
| Ratchet coverage | `.github/actions/ratchet-coverage` | v1 |
| Rust build release | `.github/actions/rust-build-release` | v1 |
diff --git a/conftest.py b/conftest.py
new file mode 100644
index 00000000..94a0d6a2
--- /dev/null
+++ b/conftest.py
@@ -0,0 +1,153 @@
+"""Pytest configuration for shared actions tests."""
+
+from __future__ import annotations
+
+import collections
+import collections.abc as cabc
+import shutil
+import sys
+import typing as typ
+
+import pytest
+
+CMD_MOX_UNSUPPORTED = pytest.mark.skipif(
+ sys.platform == "win32", reason="cmd-mox does not support Windows"
+)
+HAS_UV = shutil.which("uv") is not None
+
+REQUIRES_UV = pytest.mark.usefixtures("require_uv")
+
+sys.modules.setdefault("shared_actions_conftest", sys.modules[__name__])
+
+
+if typ.TYPE_CHECKING: # pragma: no cover - imported for annotations only
+ from pathlib import Path
+
+
+class CmdDouble(typ.Protocol):
+ """Contract for cmd-mox doubles that record expectations and behaviour."""
+
+ call_count: int
+
+ def with_args(self, *args: str) -> typ.Self:
+ """Set the expected argv for the double."""
+ ...
+
+ def returns(
+ self,
+ *,
+ stdout: str = "",
+ stderr: str = "",
+ exit_code: int = 0,
+ **_: object,
+ ) -> typ.Self:
+ """Provide canned output for the command invocation."""
+ ...
+
+ def runs(self, handler: cabc.Callable[[object], tuple[str, str, int]]) -> typ.Self:
+ """Execute a handler when the double is invoked."""
+ ...
+
+
+class CmdMoxEnvironment(typ.Protocol):
+ """Subset of :class:`cmd_mox.EnvironmentManager` used in tests."""
+
+ shim_dir: Path | None
+
+
+class CmdMox(typ.Protocol):
+ """Typed façade for the cmd-mox pytest fixture used in tests."""
+
+ environment: CmdMoxEnvironment
+
+ def stub(self, command: str) -> CmdDouble:
+ """Register a stubbed command double."""
+ ...
+
+ def spy(self, command: str) -> CmdDouble:
+ """Register a spying command double."""
+ ...
+
+ def replay(self) -> None:
+ """Activate the recorded doubles."""
+ ...
+
+ def verify(self) -> None:
+ """Assert that recorded expectations were satisfied."""
+ ...
+
+
+def _shim_path(cmd_mox: CmdMox, command: str) -> str:
+ """Return the shim path for ``command`` ensuring the environment is ready."""
+ shim_dir = cmd_mox.environment.shim_dir
+ if shim_dir is None: # pragma: no cover - defensive guard
+ msg = "cmd-mox shim directory is unavailable"
+ raise RuntimeError(msg)
+ return str(shim_dir / command)
+
+
+@pytest.fixture
+def require_uv() -> None:
+ """Skip tests that exercise uv when the CLI is unavailable."""
+ if not HAS_UV:
+ pytest.skip("uv CLI not installed")
+
+
+def _register_cross_version_stub(
+ cmd_mox: CmdMox,
+ stdout: str | cabc.Iterable[str] = "cross 0.2.5\n",
+) -> str:
+ """Register a stub for ``cross --version`` and return the shim path."""
+ if isinstance(stdout, str):
+ cmd_mox.stub("cross").with_args("--version").returns(stdout=stdout)
+ else:
+ outputs = collections.deque(stdout)
+ last = outputs[-1] if outputs else "cross 0.2.5\n"
+
+ def _handler(_invocation: object) -> tuple[str, str, int]:
+ data = outputs.popleft() if outputs else last
+ return data, "", 0
+
+ cmd_mox.stub("cross").with_args("--version").runs(_handler)
+ return _shim_path(cmd_mox, "cross")
+
+
+def _register_rustup_toolchain_stub(
+ cmd_mox: CmdMox,
+ stdout: str,
+) -> str: # pragma: no cover - helper
+ """Register a stub for ``rustup toolchain list`` and return the shim path."""
+ cmd_mox.stub("rustup").with_args("toolchain", "list").returns(stdout=stdout)
+ return _shim_path(cmd_mox, "rustup")
+
+
+def _register_docker_info_stub(
+ cmd_mox: CmdMox,
+ *,
+ exit_code: int = 0,
+) -> str: # pragma: no cover - helper
+ """Register a stub for ``docker info`` and return the shim path."""
+ cmd_mox.stub("docker").with_args("info").returns(exit_code=exit_code)
+ return _shim_path(cmd_mox, "docker")
+
+
+def _register_podman_info_stub(
+ cmd_mox: CmdMox,
+ *,
+ exit_code: int = 0,
+) -> str: # pragma: no cover - helper
+ """Register a stub for ``podman info`` and return the shim path."""
+ cmd_mox.stub("podman").with_args("info").returns(exit_code=exit_code)
+ return _shim_path(cmd_mox, "podman")
+
+
+if sys.platform != "win32": # pragma: win32 no cover - windows lacks cmd-mox
+ pytest_plugins = ("cmd_mox.pytest_plugin",)
+else:
+
+ @pytest.fixture
+ def cmd_mox() -> typ.NoReturn: # pragma: win32 no cover
+ """Skip tests that rely on cmd-mox on Windows."""
+ pytest.skip("cmd-mox does not support Windows")
+ unreachable = "unreachable"
+ raise RuntimeError(unreachable)
diff --git a/docs/cmd-mox-users-guide.md b/docs/cmd-mox-users-guide.md
new file mode 100644
index 00000000..b64297cc
--- /dev/null
+++ b/docs/cmd-mox-users-guide.md
@@ -0,0 +1,245 @@
+# CmdMox Usage Guide
+
+CmdMox provides a fluent API for mocking, stubbing and spying on external
+commands in tests. This guide shows common patterns for everyday use.
+
+## Related documents
+
+- [Python Native Command Mocking Design](./python-native-command-mocking-design.md)
+ – Architectural decisions, lifecycle sequencing and IPC design details.
+
+## Getting started
+
+Install the package and enable the pytest plugin (guarded on Windows where
+cmd-mox is not currently supported):
+
+```bash
+pip install cmd-mox
+```
+
+In a project's `conftest.py`:
+
+```python
+import sys
+
+if sys.platform != "win32":
+ pytest_plugins = ("cmd_mox.pytest_plugin",)
+```
+
+Each test receives a `cmd_mox` fixture that provides access to the controller
+object. Because the IPC transport is Unix-specific, guard any cmd-mox-backed
+tests with `pytest.mark.skipif(sys.platform == "win32", ...)` so CI runners on
+Windows bypass them gracefully.
+
+## Basic workflow
+
+CmdMox follows a strict record → replay → verify lifecycle. First declare
+expectations, then run the code with the shims active, finally verify that
+interactions matched what was recorded.
+
+The three phases are defined in the design document:
+
+1. **Record** – describe each expected command call, including its arguments
+ and behaviour.
+2. **Replay** – run the code under test while CmdMox intercepts command
+ executions.
+3. **Verify** – ensure every expectation was met and nothing unexpected
+ happened.
+
+These phases form a strict sequence for reliable command-line tests.
+
+A typical test brings the three phases together:
+
+```python
+cmd_mox.mock("git").with_args("clone", "repo").returns(exit_code=0)
+
+cmd_mox.replay()
+my_tool.clone_repo("repo")
+cmd_mox.verify()
+```
+
+## Stubs, mocks and spies
+
+Use the controller to register doubles:
+
+```python
+cmd_mox.stub("ls")
+cmd_mox.mock("git")
+cmd_mox.spy("curl")
+```
+
+- **Stubs** provide canned responses without strict checking.
+- **Mocks** enforce exact usage during verification.
+- **Spies** record every call for later inspection and can behave like stubs.
+
+Each call returns a `CommandDouble` that offers a fluent DSL to configure
+behaviour.
+
+## Defining expectations
+
+Combine methods to describe how a command should be invoked:
+
+```python
+cmd_mox.mock("git") \
+ .with_args("clone", "https://example.com/repo.git") \
+ .returns(exit_code=0)
+```
+
+Arguments can be matched more flexibly using comparators:
+
+```python
+from cmd_mox import Regex, Contains
+
+cmd_mox.mock("curl") \
+ .with_matching_args(Regex(r"--header=User-Agent:.*"), Contains("example"))
+```
+
+The design document lists the available comparators:
+
+- `Any`
+- `IsA`
+- `Regex`
+- `Contains`
+- `StartsWith`
+- `Predicate`
+
+Each comparator is a callable that returns `True` on match.
+`with_matching_args` expects one comparator per argv element (excluding the program name, i.e., `argv[1:]`),
+and `with_stdin` accepts either an exact string or a predicate `Callable[[str], bool]`
+for flexible input checks.
+
+## Running tests
+
+Typical pytest usage looks like this:
+
+```python
+def test_clone(cmd_mox):
+ cmd_mox.mock("git").with_args("clone", "repo").returns(exit_code=0)
+
+ cmd_mox.replay()
+ my_tool.clone_repo("repo")
+ cmd_mox.verify()
+```
+
+The context manager interface is available when pytest fixtures are not in play:
+
+```python
+with CmdMox() as mox:
+ mox.stub("ls").returns(stdout="")
+ mox.replay()
+ subprocess.run(["ls"], check=True)
+```
+
+## Spies and passthrough mode
+
+Spies expose `invocations` (a list of `Invocation` objects) and `call_count`
+during and after replay, making it easy to inspect what actually ran:
+
+```python
+def test_spy(cmd_mox):
+ spy = cmd_mox.spy("curl").returns(stdout="ok")
+ cmd_mox.replay()
+ run_download()
+ cmd_mox.verify()
+ assert spy.call_count == 1
+```
+
+A spy expectation can also use `times_called(count)`—an alias of
+`times(count)`—to require a specific call count during verification.
+
+A spy can also forward to the real command while recording everything:
+
+```python
+mox.spy("aws").passthrough()
+```
+
+This "record mode" is helpful for capturing real interactions and later turning
+them into mocks.
+
+After verification, spies provide assertion helpers inspired by
+`unittest.mock`:
+
+```python
+spy.assert_called()
+spy.assert_called_with("--silent", stdin="payload")
+# or, to ensure the spy never executed:
+spy.assert_not_called()
+```
+
+These methods raise `AssertionError` when expectations are not met and are
+restricted to spy doubles.
+
+## Controller configuration and journals
+
+`CmdMox` offers configuration hooks that surface through both the fixture and
+the context-manager API:
+
+- `verify_on_exit` (default `True`) automatically calls `verify()` when a replay
+ phase ends inside a `with CmdMox()` block. Disable it when manual verification
+ management is required. Verification still runs if the body raises; when both
+ verification and the body fail, the verification error is suppressed so the
+ original exception surfaces.
+- `max_journal_entries` bounds the number of stored invocations (oldest entries
+ are evicted FIFO when the bound is reached). The journal is exposed via
+ `cmd_mox.journal`, a `collections.deque[Invocation]` recorded during replay.
+
+The journal is especially handy when debugging:
+
+```python
+cmd_mox.replay()
+exercise_system()
+cmd_mox.verify()
+assert [call.command for call in cmd_mox.journal] == ["git", "curl"]
+```
+
+To intercept a command without configuring a double—for example, to ensure it is
+treated as unexpected—register it explicitly:
+
+```python
+cmd_mox.register_command("name")
+```
+
+CmdMox will create the shim so the command is routed through the IPC server even
+without a stub, mock, or spy.
+
+## Fluent API reference
+
+The DSL methods closely mirror those described in the design specification. A
+few common ones are:
+
+- `with_args(*args)` – require exact arguments.
+- `with_matching_args(*matchers)` – match arguments using comparators.
+- `with_stdin(data_or_matcher)` – expect specific standard input (`str`) or
+ validate it with a predicate `Callable[[str], bool]`.
+- `with_env(mapping)` – set additional environment variables for the invocation
+ and apply them when custom handlers run.
+- `returns(stdout="", stderr="", exit_code=0)` – static response using text
+ values; CmdMox operates in text mode—pass `str` (bytes are not supported).
+ Note: For binary payloads, prefer `passthrough()` or encode/decode at the
+ boundary (e.g., base64) so handlers exchange `str`.
+- `runs(handler)` – call a function to produce dynamic output. The handler
+ receives an `Invocation` and should return either a `(stdout, stderr,
+ exit_code)` tuple or a `Response` instance.
+- `times(count)` – expect the command exactly `count` times.
+- `times_called(count)` – alias for `times` that emphasizes spy call counts.
+- `in_order()` – enforce strict ordering with other expectations.
+- `any_order()` – allow the expectation to be satisfied in any position.
+- `passthrough()` – for spies, run the real command while recording it.
+- `assert_called()`, `assert_not_called()`, `assert_called_with(*args,
+ stdin=None, env=None)` – spy-only helpers for post-verification assertions.
+
+Refer to the [design document](./python-native-command-mocking-design.md) for
+the full table of methods and examples.
+
+## Environment variables
+
+CmdMox exposes two environment variables to coordinate shims with the IPC
+server.
+
+- `CMOX_IPC_SOCKET` – path to the Unix domain socket used by shims. The
+ `CmdMox` fixture sets this automatically when the server starts. Shims exit
+ with an error if the variable is missing.
+- `CMOX_IPC_TIMEOUT` – communication timeout in seconds. Override this to tune
+ connection waits. When unset, the default is `5.0` seconds.
+
+Most tests should rely on the fixture to manage these variables.
diff --git a/docs/python-native-command-mocking-design.md b/docs/python-native-command-mocking-design.md
new file mode 100644
index 00000000..48c77a14
--- /dev/null
+++ b/docs/python-native-command-mocking-design.md
@@ -0,0 +1,111 @@
+# Python Native Command Mocking Design
+
+CmdMox underpins the Python-based command doubling strategy. The library offers
+an ergonomic façade for writing tests while keeping the execution model explicit
+and deterministic. This document captures the architectural decisions and the
+contracts relied upon by the higher-level usage guide.
+
+## Objectives
+
+- Provide a transport-agnostic façade that lets tests intercept subprocess
+ invocations without patching the Python standard library.
+- Support mocks, stubs and spies with a consistent fluent DSL that emphasizes
+ readability.
+- Capture interactions for later inspection through a replay journal so tests
+ remain debuggable.
+- Remain portable across Unix platforms while documenting the Windows
+ limitations of the IPC transport.
+
+## Architecture Overview
+
+CmdMox consists of three cooperating subsystems:
+
+1. **Controller** – The public entry point used by tests. It configures
+ expectations, manages lifecycle transitions and coordinates verification.
+2. **Environment** – Provisions temporary shim binaries (or scripts) and binds
+ them to the controller via Unix domain sockets. Environment configuration is
+ exposed through attributes such as `environment.shim_dir`.
+3. **IPC Server** – Handles requests from shims, dispatching them to the
+ recorded doubles. The server enforces strict sequencing to maintain
+ deterministic behaviour.
+
+The pytest plugin creates a controller per test function. When used as a context
+manager (`with CmdMox() as mox:`) the same controller lifecycle is available for
+non-pytest clients.
+
+## Lifecycle: Record → Replay → Verify
+
+CmdMox enforces a three-stage lifecycle:
+
+1. **Record** – Tests describe expectations using the fluent API. Each
+ expectation registers a command double with information about argument
+ matching, environment and the response strategy.
+2. **Replay** – The controller activates the IPC server and replaces the target
+ commands with shims. During this phase, invocations flow through the doubles.
+3. **Verify** – Finally, the controller checks that every expectation was
+ satisfied, including call counts and ordering rules.
+
+Exiting a `with CmdMox()` block triggers `verify()` automatically when
+`verify_on_exit=True` (the default). Failing verification suppresses the error
+if an exception already bubbled out of the context, keeping the original
+exception visible to the test runner.
+
+## Command Doubles and Responses
+
+`CommandDouble` instances configure behaviour with a fluent DSL:
+
+- `with_args(*args)` asserts exact argument sequences.
+- `with_matching_args(*matchers)` allows per-position comparator functions such
+ as `Regex`, `Contains`, `StartsWith`, `Any`, `IsA` or custom predicates.
+- `with_stdin(...)` and `with_env({...})` match stdin content and environment
+ fragments.
+- `returns(stdout="", stderr="", exit_code=0)` provides deterministic
+ responses; the API operates exclusively on `str` payloads.
+- `runs(handler)` executes dynamic hooks that receive an `Invocation` object.
+- `times(count)` and `times_called(count)` enforce call counts, with the latter
+ acting as a spy-specific alias.
+- `passthrough()` forwards execution to the real command while continuing to
+ record invocations.
+- `assert_called*` helpers are available on spies after verification to ease
+ assertions in tests.
+
+## Journal and Diagnostics
+
+Every invocation processed during replay is appended to `cmd_mox.journal`, a
+bounded `collections.deque`. The capacity is controlled by
+`max_journal_entries`; exceeding the limit evicts the oldest entries. The
+journal is the primary diagnostic surface for understanding unexpected
+interactions and is frequently asserted against in tests.
+
+## Environment Variables
+
+Two environment variables tie the controller and shims together:
+
+- `CMOX_IPC_SOCKET` – Path to the Unix domain socket exposed by the server. Shims
+ exit early if this variable is missing.
+- `CMOX_IPC_TIMEOUT` – Seconds to wait for IPC operations before raising a
+ timeout error. The default is `5.0` seconds and can be tuned per test via the
+ controller API.
+
+These variables are injected automatically when the pytest fixture or context
+manager initializes the controller.
+
+## Platform Notes
+
+The IPC transport relies on Unix domain sockets, so the pytest plugin guards
+against activation on Windows (`sys.platform == "win32"`). Tests should guard
+Windows-specific code paths accordingly. Future work may explore TCP loopback or
+named-pipe transports for full parity.
+
+## Error Handling and Validation
+
+- The controller refuses to enter replay without recorded expectations when
+ strict verification is required, ensuring unexpected commands fail fast.
+- Each shim invocation is validated against its matching strategy; mismatches are
+ surfaced immediately with descriptive error messages.
+- Journal eviction and verification are both deterministic so repeated runs yield
+ identical behaviour given the same expectations and inputs.
+
+CmdMox is designed to remain implementation-agnostic at the call site, allowing
+maintainers to evolve the underlying IPC layer or shim mechanism without
+breaking tests that depend on the documented contracts above.
diff --git a/docs/scripting-standards.md b/docs/scripting-standards.md
index 17d177ea..9c4885d9 100644
--- a/docs/scripting-standards.md
+++ b/docs/scripting-standards.md
@@ -11,32 +11,31 @@ as a default.
## Rationale for adopting Cyclopts
-- **Environment‑first configuration without glue.** Cyclopts reads environment
+- Environment‑first configuration without glue. Cyclopts reads environment
variables with a defined prefix (for example, `INPUT_`) and maps them to
parameters directly. Bash argument assembly and bespoke parsing can be
removed.
-- **Typed lists and paths from env.** Parameters annotated as `list[str]` or
+- Typed lists and paths from env. Parameters annotated as `list[str]` or
`list[pathlib.Path]` are populated from whitespace‑ or delimiter‑separated
environment values. Custom split/trim helpers are unnecessary.
-- **Clear precedence model.** CLI flags override environment variables, which
+- Clear precedence model. CLI flags override environment variables, which
override code defaults. Behaviour is predictable in both CI and local runs.
-- **Small API surface.** The API is explicit and integrates cleanly with type
+- Small API surface. The API is explicit and integrates cleanly with type
hints, aiding readability and testing.
-- **Backwards‑compatible migration.** Option aliases and per‑parameter
+- Backwards‑compatible migration. Option aliases and per‑parameter
environment variable names permit preservation of existing interfaces while
removing shell glue.
## Language and runtime
-- Target Python **3.13** for all new scripts. Older versions may be used only
- when integration constraints require them; any exception must be documented
+- Target Python 3.13 for all new scripts. Older versions may only be used when
+ integration constraints require them, and any exception must be documented
inline.
-- Each script starts with a `uv` script block so runtime and dependency
- expectations travel with the file. Prefer the shebang
- `#!/usr/bin/env -S uv run python` followed by the metadata block shown below.
-- External processes are invoked via
- [`plumbum`](https://plumbum.readthedocs.io) to provide structured command
- execution rather than ad‑hoc shell strings.
+- Each script starts with a `uv` script block so runtime and dependency
+ expectations travel with the file. Prefer the shebang `#!/usr/bin/env -S uv
+ run python` followed by the metadata block shown in the example below.
+- External processes are invoked via [`plumbum`](https://plumbum.readthedocs.io)
+ to provide structured command execution rather than ad‑hoc shell strings.
- File‑system interactions use `pathlib.Path`. Higher‑level operations (for
example, copying or removing trees) go through the `shutil` standard library
module.
@@ -140,7 +139,7 @@ if __name__ == "__main__":
app()
```
-**Guidance:**
+Guidance:
- Parameter names should be descriptive and stable. Where a legacy flag name
must remain available, add an alias:
@@ -371,7 +370,7 @@ def test_patch_python_dependency(mocker):
# Example: patch a helper function used by the script
from scripts import helpers
- mocker.patch.object(helpers, "compute_checksum", return_value="deadbeef")
+ mocker.patch.object(helpers, "compute_checksum", return_value="deadbeef")
assert helpers.compute_checksum(b"abc") == "deadbeef"
```
@@ -452,17 +451,17 @@ def test_spy_and_record(cmd_mox, monkeypatch, tmp_path):
## Migration guidance (Typer → Cyclopts)
-1. **Dependencies:** replace Typer with Cyclopts in the script’s `uv` block.
-2. **Entry point:** replace `app = typer.Typer(...)` with `app = App(...)` and
+1. Dependencies: replace Typer with Cyclopts in the script’s `uv` block.
+2. Entry point: replace `app = typer.Typer(...)` with `app = App(...)` and
configure `Env("INPUT_", command=False)` where environment variables are
authoritative in CI.
-3. **Parameters:** replace `typer.Option(...)` with annotations and
+3. Parameters: replace `typer.Option(...)` with annotations and
`Parameter(...)`. Mark required options with `required=True`. Map any
non‑matching environment names via `env_var=...`.
-4. **Lists:** remove custom split/trim code. Use list‑typed parameters; add
+4. Lists: remove custom split/trim code. Use list‑typed parameters; add
`env_var_split=","` where a non‑whitespace delimiter is required.
-5. **Compatibility:** retain legacy flag names using `aliases=["--old-name"]`.
-6. **Bash glue:** delete argument arrays and conditional appends in GitHub
+5. Compatibility: retain legacy flag names using `aliases=["--old-name"]`.
+6. Bash glue: delete argument arrays and conditional appends in GitHub
Actions. Export `INPUT_*` environment variables and call `uv run` on the
script.
@@ -493,7 +492,5 @@ def test_spy_and_record(cmd_mox, monkeypatch, tmp_path):
- On Windows, newline‑separated lists are recommended for `list[Path]` to
sidestep `;`/`:` semantics.
-______________________________________________________________________
-
This document should be referenced when introducing or updating automation
scripts to maintain a consistent developer experience across the repository.
diff --git a/pyproject.toml b/pyproject.toml
index a6ecbf81..6897e907 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -23,8 +23,6 @@ classifiers = [
"Programming Language :: Python :: 3.12",
"Programming Language :: Python :: 3.13",
]
-
-
[dependency-groups]
dev = [
"lxml-stubs>=0.5.1",
@@ -32,6 +30,7 @@ dev = [
"pyyaml>=6.0,<7.0",
"ty>=0.0.1a20",
"uuid6>=2025.0.1",
+ "cmd-mox@git+https://github.com/leynos/cmd-mox.git@baaaf89862837b8a1565fab2c18c34d498e08601",
]
[tool.ruff]
@@ -39,27 +38,27 @@ line-length = 88
[tool.ruff.lint]
select = [
- "F", # Pyflakes rules
- "W", # PyCodeStyle warnings
- "E", # PyCodeStyle errors
- "I", # Sort imports properly
- "UP", # Warn if certain things can changed due to newer Python versions
- "C4", # Catch incorrect use of comprehensions, dict, list, etc
- "FA", # Enforce from __future__ import annotations
- "ISC", # Good use of string concatenation
- "ICN", # Use common import conventions
- "RET", # Good return practices
- "SIM", # Common simplification rules
- "TID", # Some good import practices
- "TC", # Enforce importing certain types in a TYPE_CHECKING block
- "PTH", # Use pathlib instead of os.path
- "TD", # Be diligent with TODO comments
- "A", # detect shadowed builtins
- "BLE", # disallow catch-all exceptions
- "S", # disallow things like "exec"; also restricts "assert" but I just NOQA it when I really need it
- "DTZ", # require strict timezone manipulation with datetime
- "FBT", # detect boolean traps
- "N", # enforce naming conventions, e.g. ClassName vs function_name
+ "F",
+ "W",
+ "E",
+ "I",
+ "UP",
+ "C4",
+ "FA",
+ "ISC",
+ "ICN",
+ "RET",
+ "SIM",
+ "TID",
+ "TC",
+ "PTH",
+ "TD",
+ "A",
+ "BLE",
+ "S",
+ "DTZ",
+ "FBT",
+ "N",
"FURB",
"B",
"RUF",
@@ -125,7 +124,8 @@ typeCheckingMode = "strict"
reportUnknownVariableType = "error"
reportUnknownParameterType = "error"
reportUnknownMemberType = "error"
-reportMissingTypeStubs = "error"
+reportMissingTypeStubs = "warning"
+reportUnusedFunction = "none"
pythonVersion = "3.12"
venvPath = "."
-venv = ".venv"
+venv = ".venv"
\ No newline at end of file
diff --git a/uv.lock b/uv.lock
deleted file mode 100644
index 8cfdbd99..00000000
--- a/uv.lock
+++ /dev/null
@@ -1,375 +0,0 @@
-version = 1
-revision = 2
-requires-python = ">=3.12"
-
-[[package]]
-name = "attrs"
-version = "25.3.0"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/1367933a8532ee6ff8d63537de4f1177af4bff9f3e829baf7331f595bb24/attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b", size = 812032, upload-time = "2025-03-13T11:10:22.779Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/77/06/bb80f5f86020c4551da315d78b3ab75e8228f89f0162f2c3a819e407941a/attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", size = 63815, upload-time = "2025-03-13T11:10:21.14Z" },
-]
-
-[[package]]
-name = "click"
-version = "8.2.1"
-source = { registry = "https://pypi.org/simple" }
-dependencies = [
- { name = "colorama", marker = "sys_platform == 'win32'" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/60/6c/8ca2efa64cf75a977a0d7fac081354553ebe483345c734fb6b6515d96bbc/click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202", size = 286342, upload-time = "2025-05-20T23:19:49.832Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/85/32/10bb5764d90a8eee674e9dc6f4db6a0ab47c8c4d0d83c27f7c39ac415a4d/click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b", size = 102215, upload-time = "2025-05-20T23:19:47.796Z" },
-]
-
-[[package]]
-name = "colorama"
-version = "0.4.6"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" },
-]
-
-[[package]]
-name = "cyclopts"
-version = "2.9.9"
-source = { registry = "https://pypi.org/simple" }
-dependencies = [
- { name = "attrs" },
- { name = "docstring-parser", marker = "python_full_version < '4'" },
- { name = "rich" },
- { name = "rich-rst" },
- { name = "typing-extensions", marker = "python_full_version < '4'" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/61/b6/51022d8b673087262c4bcd1e64c1db3a8ab01510033f7f82a561998e3499/cyclopts-2.9.9.tar.gz", hash = "sha256:11d7bb59be253329ff49a1b9a634676c7ae708605d4975090783b99d081c1a0b", size = 45179, upload-time = "2024-08-27T21:14:47.137Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/08/7c/f1af5b44f581df05009ecc9b15395532fc18c7aa7292b5e98501af25c2db/cyclopts-2.9.9-py3-none-any.whl", hash = "sha256:d0ce956c70f3070e5bc16824ecb5ebba155be45ef4aadbb78ac4753dd99367e3", size = 51062, upload-time = "2024-08-27T21:14:45.977Z" },
-]
-
-[[package]]
-name = "docstring-parser"
-version = "0.17.0"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/b2/9d/c3b43da9515bd270df0f80548d9944e389870713cc1fe2b8fb35fe2bcefd/docstring_parser-0.17.0.tar.gz", hash = "sha256:583de4a309722b3315439bb31d64ba3eebada841f2e2cee23b99df001434c912", size = 27442, upload-time = "2025-07-21T07:35:01.868Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/55/e2/2537ebcff11c1ee1ff17d8d0b6f4db75873e3b0fb32c2d4a2ee31ecb310a/docstring_parser-0.17.0-py3-none-any.whl", hash = "sha256:cf2569abd23dce8099b300f9b4fa8191e9582dda731fd533daf54c4551658708", size = 36896, upload-time = "2025-07-21T07:35:00.684Z" },
-]
-
-[[package]]
-name = "docutils"
-version = "0.22.2"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/4a/c0/89fe6215b443b919cb98a5002e107cb5026854ed1ccb6b5833e0768419d1/docutils-0.22.2.tar.gz", hash = "sha256:9fdb771707c8784c8f2728b67cb2c691305933d68137ef95a75db5f4dfbc213d", size = 2289092, upload-time = "2025-09-20T17:55:47.994Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/66/dd/f95350e853a4468ec37478414fc04ae2d61dad7a947b3015c3dcc51a09b9/docutils-0.22.2-py3-none-any.whl", hash = "sha256:b0e98d679283fc3bb0ead8a5da7f501baa632654e7056e9c5846842213d674d8", size = 632667, upload-time = "2025-09-20T17:55:43.052Z" },
-]
-
-[[package]]
-name = "iniconfig"
-version = "2.1.0"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793, upload-time = "2025-03-19T20:09:59.721Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" },
-]
-
-[[package]]
-name = "lxml"
-version = "5.4.0"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/76/3d/14e82fc7c8fb1b7761f7e748fd47e2ec8276d137b6acfe5a4bb73853e08f/lxml-5.4.0.tar.gz", hash = "sha256:d12832e1dbea4be280b22fd0ea7c9b87f0d8fc51ba06e92dc62d52f804f78ebd", size = 3679479, upload-time = "2025-04-23T01:50:29.322Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/f8/4c/d101ace719ca6a4ec043eb516fcfcb1b396a9fccc4fcd9ef593df34ba0d5/lxml-5.4.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b5aff6f3e818e6bdbbb38e5967520f174b18f539c2b9de867b1e7fde6f8d95a4", size = 8127392, upload-time = "2025-04-23T01:46:04.09Z" },
- { url = "https://files.pythonhosted.org/packages/11/84/beddae0cec4dd9ddf46abf156f0af451c13019a0fa25d7445b655ba5ccb7/lxml-5.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:942a5d73f739ad7c452bf739a62a0f83e2578afd6b8e5406308731f4ce78b16d", size = 4415103, upload-time = "2025-04-23T01:46:07.227Z" },
- { url = "https://files.pythonhosted.org/packages/d0/25/d0d93a4e763f0462cccd2b8a665bf1e4343dd788c76dcfefa289d46a38a9/lxml-5.4.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:460508a4b07364d6abf53acaa0a90b6d370fafde5693ef37602566613a9b0779", size = 5024224, upload-time = "2025-04-23T01:46:10.237Z" },
- { url = "https://files.pythonhosted.org/packages/31/ce/1df18fb8f7946e7f3388af378b1f34fcf253b94b9feedb2cec5969da8012/lxml-5.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:529024ab3a505fed78fe3cc5ddc079464e709f6c892733e3f5842007cec8ac6e", size = 4769913, upload-time = "2025-04-23T01:46:12.757Z" },
- { url = "https://files.pythonhosted.org/packages/4e/62/f4a6c60ae7c40d43657f552f3045df05118636be1165b906d3423790447f/lxml-5.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ca56ebc2c474e8f3d5761debfd9283b8b18c76c4fc0967b74aeafba1f5647f9", size = 5290441, upload-time = "2025-04-23T01:46:16.037Z" },
- { url = "https://files.pythonhosted.org/packages/9e/aa/04f00009e1e3a77838c7fc948f161b5d2d5de1136b2b81c712a263829ea4/lxml-5.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a81e1196f0a5b4167a8dafe3a66aa67c4addac1b22dc47947abd5d5c7a3f24b5", size = 4820165, upload-time = "2025-04-23T01:46:19.137Z" },
- { url = "https://files.pythonhosted.org/packages/c9/1f/e0b2f61fa2404bf0f1fdf1898377e5bd1b74cc9b2cf2c6ba8509b8f27990/lxml-5.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00b8686694423ddae324cf614e1b9659c2edb754de617703c3d29ff568448df5", size = 4932580, upload-time = "2025-04-23T01:46:21.963Z" },
- { url = "https://files.pythonhosted.org/packages/24/a2/8263f351b4ffe0ed3e32ea7b7830f845c795349034f912f490180d88a877/lxml-5.4.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:c5681160758d3f6ac5b4fea370495c48aac0989d6a0f01bb9a72ad8ef5ab75c4", size = 4759493, upload-time = "2025-04-23T01:46:24.316Z" },
- { url = "https://files.pythonhosted.org/packages/05/00/41db052f279995c0e35c79d0f0fc9f8122d5b5e9630139c592a0b58c71b4/lxml-5.4.0-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:2dc191e60425ad70e75a68c9fd90ab284df64d9cd410ba8d2b641c0c45bc006e", size = 5324679, upload-time = "2025-04-23T01:46:27.097Z" },
- { url = "https://files.pythonhosted.org/packages/1d/be/ee99e6314cdef4587617d3b3b745f9356d9b7dd12a9663c5f3b5734b64ba/lxml-5.4.0-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:67f779374c6b9753ae0a0195a892a1c234ce8416e4448fe1e9f34746482070a7", size = 4890691, upload-time = "2025-04-23T01:46:30.009Z" },
- { url = "https://files.pythonhosted.org/packages/ad/36/239820114bf1d71f38f12208b9c58dec033cbcf80101cde006b9bde5cffd/lxml-5.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:79d5bfa9c1b455336f52343130b2067164040604e41f6dc4d8313867ed540079", size = 4955075, upload-time = "2025-04-23T01:46:32.33Z" },
- { url = "https://files.pythonhosted.org/packages/d4/e1/1b795cc0b174efc9e13dbd078a9ff79a58728a033142bc6d70a1ee8fc34d/lxml-5.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3d3c30ba1c9b48c68489dc1829a6eede9873f52edca1dda900066542528d6b20", size = 4838680, upload-time = "2025-04-23T01:46:34.852Z" },
- { url = "https://files.pythonhosted.org/packages/72/48/3c198455ca108cec5ae3662ae8acd7fd99476812fd712bb17f1b39a0b589/lxml-5.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:1af80c6316ae68aded77e91cd9d80648f7dd40406cef73df841aa3c36f6907c8", size = 5391253, upload-time = "2025-04-23T01:46:37.608Z" },
- { url = "https://files.pythonhosted.org/packages/d6/10/5bf51858971c51ec96cfc13e800a9951f3fd501686f4c18d7d84fe2d6352/lxml-5.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:4d885698f5019abe0de3d352caf9466d5de2baded00a06ef3f1216c1a58ae78f", size = 5261651, upload-time = "2025-04-23T01:46:40.183Z" },
- { url = "https://files.pythonhosted.org/packages/2b/11/06710dd809205377da380546f91d2ac94bad9ff735a72b64ec029f706c85/lxml-5.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:aea53d51859b6c64e7c51d522c03cc2c48b9b5d6172126854cc7f01aa11f52bc", size = 5024315, upload-time = "2025-04-23T01:46:43.333Z" },
- { url = "https://files.pythonhosted.org/packages/f5/b0/15b6217834b5e3a59ebf7f53125e08e318030e8cc0d7310355e6edac98ef/lxml-5.4.0-cp312-cp312-win32.whl", hash = "sha256:d90b729fd2732df28130c064aac9bb8aff14ba20baa4aee7bd0795ff1187545f", size = 3486149, upload-time = "2025-04-23T01:46:45.684Z" },
- { url = "https://files.pythonhosted.org/packages/91/1e/05ddcb57ad2f3069101611bd5f5084157d90861a2ef460bf42f45cced944/lxml-5.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:1dc4ca99e89c335a7ed47d38964abcb36c5910790f9bd106f2a8fa2ee0b909d2", size = 3817095, upload-time = "2025-04-23T01:46:48.521Z" },
- { url = "https://files.pythonhosted.org/packages/87/cb/2ba1e9dd953415f58548506fa5549a7f373ae55e80c61c9041b7fd09a38a/lxml-5.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:773e27b62920199c6197130632c18fb7ead3257fce1ffb7d286912e56ddb79e0", size = 8110086, upload-time = "2025-04-23T01:46:52.218Z" },
- { url = "https://files.pythonhosted.org/packages/b5/3e/6602a4dca3ae344e8609914d6ab22e52ce42e3e1638c10967568c5c1450d/lxml-5.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ce9c671845de9699904b1e9df95acfe8dfc183f2310f163cdaa91a3535af95de", size = 4404613, upload-time = "2025-04-23T01:46:55.281Z" },
- { url = "https://files.pythonhosted.org/packages/4c/72/bf00988477d3bb452bef9436e45aeea82bb40cdfb4684b83c967c53909c7/lxml-5.4.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9454b8d8200ec99a224df8854786262b1bd6461f4280064c807303c642c05e76", size = 5012008, upload-time = "2025-04-23T01:46:57.817Z" },
- { url = "https://files.pythonhosted.org/packages/92/1f/93e42d93e9e7a44b2d3354c462cd784dbaaf350f7976b5d7c3f85d68d1b1/lxml-5.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cccd007d5c95279e529c146d095f1d39ac05139de26c098166c4beb9374b0f4d", size = 4760915, upload-time = "2025-04-23T01:47:00.745Z" },
- { url = "https://files.pythonhosted.org/packages/45/0b/363009390d0b461cf9976a499e83b68f792e4c32ecef092f3f9ef9c4ba54/lxml-5.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0fce1294a0497edb034cb416ad3e77ecc89b313cff7adbee5334e4dc0d11f422", size = 5283890, upload-time = "2025-04-23T01:47:04.702Z" },
- { url = "https://files.pythonhosted.org/packages/19/dc/6056c332f9378ab476c88e301e6549a0454dbee8f0ae16847414f0eccb74/lxml-5.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:24974f774f3a78ac12b95e3a20ef0931795ff04dbb16db81a90c37f589819551", size = 4812644, upload-time = "2025-04-23T01:47:07.833Z" },
- { url = "https://files.pythonhosted.org/packages/ee/8a/f8c66bbb23ecb9048a46a5ef9b495fd23f7543df642dabeebcb2eeb66592/lxml-5.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:497cab4d8254c2a90bf988f162ace2ddbfdd806fce3bda3f581b9d24c852e03c", size = 4921817, upload-time = "2025-04-23T01:47:10.317Z" },
- { url = "https://files.pythonhosted.org/packages/04/57/2e537083c3f381f83d05d9b176f0d838a9e8961f7ed8ddce3f0217179ce3/lxml-5.4.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:e794f698ae4c5084414efea0f5cc9f4ac562ec02d66e1484ff822ef97c2cadff", size = 4753916, upload-time = "2025-04-23T01:47:12.823Z" },
- { url = "https://files.pythonhosted.org/packages/d8/80/ea8c4072109a350848f1157ce83ccd9439601274035cd045ac31f47f3417/lxml-5.4.0-cp313-cp313-manylinux_2_28_ppc64le.whl", hash = "sha256:2c62891b1ea3094bb12097822b3d44b93fc6c325f2043c4d2736a8ff09e65f60", size = 5289274, upload-time = "2025-04-23T01:47:15.916Z" },
- { url = "https://files.pythonhosted.org/packages/b3/47/c4be287c48cdc304483457878a3f22999098b9a95f455e3c4bda7ec7fc72/lxml-5.4.0-cp313-cp313-manylinux_2_28_s390x.whl", hash = "sha256:142accb3e4d1edae4b392bd165a9abdee8a3c432a2cca193df995bc3886249c8", size = 4874757, upload-time = "2025-04-23T01:47:19.793Z" },
- { url = "https://files.pythonhosted.org/packages/2f/04/6ef935dc74e729932e39478e44d8cfe6a83550552eaa072b7c05f6f22488/lxml-5.4.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:1a42b3a19346e5601d1b8296ff6ef3d76038058f311902edd574461e9c036982", size = 4947028, upload-time = "2025-04-23T01:47:22.401Z" },
- { url = "https://files.pythonhosted.org/packages/cb/f9/c33fc8daa373ef8a7daddb53175289024512b6619bc9de36d77dca3df44b/lxml-5.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4291d3c409a17febf817259cb37bc62cb7eb398bcc95c1356947e2871911ae61", size = 4834487, upload-time = "2025-04-23T01:47:25.513Z" },
- { url = "https://files.pythonhosted.org/packages/8d/30/fc92bb595bcb878311e01b418b57d13900f84c2b94f6eca9e5073ea756e6/lxml-5.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4f5322cf38fe0e21c2d73901abf68e6329dc02a4994e483adbcf92b568a09a54", size = 5381688, upload-time = "2025-04-23T01:47:28.454Z" },
- { url = "https://files.pythonhosted.org/packages/43/d1/3ba7bd978ce28bba8e3da2c2e9d5ae3f8f521ad3f0ca6ea4788d086ba00d/lxml-5.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:0be91891bdb06ebe65122aa6bf3fc94489960cf7e03033c6f83a90863b23c58b", size = 5242043, upload-time = "2025-04-23T01:47:31.208Z" },
- { url = "https://files.pythonhosted.org/packages/ee/cd/95fa2201041a610c4d08ddaf31d43b98ecc4b1d74b1e7245b1abdab443cb/lxml-5.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:15a665ad90054a3d4f397bc40f73948d48e36e4c09f9bcffc7d90c87410e478a", size = 5021569, upload-time = "2025-04-23T01:47:33.805Z" },
- { url = "https://files.pythonhosted.org/packages/2d/a6/31da006fead660b9512d08d23d31e93ad3477dd47cc42e3285f143443176/lxml-5.4.0-cp313-cp313-win32.whl", hash = "sha256:d5663bc1b471c79f5c833cffbc9b87d7bf13f87e055a5c86c363ccd2348d7e82", size = 3485270, upload-time = "2025-04-23T01:47:36.133Z" },
- { url = "https://files.pythonhosted.org/packages/fc/14/c115516c62a7d2499781d2d3d7215218c0731b2c940753bf9f9b7b73924d/lxml-5.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:bcb7a1096b4b6b24ce1ac24d4942ad98f983cd3810f9711bcd0293f43a9d8b9f", size = 3814606, upload-time = "2025-04-23T01:47:39.028Z" },
-]
-
-[[package]]
-name = "lxml-stubs"
-version = "0.5.1"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/99/da/1a3a3e5d159b249fc2970d73437496b908de8e4716a089c69591b4ffa6fd/lxml-stubs-0.5.1.tar.gz", hash = "sha256:e0ec2aa1ce92d91278b719091ce4515c12adc1d564359dfaf81efa7d4feab79d", size = 14778, upload-time = "2024-01-10T09:37:46.521Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/1f/c9/e0f8e4e6e8a69e5959b06499582dca6349db6769cc7fdfb8a02a7c75a9ae/lxml_stubs-0.5.1-py3-none-any.whl", hash = "sha256:1f689e5dbc4b9247cb09ae820c7d34daeb1fdbd1db06123814b856dae7787272", size = 13584, upload-time = "2024-01-10T09:37:44.931Z" },
-]
-
-[[package]]
-name = "markdown-it-py"
-version = "4.0.0"
-source = { registry = "https://pypi.org/simple" }
-dependencies = [
- { name = "mdurl" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070, upload-time = "2025-08-11T12:57:52.854Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" },
-]
-
-[[package]]
-name = "mdurl"
-version = "0.1.2"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = "2022-08-14T12:40:10.846Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" },
-]
-
-[[package]]
-name = "packaging"
-version = "25.0"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" },
-]
-
-[[package]]
-name = "pluggy"
-version = "1.6.0"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" },
-]
-
-[[package]]
-name = "plumbum"
-version = "1.9.0"
-source = { registry = "https://pypi.org/simple" }
-dependencies = [
- { name = "pywin32", marker = "platform_python_implementation != 'PyPy' and sys_platform == 'win32'" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/f0/5d/49ba324ad4ae5b1a4caefafbce7a1648540129344481f2ed4ef6bb68d451/plumbum-1.9.0.tar.gz", hash = "sha256:e640062b72642c3873bd5bdc3effed75ba4d3c70ef6b6a7b907357a84d909219", size = 319083, upload-time = "2024-10-05T05:59:27.059Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/4f/9d/d03542c93bb3d448406731b80f39c3d5601282f778328c22c77d270f4ed4/plumbum-1.9.0-py3-none-any.whl", hash = "sha256:9fd0d3b0e8d86e4b581af36edf3f3bbe9d1ae15b45b8caab28de1bcb27aaa7f5", size = 127970, upload-time = "2024-10-05T05:59:25.102Z" },
-]
-
-[[package]]
-name = "pygments"
-version = "2.19.2"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" },
-]
-
-[[package]]
-name = "pytest"
-version = "8.4.2"
-source = { registry = "https://pypi.org/simple" }
-dependencies = [
- { name = "colorama", marker = "sys_platform == 'win32'" },
- { name = "iniconfig" },
- { name = "packaging" },
- { name = "pluggy" },
- { name = "pygments" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/a3/5c/00a0e072241553e1a7496d638deababa67c5058571567b92a7eaa258397c/pytest-8.4.2.tar.gz", hash = "sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01", size = 1519618, upload-time = "2025-09-04T14:34:22.711Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/a8/a4/20da314d277121d6534b3a980b29035dcd51e6744bd79075a6ce8fa4eb8d/pytest-8.4.2-py3-none-any.whl", hash = "sha256:872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79", size = 365750, upload-time = "2025-09-04T14:34:20.226Z" },
-]
-
-[[package]]
-name = "pywin32"
-version = "311"
-source = { registry = "https://pypi.org/simple" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/e7/ab/01ea1943d4eba0f850c3c61e78e8dd59757ff815ff3ccd0a84de5f541f42/pywin32-311-cp312-cp312-win32.whl", hash = "sha256:750ec6e621af2b948540032557b10a2d43b0cee2ae9758c54154d711cc852d31", size = 8706543, upload-time = "2025-07-14T20:13:20.765Z" },
- { url = "https://files.pythonhosted.org/packages/d1/a8/a0e8d07d4d051ec7502cd58b291ec98dcc0c3fff027caad0470b72cfcc2f/pywin32-311-cp312-cp312-win_amd64.whl", hash = "sha256:b8c095edad5c211ff31c05223658e71bf7116daa0ecf3ad85f3201ea3190d067", size = 9495040, upload-time = "2025-07-14T20:13:22.543Z" },
- { url = "https://files.pythonhosted.org/packages/ba/3a/2ae996277b4b50f17d61f0603efd8253cb2d79cc7ae159468007b586396d/pywin32-311-cp312-cp312-win_arm64.whl", hash = "sha256:e286f46a9a39c4a18b319c28f59b61de793654af2f395c102b4f819e584b5852", size = 8710102, upload-time = "2025-07-14T20:13:24.682Z" },
- { url = "https://files.pythonhosted.org/packages/a5/be/3fd5de0979fcb3994bfee0d65ed8ca9506a8a1260651b86174f6a86f52b3/pywin32-311-cp313-cp313-win32.whl", hash = "sha256:f95ba5a847cba10dd8c4d8fefa9f2a6cf283b8b88ed6178fa8a6c1ab16054d0d", size = 8705700, upload-time = "2025-07-14T20:13:26.471Z" },
- { url = "https://files.pythonhosted.org/packages/e3/28/e0a1909523c6890208295a29e05c2adb2126364e289826c0a8bc7297bd5c/pywin32-311-cp313-cp313-win_amd64.whl", hash = "sha256:718a38f7e5b058e76aee1c56ddd06908116d35147e133427e59a3983f703a20d", size = 9494700, upload-time = "2025-07-14T20:13:28.243Z" },
- { url = "https://files.pythonhosted.org/packages/04/bf/90339ac0f55726dce7d794e6d79a18a91265bdf3aa70b6b9ca52f35e022a/pywin32-311-cp313-cp313-win_arm64.whl", hash = "sha256:7b4075d959648406202d92a2310cb990fea19b535c7f4a78d3f5e10b926eeb8a", size = 8709318, upload-time = "2025-07-14T20:13:30.348Z" },
- { url = "https://files.pythonhosted.org/packages/c9/31/097f2e132c4f16d99a22bfb777e0fd88bd8e1c634304e102f313af69ace5/pywin32-311-cp314-cp314-win32.whl", hash = "sha256:b7a2c10b93f8986666d0c803ee19b5990885872a7de910fc460f9b0c2fbf92ee", size = 8840714, upload-time = "2025-07-14T20:13:32.449Z" },
- { url = "https://files.pythonhosted.org/packages/90/4b/07c77d8ba0e01349358082713400435347df8426208171ce297da32c313d/pywin32-311-cp314-cp314-win_amd64.whl", hash = "sha256:3aca44c046bd2ed8c90de9cb8427f581c479e594e99b5c0bb19b29c10fd6cb87", size = 9656800, upload-time = "2025-07-14T20:13:34.312Z" },
- { url = "https://files.pythonhosted.org/packages/c0/d2/21af5c535501a7233e734b8af901574572da66fcc254cb35d0609c9080dd/pywin32-311-cp314-cp314-win_arm64.whl", hash = "sha256:a508e2d9025764a8270f93111a970e1d0fbfc33f4153b388bb649b7eec4f9b42", size = 8932540, upload-time = "2025-07-14T20:13:36.379Z" },
-]
-
-[[package]]
-name = "pyyaml"
-version = "6.0.2"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631, upload-time = "2024-08-06T20:33:50.674Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873, upload-time = "2024-08-06T20:32:25.131Z" },
- { url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302, upload-time = "2024-08-06T20:32:26.511Z" },
- { url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154, upload-time = "2024-08-06T20:32:28.363Z" },
- { url = "https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223, upload-time = "2024-08-06T20:32:30.058Z" },
- { url = "https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542, upload-time = "2024-08-06T20:32:31.881Z" },
- { url = "https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164, upload-time = "2024-08-06T20:32:37.083Z" },
- { url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611, upload-time = "2024-08-06T20:32:38.898Z" },
- { url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591, upload-time = "2024-08-06T20:32:40.241Z" },
- { url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338, upload-time = "2024-08-06T20:32:41.93Z" },
- { url = "https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309, upload-time = "2024-08-06T20:32:43.4Z" },
- { url = "https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679, upload-time = "2024-08-06T20:32:44.801Z" },
- { url = "https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428, upload-time = "2024-08-06T20:32:46.432Z" },
- { url = "https://files.pythonhosted.org/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", size = 763361, upload-time = "2024-08-06T20:32:51.188Z" },
- { url = "https://files.pythonhosted.org/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523, upload-time = "2024-08-06T20:32:53.019Z" },
- { url = "https://files.pythonhosted.org/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660, upload-time = "2024-08-06T20:32:54.708Z" },
- { url = "https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size = 751597, upload-time = "2024-08-06T20:32:56.985Z" },
- { url = "https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527, upload-time = "2024-08-06T20:33:03.001Z" },
- { url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446, upload-time = "2024-08-06T20:33:04.33Z" },
-]
-
-[[package]]
-name = "rich"
-version = "14.1.0"
-source = { registry = "https://pypi.org/simple" }
-dependencies = [
- { name = "markdown-it-py" },
- { name = "pygments" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/fe/75/af448d8e52bf1d8fa6a9d089ca6c07ff4453d86c65c145d0a300bb073b9b/rich-14.1.0.tar.gz", hash = "sha256:e497a48b844b0320d45007cdebfeaeed8db2a4f4bcf49f15e455cfc4af11eaa8", size = 224441, upload-time = "2025-07-25T07:32:58.125Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/e3/30/3c4d035596d3cf444529e0b2953ad0466f6049528a879d27534700580395/rich-14.1.0-py3-none-any.whl", hash = "sha256:536f5f1785986d6dbdea3c75205c473f970777b4a0d6c6dd1b696aa05a3fa04f", size = 243368, upload-time = "2025-07-25T07:32:56.73Z" },
-]
-
-[[package]]
-name = "rich-rst"
-version = "1.3.1"
-source = { registry = "https://pypi.org/simple" }
-dependencies = [
- { name = "docutils" },
- { name = "rich" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/b0/69/5514c3a87b5f10f09a34bb011bc0927bc12c596c8dae5915604e71abc386/rich_rst-1.3.1.tar.gz", hash = "sha256:fad46e3ba42785ea8c1785e2ceaa56e0ffa32dbe5410dec432f37e4107c4f383", size = 13839, upload-time = "2024-04-30T04:40:38.125Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/fd/bc/cc4e3dbc5e7992398dcb7a8eda0cbcf4fb792a0cdb93f857b478bf3cf884/rich_rst-1.3.1-py3-none-any.whl", hash = "sha256:498a74e3896507ab04492d326e794c3ef76e7cda078703aa592d1853d91098c1", size = 11621, upload-time = "2024-04-30T04:40:32.619Z" },
-]
-
-[[package]]
-name = "shared-actions"
-version = "1.2.2"
-source = { editable = "." }
-dependencies = [
- { name = "cyclopts" },
- { name = "lxml" },
- { name = "plumbum" },
- { name = "typer" },
-]
-
-[package.dev-dependencies]
-dev = [
- { name = "lxml-stubs" },
- { name = "pytest" },
- { name = "pyyaml" },
- { name = "ty" },
- { name = "uuid6" },
-]
-
-[package.metadata]
-requires-dist = [
- { name = "cyclopts", specifier = ">=2.9,<3.0" },
- { name = "lxml", specifier = ">=5.2,<6.0" },
- { name = "plumbum", specifier = ">=1.8,<2.0" },
- { name = "typer", specifier = ">=0.9,<1.0" },
-]
-
-[package.metadata.requires-dev]
-dev = [
- { name = "lxml-stubs", specifier = ">=0.5.1" },
- { name = "pytest", specifier = ">=8.0,<9.0" },
- { name = "pyyaml", specifier = ">=6.0,<7.0" },
- { name = "ty", specifier = ">=0.0.1a20" },
- { name = "uuid6", specifier = ">=2025.0.1" },
-]
-
-[[package]]
-name = "shellingham"
-version = "1.5.4"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/58/15/8b3609fd3830ef7b27b655beb4b4e9c62313a4e8da8c676e142cc210d58e/shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de", size = 10310, upload-time = "2023-10-24T04:13:40.426Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755, upload-time = "2023-10-24T04:13:38.866Z" },
-]
-
-[[package]]
-name = "ty"
-version = "0.0.1a20"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/7a/82/a5e3b4bc5280ec49c4b0b43d0ff727d58c7df128752c9c6f97ad0b5f575f/ty-0.0.1a20.tar.gz", hash = "sha256:933b65a152f277aa0e23ba9027e5df2c2cc09e18293e87f2a918658634db5f15", size = 4194773, upload-time = "2025-09-03T12:35:46.775Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/45/c8/f7d39392043d5c04936f6cad90e50eb661965ed092ca4bfc01db917d7b8a/ty-0.0.1a20-py3-none-linux_armv6l.whl", hash = "sha256:f73a7aca1f0d38af4d6999b375eb00553f3bfcba102ae976756cc142e14f3450", size = 8443599, upload-time = "2025-09-03T12:35:04.289Z" },
- { url = "https://files.pythonhosted.org/packages/1e/57/5aec78f9b8a677b7439ccded7d66c3361e61247e0f6b14e659b00dd01008/ty-0.0.1a20-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:cad12c857ea4b97bf61e02f6796e13061ccca5e41f054cbd657862d80aa43bae", size = 8618102, upload-time = "2025-09-03T12:35:07.448Z" },
- { url = "https://files.pythonhosted.org/packages/15/20/50c9107d93cdb55676473d9dc4e2339af6af606660c9428d3b86a1b2a476/ty-0.0.1a20-py3-none-macosx_11_0_arm64.whl", hash = "sha256:f153b65c7fcb6b8b59547ddb6353761b3e8d8bb6f0edd15e3e3ac14405949f7a", size = 8192167, upload-time = "2025-09-03T12:35:09.706Z" },
- { url = "https://files.pythonhosted.org/packages/85/28/018b2f330109cee19e81c5ca9df3dc29f06c5778440eb9af05d4550c4302/ty-0.0.1a20-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8c4336987a6a781d4392a9fd7b3a39edb7e4f3dd4f860e03f46c932b52aefa2", size = 8349256, upload-time = "2025-09-03T12:35:11.76Z" },
- { url = "https://files.pythonhosted.org/packages/cd/c9/2f8797a05587158f52b142278796ffd72c893bc5ad41840fce5aeb65c6f2/ty-0.0.1a20-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3ff75cd4c744d09914e8c9db8d99e02f82c9379ad56b0a3fc4c5c9c923cfa84e", size = 8271214, upload-time = "2025-09-03T12:35:13.741Z" },
- { url = "https://files.pythonhosted.org/packages/30/d4/2cac5e5eb9ee51941358cb3139aadadb59520cfaec94e4fcd2b166969748/ty-0.0.1a20-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e26437772be7f7808868701f2bf9e14e706a6ec4c7d02dbd377ff94d7ba60c11", size = 9264939, upload-time = "2025-09-03T12:35:16.896Z" },
- { url = "https://files.pythonhosted.org/packages/93/96/a6f2b54e484b2c6a5488f217882237dbdf10f0fdbdb6cd31333d57afe494/ty-0.0.1a20-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:83a7ee12465841619b5eb3ca962ffc7d576bb1c1ac812638681aee241acbfbbe", size = 9743137, upload-time = "2025-09-03T12:35:19.799Z" },
- { url = "https://files.pythonhosted.org/packages/6e/67/95b40dcbec3d222f3af5fe5dd1ce066d42f8a25a2f70d5724490457048e7/ty-0.0.1a20-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:726d0738be4459ac7ffae312ba96c5f486d6cbc082723f322555d7cba9397871", size = 9368153, upload-time = "2025-09-03T12:35:22.569Z" },
- { url = "https://files.pythonhosted.org/packages/2c/24/689fa4c4270b9ef9a53dc2b1d6ffade259ba2c4127e451f0629e130ea46a/ty-0.0.1a20-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0b481f26513f38543df514189fb16744690bcba8d23afee95a01927d93b46e36", size = 9099637, upload-time = "2025-09-03T12:35:24.94Z" },
- { url = "https://files.pythonhosted.org/packages/a1/5b/913011cbf3ea4030097fb3c4ce751856114c9e1a5e1075561a4c5242af9b/ty-0.0.1a20-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7abbe3c02218c12228b1d7c5f98c57240029cc3bcb15b6997b707c19be3908c1", size = 8952000, upload-time = "2025-09-03T12:35:27.288Z" },
- { url = "https://files.pythonhosted.org/packages/df/f9/f5ba2ae455b20c5bb003f9940ef8142a8c4ed9e27de16e8f7472013609db/ty-0.0.1a20-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:fff51c75ee3f7cc6d7722f2f15789ef8ffe6fd2af70e7269ac785763c906688e", size = 8217938, upload-time = "2025-09-03T12:35:29.54Z" },
- { url = "https://files.pythonhosted.org/packages/eb/62/17002cf9032f0981cdb8c898d02422c095c30eefd69ca62a8b705d15bd0f/ty-0.0.1a20-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:b4124ab75e0e6f09fe7bc9df4a77ee43c5e0ef7e61b0c149d7c089d971437cbd", size = 8292369, upload-time = "2025-09-03T12:35:31.748Z" },
- { url = "https://files.pythonhosted.org/packages/28/d6/0879b1fb66afe1d01d45c7658f3849aa641ac4ea10679404094f3b40053e/ty-0.0.1a20-py3-none-musllinux_1_2_i686.whl", hash = "sha256:8a138fa4f74e6ed34e9fd14652d132409700c7ff57682c2fed656109ebfba42f", size = 8811973, upload-time = "2025-09-03T12:35:33.997Z" },
- { url = "https://files.pythonhosted.org/packages/60/1e/70bf0348cfe8ba5f7532983f53c508c293ddf5fa9f942ed79a3c4d576df3/ty-0.0.1a20-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:8eff8871d6b88d150e2a67beba2c57048f20c090c219f38ed02eebaada04c124", size = 9010990, upload-time = "2025-09-03T12:35:36.766Z" },
- { url = "https://files.pythonhosted.org/packages/b7/ca/03d85c7650359247b1ca3f38a0d869a608ef540450151920e7014ed58292/ty-0.0.1a20-py3-none-win32.whl", hash = "sha256:3c2ace3a22fab4bd79f84c74e3dab26e798bfba7006bea4008d6321c1bd6efc6", size = 8100746, upload-time = "2025-09-03T12:35:40.007Z" },
- { url = "https://files.pythonhosted.org/packages/94/53/7a1937b8c7a66d0c8ed7493de49ed454a850396fe137d2ae12ed247e0b2f/ty-0.0.1a20-py3-none-win_amd64.whl", hash = "sha256:f41e77ff118da3385915e13c3f366b3a2f823461de54abd2e0ca72b170ba0f19", size = 8748861, upload-time = "2025-09-03T12:35:42.175Z" },
- { url = "https://files.pythonhosted.org/packages/27/36/5a3a70c5d497d3332f9e63cabc9c6f13484783b832fecc393f4f1c0c4aa8/ty-0.0.1a20-py3-none-win_arm64.whl", hash = "sha256:d8ac1c5a14cda5fad1a8b53959d9a5d979fe16ce1cc2785ea8676fed143ac85f", size = 8269906, upload-time = "2025-09-03T12:35:45.045Z" },
-]
-
-[[package]]
-name = "typer"
-version = "0.17.4"
-source = { registry = "https://pypi.org/simple" }
-dependencies = [
- { name = "click" },
- { name = "rich" },
- { name = "shellingham" },
- { name = "typing-extensions" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/92/e8/2a73ccf9874ec4c7638f172efc8972ceab13a0e3480b389d6ed822f7a822/typer-0.17.4.tar.gz", hash = "sha256:b77dc07d849312fd2bb5e7f20a7af8985c7ec360c45b051ed5412f64d8dc1580", size = 103734, upload-time = "2025-09-05T18:14:40.746Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/93/72/6b3e70d32e89a5cbb6a4513726c1ae8762165b027af569289e19ec08edd8/typer-0.17.4-py3-none-any.whl", hash = "sha256:015534a6edaa450e7007eba705d5c18c3349dcea50a6ad79a5ed530967575824", size = 46643, upload-time = "2025-09-05T18:14:39.166Z" },
-]
-
-[[package]]
-name = "typing-extensions"
-version = "4.15.0"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" },
-]
-
-[[package]]
-name = "uuid6"
-version = "2025.0.1"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/ca/b7/4c0f736ca824b3a25b15e8213d1bcfc15f8ac2ae48d1b445b310892dc4da/uuid6-2025.0.1.tar.gz", hash = "sha256:cd0af94fa428675a44e32c5319ec5a3485225ba2179eefcf4c3f205ae30a81bd", size = 13932, upload-time = "2025-07-04T18:30:35.186Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/3d/b2/93faaab7962e2aa8d6e174afb6f76be2ca0ce89fde14d3af835acebcaa59/uuid6-2025.0.1-py3-none-any.whl", hash = "sha256:80530ce4d02a93cdf82e7122ca0da3ebbbc269790ec1cb902481fa3e9cc9ff99", size = 6979, upload-time = "2025-07-04T18:30:34.001Z" },
-]