From 6d0312194501f14692b1eb081ebe8f87d62004c2 Mon Sep 17 00:00:00 2001 From: pmalarme Date: Wed, 18 Feb 2026 16:05:41 +0100 Subject: [PATCH 01/90] feat: improve security posture and documentation --- .gitattributes | 1 + .github/agents/agentic-workflows.agent.md | 143 ++ .github/agents/security-reviewer.agent.md | 155 ++ .github/aw/actions-lock.json | 14 + .../agentic-workflows.instructions.md | 70 + .../copilot-agents.instructions.md | 66 + .github/workflows/checks.yml | 41 + .github/workflows/codeql-analysis.yml | 3 +- .github/workflows/docs.yml | 63 + .github/workflows/release.yml | 41 + .github/workflows/security-review.lock.yml | 1191 ++++++++++++++ .github/workflows/security-review.md | 78 + .gitignore | 1 - CODING_STANDARDS.md | 195 ++- DEVELOPMENT.md | 130 +- README.md | 689 ++++++-- .../agents/agent1/__init__.py | 4 +- .../agents/agent1/agent.py | 13 +- .../agents/agent1/validators/__init__.py | 5 + .../validators/blank_string_validator.py | 106 ++ .../agent1/validators/errors/__init__.py | 13 + .../validators/errors/empty_string_error.py | 12 + .../errors/missing_parameter_error.py | 13 + .../errors/none_not_allowed_error.py | 14 + .../validators/errors/string_type_error.py | 14 + agents/agent1/tests/test_agent.py | 11 +- .../tests/test_blank_string_validator.py | 71 + docs/manual/agent-guide-template.md | 13 + pyproject.toml | 23 +- shared_tasks.toml | 4 - uv.lock | 1448 +++++++++++++++++ 31 files changed, 4424 insertions(+), 221 deletions(-) create mode 100644 .gitattributes create mode 100644 .github/agents/agentic-workflows.agent.md create mode 100644 .github/agents/security-reviewer.agent.md create mode 100644 .github/aw/actions-lock.json create mode 100644 .github/instructions/agentic-workflows.instructions.md create mode 100644 .github/instructions/copilot-agents.instructions.md create mode 100644 .github/workflows/checks.yml create mode 100644 .github/workflows/docs.yml create mode 100644 .github/workflows/release.yml create mode 100644 
.github/workflows/security-review.lock.yml create mode 100644 .github/workflows/security-review.md create mode 100644 agents/agent1/src/python_agent_template/agents/agent1/validators/__init__.py create mode 100644 agents/agent1/src/python_agent_template/agents/agent1/validators/blank_string_validator.py create mode 100644 agents/agent1/src/python_agent_template/agents/agent1/validators/errors/__init__.py create mode 100644 agents/agent1/src/python_agent_template/agents/agent1/validators/errors/empty_string_error.py create mode 100644 agents/agent1/src/python_agent_template/agents/agent1/validators/errors/missing_parameter_error.py create mode 100644 agents/agent1/src/python_agent_template/agents/agent1/validators/errors/none_not_allowed_error.py create mode 100644 agents/agent1/src/python_agent_template/agents/agent1/validators/errors/string_type_error.py create mode 100644 agents/agent1/tests/test_blank_string_validator.py create mode 100644 uv.lock diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000..1b06f3e --- /dev/null +++ b/.gitattributes @@ -0,0 +1 @@ +.github/workflows/*.lock.yml linguist-generated=true merge=ours diff --git a/.github/agents/agentic-workflows.agent.md b/.github/agents/agentic-workflows.agent.md new file mode 100644 index 0000000..443fcaa --- /dev/null +++ b/.github/agents/agentic-workflows.agent.md @@ -0,0 +1,143 @@ +--- +description: GitHub Agentic Workflows (gh-aw) - Create, debug, and upgrade AI-powered workflows with intelligent prompt routing +disable-model-invocation: true +--- + +# GitHub Agentic Workflows Agent + +This agent helps you work with **GitHub Agentic Workflows (gh-aw)**, a CLI extension for creating AI-powered workflows in natural language using markdown files. 
+ +## What This Agent Does + +This is a **dispatcher agent** that routes your request to the appropriate specialized prompt based on your task: + +- **Creating new workflows**: Routes to `create` prompt +- **Updating existing workflows**: Routes to `update` prompt +- **Debugging workflows**: Routes to `debug` prompt +- **Upgrading workflows**: Routes to `upgrade-agentic-workflows` prompt +- **Creating shared components**: Routes to `create-shared-agentic-workflow` prompt + +Workflows may optionally include: + +- **Project tracking / monitoring** (GitHub Projects updates, status reporting) +- **Orchestration / coordination** (one workflow assigning agents or dispatching and coordinating other workflows) + +## Files This Applies To + +- Workflow files: `.github/workflows/*.md` and `.github/workflows/**/*.md` +- Workflow lock files: `.github/workflows/*.lock.yml` +- Shared components: `.github/workflows/shared/*.md` +- Configuration: https://github.com/github/gh-aw/blob/v0.46.0/.github/aw/github-agentic-workflows.md + +## Problems This Solves + +- **Workflow Creation**: Design secure, validated agentic workflows with proper triggers, tools, and permissions +- **Workflow Debugging**: Analyze logs, identify missing tools, investigate failures, and fix configuration issues +- **Version Upgrades**: Migrate workflows to new gh-aw versions, apply codemods, fix breaking changes +- **Component Design**: Create reusable shared workflow components that wrap MCP servers + +## How to Use + +When you interact with this agent, it will: + +1. **Understand your intent** - Determine what kind of task you're trying to accomplish +2. **Route to the right prompt** - Load the specialized prompt file for your task +3. 
**Execute the task** - Follow the detailed instructions in the loaded prompt + +## Available Prompts + +### Create New Workflow +**Load when**: User wants to create a new workflow from scratch, add automation, or design a workflow that doesn't exist yet + +**Prompt file**: https://github.com/github/gh-aw/blob/v0.46.0/.github/aw/create-agentic-workflow.md + +**Use cases**: +- "Create a workflow that triages issues" +- "I need a workflow to label pull requests" +- "Design a weekly research automation" + +### Update Existing Workflow +**Load when**: User wants to modify, improve, or refactor an existing workflow + +**Prompt file**: https://github.com/github/gh-aw/blob/v0.46.0/.github/aw/update-agentic-workflow.md + +**Use cases**: +- "Add web-fetch tool to the issue-classifier workflow" +- "Update the PR reviewer to use discussions instead of issues" +- "Improve the prompt for the weekly-research workflow" + +### Debug Workflow +**Load when**: User needs to investigate, audit, debug, or understand a workflow, troubleshoot issues, analyze logs, or fix errors + +**Prompt file**: https://github.com/github/gh-aw/blob/v0.46.0/.github/aw/debug-agentic-workflow.md + +**Use cases**: +- "Why is this workflow failing?" 
+- "Analyze the logs for workflow X" +- "Investigate missing tool calls in run #12345" + +### Upgrade Agentic Workflows +**Load when**: User wants to upgrade workflows to a new gh-aw version or fix deprecations + +**Prompt file**: https://github.com/github/gh-aw/blob/v0.46.0/.github/aw/upgrade-agentic-workflows.md + +**Use cases**: +- "Upgrade all workflows to the latest version" +- "Fix deprecated fields in workflows" +- "Apply breaking changes from the new release" + +### Create Shared Agentic Workflow +**Load when**: User wants to create a reusable workflow component or wrap an MCP server + +**Prompt file**: https://github.com/github/gh-aw/blob/v0.46.0/.github/aw/create-shared-agentic-workflow.md + +**Use cases**: +- "Create a shared component for Notion integration" +- "Wrap the Slack MCP server as a reusable component" +- "Design a shared workflow for database queries" + +## Instructions + +When a user interacts with you: + +1. **Identify the task type** from the user's request +2. **Load the appropriate prompt** from the GitHub repository URLs listed above +3. **Follow the loaded prompt's instructions** exactly +4. 
**If uncertain**, ask clarifying questions to determine the right prompt + +## Quick Reference + +```bash +# Initialize repository for agentic workflows +gh aw init + +# Generate the lock file for a workflow +gh aw compile [workflow-name] + +# Debug workflow runs +gh aw logs [workflow-name] +gh aw audit + +# Upgrade workflows +gh aw fix --write +gh aw compile --validate +``` + +## Key Features of gh-aw + +- **Natural Language Workflows**: Write workflows in markdown with YAML frontmatter +- **AI Engine Support**: Copilot, Claude, Codex, or custom engines +- **MCP Server Integration**: Connect to Model Context Protocol servers for tools +- **Safe Outputs**: Structured communication between AI and GitHub API +- **Strict Mode**: Security-first validation and sandboxing +- **Shared Components**: Reusable workflow building blocks +- **Repo Memory**: Persistent git-backed storage for agents +- **Sandboxed Execution**: All workflows run in the Agent Workflow Firewall (AWF) sandbox, enabling full `bash` and `edit` tools by default + +## Important Notes + +- Always reference the instructions file at https://github.com/github/gh-aw/blob/v0.46.0/.github/aw/github-agentic-workflows.md for complete documentation +- Use the MCP tool `agentic-workflows` when running in GitHub Copilot Cloud +- Workflows must be compiled to `.lock.yml` files before running in GitHub Actions +- **Bash tools are enabled by default** - Don't restrict bash commands unnecessarily since workflows are sandboxed by the AWF +- Follow security best practices: minimal permissions, explicit network access, no template injection diff --git a/.github/agents/security-reviewer.agent.md b/.github/agents/security-reviewer.agent.md new file mode 100644 index 0000000..6a787d5 --- /dev/null +++ b/.github/agents/security-reviewer.agent.md @@ -0,0 +1,155 @@ +--- +name: Security Reviewer Agent +description: Reviews code changes against 15 security posture categories for a Python monorepo that builds AI agents. 
+model: GPT-5.3-Codex (copilot) +--- + +# Security Reviewer Agent + +You are a security reviewer for a Python monorepo that builds AI agents. Your job is to review code changes for security issues, following the project's coding standards and secure-by-default principles. Be thorough, specific, and actionable. Reference file paths and line numbers when reporting findings. + +Review every changed file against **all** of the following security postures. For each category, flag violations and suggest concrete fixes. + +--- + +## 1. Input Validation and Sanitization + +- [ ] All external inputs are validated at the boundary before use: user data, CLI arguments, configuration values, environment variables, webhook payloads, queue messages, HTTP request bodies/headers/query parameters, and model/tool outputs. +- [ ] Guard clauses reject invalid, unexpected, or out-of-range values early with clear, actionable error messages. +- [ ] Allowlists are preferred over denylists when constraining input values. +- [ ] String inputs are checked for blank/whitespace-only values where semantically required. +- [ ] Numeric inputs are bounds-checked (min/max, overflow, negative values). +- [ ] Untrusted inputs are never interpolated directly into shell commands, SQL queries, URL paths, log messages, templates, or external API calls without sanitization. +- [ ] File paths derived from user input are validated against path traversal (`../`, absolute paths, symlink escapes). +- [ ] Deserialization of untrusted data avoids unsafe methods (`pickle.loads`, `yaml.unsafe_load`, `eval`, `exec`). + +## 2. Secrets and Credentials + +- [ ] No secrets, tokens, API keys, passwords, connection strings, or private keys appear in source code, tests, configuration files, comments, or documentation. +- [ ] Secrets are loaded exclusively from environment variables, secret stores, or secure configuration systems. 
+- [ ] Secrets never appear in log output, exception messages, error responses, stack traces, or debug dumps. +- [ ] `.env` files or secret fixtures are listed in `.gitignore` and never committed. +- [ ] Default values for configuration do not contain real credentials or placeholder strings that look like credentials (e.g., `password123`, `changeme`). +- [ ] Secrets are not passed as command-line arguments (visible in process listings). + +## 3. Subprocess and Command Execution + +- [ ] No use of `shell=True` in `subprocess.run`, `subprocess.Popen`, or similar APIs. +- [ ] Subprocess arguments are passed as lists, never as concatenated strings. +- [ ] Any user/model/config-derived values influencing subprocess arguments are validated and sanitized before use. +- [ ] Timeouts are set on subprocess calls to prevent unbounded waits. +- [ ] `check=True` is used (or return codes are explicitly handled) to avoid silent failures. +- [ ] No use of `os.system`, `os.popen`, or backtick-style execution. +- [ ] No use of `eval()`, `exec()`, `compile()` with untrusted input. + +## 4. Network and HTTP Security + +- [ ] HTTPS is used for all network communication; plain HTTP is not used for sensitive data. +- [ ] Timeouts are configured on all HTTP clients and network connections. +- [ ] Retries are configured with backoff to avoid overwhelming services. +- [ ] TLS certificate verification is not disabled (`verify=False`, `PYTHONHTTPSVERIFY=0`). +- [ ] Redirect following is limited or disabled where untrusted URLs are involved. +- [ ] URLs constructed from user input are validated against SSRF (Server-Side Request Forgery) — no arbitrary internal network access. +- [ ] Response data from external services is validated before use, not blindly trusted. +- [ ] CORS, CSP, and other security headers are set appropriately if the agent exposes HTTP endpoints. + +## 5. 
Authentication and Authorization + +- [ ] Authentication mechanisms use established libraries or frameworks, not custom implementations. +- [ ] API keys and bearer tokens are transmitted only over HTTPS and in headers, not in URLs or query parameters. +- [ ] Authorization checks enforce the principle of least privilege: code requests only the permissions, scopes, and access levels strictly necessary. +- [ ] Service accounts and bot credentials use minimal required permissions. +- [ ] Token expiry and refresh are handled correctly; expired tokens are not reused. + +## 6. Logging and Observability Hygiene + +- [ ] No secrets, tokens, credentials, API keys, or PII appear in log output at any level. +- [ ] Sensitive fields are redacted or masked before logging (e.g., `api_key=***`). +- [ ] Structured logging or `%`-style lazy formatting is used; no f-strings in log calls. +- [ ] Log levels are appropriate: no sensitive debug output at INFO or above in production paths. +- [ ] Error messages exposed to users do not leak internal implementation details, stack traces, or file paths. +- [ ] `print` and `pprint` are not used (enforced by Ruff T20); all output goes through `logging`. + +## 7. Error Handling and Information Disclosure + +- [ ] Exception messages are actionable but do not leak secrets, internal paths, stack traces, or infrastructure details to external callers. +- [ ] Broad exception catches (`except Exception`, bare `except`) are justified and do not silently swallow security-relevant failures. +- [ ] Custom exceptions follow project conventions (`__slots__ = ()`, inherit from closest builtin, structured context). +- [ ] Failed authentication/authorization attempts return generic messages (e.g., "unauthorized") — do not reveal whether the user/token exists. +- [ ] Error responses to external callers do not differ in ways that enable enumeration attacks. + +## 8. 
Dependency and Supply Chain Security + +- [ ] `uv.lock` is in sync with `pyproject.toml`; no lock file drift. +- [ ] New dependencies are from well-known, actively maintained packages. +- [ ] Dependency updates are reviewed for changelogs and security advisories before merging. +- [ ] No `--no-verify`, `--trusted-host`, or pip `--index-url` overrides pointing to untrusted registries. +- [ ] No vendored or copy-pasted third-party code without license and provenance review. +- [ ] GitHub Actions dependencies use pinned versions (SHA or tag), not mutable references like `@main`. + +## 9. File System and Resource Safety + +- [ ] Temporary files and directories are created securely (`tempfile.mkstemp`, `tempfile.TemporaryDirectory`) — not with predictable names. +- [ ] File paths from untrusted sources are canonicalized and checked against an allowed base directory before access. +- [ ] File permissions are set restrictively for sensitive outputs (e.g., credentials files, key material). +- [ ] Large file reads or writes are bounded to prevent denial-of-service through resource exhaustion. +- [ ] Uploaded or downloaded files are size-limited and type-validated. + +## 10. Cryptography and Randomness + +- [ ] Cryptographic operations use standard library modules (`hashlib`, `hmac`, `secrets`) or established libraries (`cryptography`), not custom implementations. +- [ ] Insecure hash algorithms (MD5, SHA1) are not used for security-sensitive purposes (password hashing, integrity verification, signatures). +- [ ] Random values used for security purposes (tokens, nonces, session IDs) use `secrets` module, not `random`. +- [ ] Constant-time comparison (`hmac.compare_digest`) is used for token/signature validation to prevent timing attacks. + +## 11. Configuration and Environment + +- [ ] Configuration is validated at startup; invalid or missing required values cause a clear, fast failure. +- [ ] Default configuration values are safe and documented. 
+- [ ] Debug modes, verbose logging, and development-only features are not enabled in production configuration paths. +- [ ] Feature flags or environment toggles that disable security controls are clearly documented and auditable. + +## 12. Concurrency and Race Conditions + +- [ ] Shared mutable state is protected by appropriate synchronization (locks, queues, atomic operations). +- [ ] TOCTOU (time-of-check-time-of-use) patterns on files or resources are avoided or mitigated. +- [ ] Async code does not block the event loop with synchronous I/O or CPU-bound work. +- [ ] `CancelledError` is not silently swallowed in async code. + +## 13. Container and Deployment Security (Dockerfile) + +- [ ] Container images use a minimal base image and do not run as root. +- [ ] Only required files are copied into the image; `.dockerignore` excludes secrets, tests, and dev artifacts. +- [ ] No secrets are baked into image layers (build args, ENV, COPY). +- [ ] Packages and dependencies are installed from pinned, verified sources. +- [ ] Health checks do not expose sensitive information. + +## 14. CI/CD and GitHub Actions Security + +- [ ] Workflow permissions follow least privilege (`permissions:` block scoped narrowly). +- [ ] Secrets are accessed via `${{ secrets.* }}`, never hardcoded in workflow files. +- [ ] Third-party GitHub Actions are pinned by SHA, not mutable tags. +- [ ] `persist-credentials: false` is set on checkout steps where credentials are not needed. +- [ ] Pull request workflows from forks do not have write access to the repository. +- [ ] Workflow artifacts do not contain secrets or sensitive data. + +## 15. Test Security Coverage + +- [ ] Tests cover failure paths for security-sensitive boundaries (bad inputs, rejected credentials, invalid tokens, unauthorized access). +- [ ] Tests verify that validation rejects known-bad inputs (injection payloads, path traversals, oversized inputs). +- [ ] Tests confirm that secrets do not appear in logs or error output. 
+- [ ] No real credentials, tokens, or external service calls appear in unit tests. +- [ ] Security-sensitive test fixtures use obviously fake values (e.g., `fake-token-for-testing`). + +--- + +## Output Format + +For each finding, report: +1. **Category** (from the list above) +2. **Severity** (critical / high / medium / low / informational) +3. **File and line** (exact location) +4. **Description** (what is wrong and why it matters) +5. **Recommendation** (specific fix or mitigation) + +If no issues are found in a category, state that explicitly. Summarize the overall security posture at the end. diff --git a/.github/aw/actions-lock.json b/.github/aw/actions-lock.json new file mode 100644 index 0000000..404454a --- /dev/null +++ b/.github/aw/actions-lock.json @@ -0,0 +1,14 @@ +{ + "entries": { + "actions/github-script@v8": { + "repo": "actions/github-script", + "version": "v8", + "sha": "ed597411d8f924073f98dfc5c65a23a2325f34cd" + }, + "github/gh-aw/actions/setup@v0.46.0": { + "repo": "github/gh-aw/actions/setup", + "version": "v0.46.0", + "sha": "f88ec26c65cc20ebb8ceabe809c9153385945bfe" + } + } +} diff --git a/.github/instructions/agentic-workflows.instructions.md b/.github/instructions/agentic-workflows.instructions.md new file mode 100644 index 0000000..0b4db03 --- /dev/null +++ b/.github/instructions/agentic-workflows.instructions.md @@ -0,0 +1,70 @@ +--- +applyTo: ".github/workflows/*.md" +--- + +# Copilot Instructions (Agentic Workflows) + +Agentic workflow files are [GitHub Agentic Workflows](https://github.github.com/gh-aw/) markdown files compiled to GitHub Actions `.lock.yml` files via `gh aw compile`. + +## File Format + +- Each workflow is a Markdown file in `.github/workflows/` with YAML frontmatter between `---` markers followed by a Markdown body. +- The frontmatter defines triggers, permissions, tools, safe-outputs, imports, and other configuration. The Markdown body contains the natural-language prompt executed by the AI engine at runtime. 
+- Only frontmatter changes require recompilation (`gh aw compile`). Edits to the Markdown body take effect at runtime without recompiling. + +## Frontmatter Reference + +- `on:` — Standard GitHub Actions trigger syntax (e.g., `pull_request`, `issues`, `schedule`). Extended with `reaction:`, `stop-after:`, `manual-approval:`, `forks:`, `skip-roles:`, `skip-bots:`. +- `description:` — Human-readable workflow description rendered as a comment in the lock file. +- `engine:` — AI engine (`copilot`, `claude`, `codex`, `custom`). Defaults to `copilot` if omitted. +- `imports:` — List of shared workflow components or agent files. Paths are **relative to the importing file** (e.g., `../agents/my-agent.md`). Only one agent file (from `.github/agents/`) can be imported per workflow. +- `permissions:` — Standard GitHub Actions permissions. When using safe-outputs for write actions (comments, reviews), the workflow itself typically only needs `read` permissions; the safe-output sandbox handles writes. +- `tools:` — Tool configurations: `bash`, `edit`, `github`, `web-fetch`, `web-search`, `playwright`, `cache-memory`, and MCP servers. +- `safe-outputs:` — Constrained write operations the AI can perform (e.g., `create-pull-request-review-comment`, `submit-pull-request-review`, `add-reviewer`, `create-issue-comment`). Each has a `max:` limit. +- `safe-inputs:` — Custom MCP tools defined inline using JavaScript or shell scripts. +- `network:` — Network access controls with domain allowlists and ecosystem identifiers (e.g., `python`, `node`, `defaults`). +- `strict:` — Enables enhanced security validation (default: `true`). Set `false` only for development/testing. +- `roles:` — Repository permission levels allowed to trigger the workflow. Defaults to `[admin, maintainer, write]`. +- `runs-on:` — Runner label (default: `ubuntu-latest`). +- `timeout-minutes:` — Workflow timeout (default: `20`). Use hyphen, not underscore. +- `concurrency:` — Concurrency policy for the agent job. 
+- `steps:` — Custom steps that run **before** agentic execution (outside the firewall sandbox; use only for deterministic preparation). +- `post-steps:` — Custom steps that run **after** agentic execution (outside the sandbox; use for cleanup/artifacts). +- `jobs:` — Custom jobs that run before the agentic job (outside the sandbox). +- `env:` — Workflow-level environment variables. +- `secrets:` — Secret values passed to the workflow (always use `${{ secrets.NAME }}`). +- `runtimes:` — Override default runtime versions (e.g., `node`, `python`, `uv`). +- `cache:` — Cache configuration using `actions/cache` syntax. + +## Markdown Body + +- The Markdown body is the prompt sent to the AI engine. Write clear, specific instructions. +- Use `${{ github.event.* }}` expressions to reference trigger context (e.g., PR number, issue body). +- Use `${{ needs.job-name.outputs.* }}` to reference outputs from custom `jobs:`. +- Structure the prompt with headings, numbered steps, and bullet points for clarity. +- Be explicit about expected behavior: what to review, what actions to take, and how to format output. + +## Imports + +- **Relative paths**: Import paths are resolved relative to the importing file. From `.github/workflows/`, use `../agents/my-agent.md` for agent files. +- **Remote imports**: Use `owner/repo/path@ref` format (e.g., `acme/shared-workflows/tools.md@v1.0.0`). +- **One agent per workflow**: Only one `.github/agents/` file can be imported per workflow. +- **Shared components**: Files without an `on:` field are shared components — validated but not compiled into Actions. +- **Frontmatter merging**: Imported `tools:`, `mcp-servers:`, `safe-outputs:`, `network:`, `runtimes:`, `services:`, and `steps:` are merged into the main workflow. `permissions:` are validated but not merged — the main workflow must declare all required permissions. + +## Compilation + +- Compile all workflows: `gh aw compile`. 
+- Compile a specific workflow: `gh aw compile <workflow-name>` (the basename without `.md`).
+- `description:` — Brief description of the agent's purpose and scope. +- `tools:` — Tool configurations available to the agent (merged into importing workflows). +- `mcp-servers:` — MCP server configurations (merged into importing workflows). +- `disable-model-invocation:` — Set `true` for dispatcher/routing agents that only organize other agents without invoking the model themselves. + +Do **not** add fields that belong in workflows (e.g., `on:`, `permissions:`, `engine:`, `safe-outputs:`). These are set by the importing workflow, not the agent. + +## Markdown Body (Prompt) + +- Start with a clear role statement: who the agent is and what it does. +- Be specific about the task scope, methodology, and expected output format. +- Use headings and checklists to organize categories of work. +- Include concrete examples of good and bad patterns when helpful. +- Reference project conventions (e.g., `CODING_STANDARDS.md`) for context. +- Keep prompts actionable — every instruction should translate to observable AI behavior. + +## Agent Design Guidelines + +- **Single responsibility**: Each agent should focus on one well-defined task (e.g., security review, code review, documentation review). Compose agents via separate workflows rather than making one agent do everything. +- **Be thorough but bounded**: List all categories/checks the agent should evaluate, but keep each check concise. Use checklists (`- [ ]`) for systematic coverage. +- **Specify output format**: Define how findings should be reported (e.g., severity levels, file/line references, structured tables). This ensures consistent, actionable output. +- **Severity classification**: When the agent identifies issues, define a severity scale (e.g., critical, high, medium, low, informational) and explain how each level maps to actions. +- **Minimize false positives**: Include guidance like "only flag issues you are confident about" and "state uncertainty as informational." 
+- **Project context**: Reference the project's tech stack, conventions, and standards so the agent's recommendations are relevant (e.g., "This is a Python monorepo using Ruff, Pyright strict, Bandit"). + +## Importing into Agentic Workflows + +- Import via the `imports:` field in a workflow's frontmatter. Paths are relative to the importing file (e.g., `../agents/my-agent.agent.md` from `.github/workflows/`). +- Only **one agent** can be imported per workflow. +- Agent `tools:` and `mcp-servers:` are merged into the workflow. All other frontmatter fields (`name`, `description`) are metadata only. +- Agents can also be imported from remote repositories: `owner/repo/.github/agents/agent.agent.md@v1.0.0`. + +## Using in Copilot Chat + +- In GitHub Copilot Chat, type `/agent` and select the agent by name. +- The agent prompt is injected as system-level context, shaping how Copilot responds. +- Dispatcher agents (with `disable-model-invocation: true`) route requests to specialized prompts or workflows. + +## Naming Conventions + +- All agent files must use the `*.agent.md` suffix: `security-reviewer.agent.md`, `code-reviewer.agent.md`, `docs-checker.agent.md`. +- Use kebab-case for the base name. +- The `name:` field in frontmatter should be a human-readable title (e.g., `Security Reviewer Agent`). +- The `agentic-workflows.agent.md` file is reserved for the gh-aw dispatcher agent created by `gh aw init`. + +## Security + +- Never include secrets, tokens, or credentials in agent prompts. +- Do not instruct agents to bypass security controls, disable TLS verification, or ignore errors. +- Agent prompts should reinforce project security standards (e.g., referencing `CODING_STANDARDS.md` security section). 
diff --git a/.github/workflows/checks.yml b/.github/workflows/checks.yml new file mode 100644 index 0000000..788fbce --- /dev/null +++ b/.github/workflows/checks.yml @@ -0,0 +1,41 @@ +name: checks + +on: + workflow_dispatch: + pull_request: + push: + branches: ["main", "feature*", "fix*"] + +permissions: + contents: read + +jobs: + lint-test: + name: lint, type-check, test (python ${{ matrix.python-version }}) + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + python-version: ["3.10", "3.11", "3.12", "3.13"] + + steps: + - name: Checkout + uses: actions/checkout@v6 + with: + persist-credentials: false + + - name: Set up uv + uses: astral-sh/setup-uv@v5 + with: + python-version: ${{ matrix.python-version }} + enable-cache: true + + - name: Install dependencies + run: uv sync --all-extras --dev + + - name: Run checks + run: uv run poe check + + - name: Run changed-agents lint (fast path) + if: ${{ github.event_name == 'pull_request' }} + run: uv run python scripts/run_tasks_in_changed_agents.py lint diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index bd0c8ee..ce8585c 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -2,10 +2,9 @@ name: "CodeQL Analysis" on: workflow_dispatch: + pull_request: push: branches: [ "main", "feature*", "fix*" ] - pull_request: - branches: [ "main" ] schedule: - cron: '45 1 * * 1' diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml new file mode 100644 index 0000000..bd9efc1 --- /dev/null +++ b/.github/workflows/docs.yml @@ -0,0 +1,63 @@ +name: docs + +on: + workflow_dispatch: + push: + branches: ["main"] + paths: + - "docs/**" + - "agents/*/docs/**" + - "agents/*/src/**" + - "scripts/generate_docs.py" + - "pyproject.toml" + +permissions: + contents: read + pages: write + id-token: write + +concurrency: + group: pages + cancel-in-progress: true + +jobs: + build: + name: build docs + runs-on: ubuntu-latest + + steps: 
+ - name: Checkout + uses: actions/checkout@v6 + with: + persist-credentials: false + + - name: Set up uv + uses: astral-sh/setup-uv@v5 + with: + python-version: "3.13" + enable-cache: true + + - name: Install dependencies (with docs group) + run: uv sync --all-packages --all-extras --dev -U --prerelease=if-necessary-or-explicit --group docs + + - name: Build documentation + run: uv run python scripts/generate_docs.py + + - name: Upload Pages artifact + uses: actions/upload-pages-artifact@v4 + with: + path: docs/generated + + deploy: + name: deploy to GitHub Pages + needs: build + runs-on: ubuntu-latest + + environment: + name: github-pages + url: ${{ steps.deployment.outputs.page_url }} + + steps: + - name: Deploy to GitHub Pages + id: deployment + uses: actions/deploy-pages@v4 diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000..bc03407 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,41 @@ +name: release + +on: + workflow_dispatch: + release: + types: [published] + +permissions: + contents: read + packages: write + +jobs: + publish: + name: build and publish + runs-on: ubuntu-latest + + steps: + - name: Checkout + uses: actions/checkout@v6 + with: + persist-credentials: false + + - name: Set up uv + uses: astral-sh/setup-uv@v5 + with: + python-version: "3.13" + enable-cache: true + + - name: Install dependencies + run: uv sync --all-extras --dev + + - name: Build changed agent packages + run: uv run poe build-changed + + - name: Publish to GitHub Packages + env: + UV_PUBLISH_URL: https://nuget.pkg.github.com/${{ github.repository_owner }}/upload + UV_PUBLISH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: uv run poe publish + # Each agent has its own version; the registry rejects duplicate + # versions, so only agents with bumped versions are actually uploaded. 
diff --git a/.github/workflows/security-review.lock.yml b/.github/workflows/security-review.lock.yml new file mode 100644 index 0000000..76120ae --- /dev/null +++ b/.github/workflows/security-review.lock.yml @@ -0,0 +1,1191 @@ +# +# ___ _ _ +# / _ \ | | (_) +# | |_| | __ _ ___ _ __ | |_ _ ___ +# | _ |/ _` |/ _ \ '_ \| __| |/ __| +# | | | | (_| | __/ | | | |_| | (__ +# \_| |_/\__, |\___|_| |_|\__|_|\___| +# __/ | +# _ _ |___/ +# | | | | / _| | +# | | | | ___ _ __ _ __| |_| | _____ ____ +# | |/\| |/ _ \ '__| |/ /| _| |/ _ \ \ /\ / / ___| +# \ /\ / (_) | | | | ( | | | | (_) \ V V /\__ \ +# \/ \/ \___/|_| |_|\_\|_| |_|\___/ \_/\_/ |___/ +# +# This file was automatically generated by gh-aw (v0.46.0). DO NOT EDIT. +# +# To update this file, edit the corresponding .md file and run: +# gh aw compile +# Not all edits will cause changes to this file. +# +# For more information: https://github.github.com/gh-aw/introduction/overview/ +# +# Automated security review for pull requests. Analyzes changed files against 15 security posture categories and posts inline review comments on findings, then requests Copilot code review. 
+# +# Resolved workflow manifest: +# Imports: +# - ../agents/security-reviewer.agent.md +# +# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"eb4207411f27c5086812defd1c381fa0ac703b11169172ae6b0f2ef12abc08b2"} + +name: "Security Review" +"on": + pull_request: + types: + - opened + - synchronize + +permissions: {} + +concurrency: + group: "gh-aw-${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}" + cancel-in-progress: true + +run-name: "Security Review" + +jobs: + activation: + needs: pre_activation + if: > + (needs.pre_activation.outputs.activated == 'true') && ((github.event_name != 'pull_request') || (github.event.pull_request.head.repo.id == github.repository_id)) + runs-on: ubuntu-slim + permissions: + contents: read + outputs: + body: ${{ steps.sanitized.outputs.body }} + comment_id: "" + comment_repo: "" + text: ${{ steps.sanitized.outputs.text }} + title: ${{ steps.sanitized.outputs.title }} + steps: + - name: Setup Scripts + uses: github/gh-aw/actions/setup@f88ec26c65cc20ebb8ceabe809c9153385945bfe # v0.46.0 + with: + destination: /opt/gh-aw/actions + - name: Validate context variables + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/validate_context_variables.cjs'); + await main(); + - name: Checkout .github and .agents folders + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + sparse-checkout: | + .github + .agents + fetch-depth: 1 + persist-credentials: false + - name: Check workflow file timestamps + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_WORKFLOW_FILE: "security-review.lock.yml" + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); 
+ const { main } = require('/opt/gh-aw/actions/check_workflow_timestamp_api.cjs'); + await main(); + - name: Compute current body text + id: sanitized + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/compute_text.cjs'); + await main(); + - name: Create prompt with built-in context + env: + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }} + GH_AW_GITHUB_ACTOR: ${{ github.actor }} + GH_AW_GITHUB_EVENT_COMMENT_ID: ${{ github.event.comment.id }} + GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER: ${{ github.event.discussion.number }} + GH_AW_GITHUB_EVENT_ISSUE_NUMBER: ${{ github.event.issue.number }} + GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER: ${{ github.event.pull_request.number }} + GH_AW_GITHUB_REPOSITORY: ${{ github.repository }} + GH_AW_GITHUB_RUN_ID: ${{ github.run_id }} + GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }} + run: | + bash /opt/gh-aw/actions/create_prompt_first.sh + cat << 'GH_AW_PROMPT_EOF' > "$GH_AW_PROMPT" + + GH_AW_PROMPT_EOF + cat "/opt/gh-aw/prompts/xpia.md" >> "$GH_AW_PROMPT" + cat "/opt/gh-aw/prompts/temp_folder_prompt.md" >> "$GH_AW_PROMPT" + cat "/opt/gh-aw/prompts/markdown.md" >> "$GH_AW_PROMPT" + cat << 'GH_AW_PROMPT_EOF' >> "$GH_AW_PROMPT" + + GitHub API Access Instructions + + The gh CLI is NOT authenticated. Do NOT use gh commands for GitHub operations. + + + To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls. + + Temporary IDs: Some safe output tools support a temporary ID field (usually named temporary_id) so you can reference newly-created items elsewhere in the SAME agent output (for example, using #aw_abc1 in a later body). 
+ + **IMPORTANT - temporary_id format rules:** + - If you DON'T need to reference the item later, OMIT the temporary_id field entirely (it will be auto-generated if needed) + - If you DO need cross-references/chaining, you MUST match this EXACT validation regex: /^aw_[A-Za-z0-9]{3,8}$/i + - Format: aw_ prefix followed by 3 to 8 alphanumeric characters (A-Z, a-z, 0-9, case-insensitive) + - Valid alphanumeric characters: ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789 + - INVALID examples: aw_ab (too short), aw_123456789 (too long), aw_test-id (contains hyphen), aw_id_123 (contains underscore) + - VALID examples: aw_abc, aw_abc1, aw_Test123, aw_A1B2C3D4, aw_12345678 + - To generate valid IDs: use 3-8 random alphanumeric characters or omit the field to let the system auto-generate + + Do NOT invent other aw_* formats — downstream steps will reject them with validation errors matching against /^aw_[A-Za-z0-9]{3,8}$/i. + + Discover available tools from the safeoutputs MCP server. + + **Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped. + + **Note**: If you made no other safe output tool calls during this workflow execution, call the "noop" tool to provide a status message indicating completion or that no actions were needed. 
+ + + + The following GitHub context information is available for this workflow: + {{#if __GH_AW_GITHUB_ACTOR__ }} + - **actor**: __GH_AW_GITHUB_ACTOR__ + {{/if}} + {{#if __GH_AW_GITHUB_REPOSITORY__ }} + - **repository**: __GH_AW_GITHUB_REPOSITORY__ + {{/if}} + {{#if __GH_AW_GITHUB_WORKSPACE__ }} + - **workspace**: __GH_AW_GITHUB_WORKSPACE__ + {{/if}} + {{#if __GH_AW_GITHUB_EVENT_ISSUE_NUMBER__ }} + - **issue-number**: #__GH_AW_GITHUB_EVENT_ISSUE_NUMBER__ + {{/if}} + {{#if __GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER__ }} + - **discussion-number**: #__GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER__ + {{/if}} + {{#if __GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER__ }} + - **pull-request-number**: #__GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER__ + {{/if}} + {{#if __GH_AW_GITHUB_EVENT_COMMENT_ID__ }} + - **comment-id**: __GH_AW_GITHUB_EVENT_COMMENT_ID__ + {{/if}} + {{#if __GH_AW_GITHUB_RUN_ID__ }} + - **workflow-run-id**: __GH_AW_GITHUB_RUN_ID__ + {{/if}} + + + GH_AW_PROMPT_EOF + cat << 'GH_AW_PROMPT_EOF' >> "$GH_AW_PROMPT" + + GH_AW_PROMPT_EOF + cat << 'GH_AW_PROMPT_EOF' >> "$GH_AW_PROMPT" + {{#runtime-import .github/agents/security-reviewer.agent.md}} + GH_AW_PROMPT_EOF + cat << 'GH_AW_PROMPT_EOF' >> "$GH_AW_PROMPT" + {{#runtime-import .github/workflows/security-review.md}} + GH_AW_PROMPT_EOF + - name: Interpolate variables and render templates + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER: ${{ github.event.pull_request.number }} + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/interpolate_prompt.cjs'); + await main(); + - name: Substitute placeholders + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + GH_AW_GITHUB_ACTOR: ${{ github.actor }} + 
GH_AW_GITHUB_EVENT_COMMENT_ID: ${{ github.event.comment.id }} + GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER: ${{ github.event.discussion.number }} + GH_AW_GITHUB_EVENT_ISSUE_NUMBER: ${{ github.event.issue.number }} + GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER: ${{ github.event.pull_request.number }} + GH_AW_GITHUB_REPOSITORY: ${{ github.repository }} + GH_AW_GITHUB_RUN_ID: ${{ github.run_id }} + GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }} + GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_ACTIVATED: ${{ needs.pre_activation.outputs.activated }} + GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_MATCHED_COMMAND: ${{ needs.pre_activation.outputs.matched_command }} + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + + const substitutePlaceholders = require('/opt/gh-aw/actions/substitute_placeholders.cjs'); + + // Call the substitution function + return await substitutePlaceholders({ + file: process.env.GH_AW_PROMPT, + substitutions: { + GH_AW_GITHUB_ACTOR: process.env.GH_AW_GITHUB_ACTOR, + GH_AW_GITHUB_EVENT_COMMENT_ID: process.env.GH_AW_GITHUB_EVENT_COMMENT_ID, + GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER: process.env.GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER, + GH_AW_GITHUB_EVENT_ISSUE_NUMBER: process.env.GH_AW_GITHUB_EVENT_ISSUE_NUMBER, + GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER: process.env.GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER, + GH_AW_GITHUB_REPOSITORY: process.env.GH_AW_GITHUB_REPOSITORY, + GH_AW_GITHUB_RUN_ID: process.env.GH_AW_GITHUB_RUN_ID, + GH_AW_GITHUB_WORKSPACE: process.env.GH_AW_GITHUB_WORKSPACE, + GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_ACTIVATED: process.env.GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_ACTIVATED, + GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_MATCHED_COMMAND: process.env.GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_MATCHED_COMMAND + } + }); + - name: Validate prompt placeholders + env: + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + run: bash /opt/gh-aw/actions/validate_prompt_placeholders.sh + - name: Print prompt + 
env: + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + run: bash /opt/gh-aw/actions/print_prompt_summary.sh + - name: Upload prompt artifact + if: success() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + with: + name: prompt + path: /tmp/gh-aw/aw-prompts/prompt.txt + retention-days: 1 + + agent: + needs: activation + runs-on: ubuntu-latest + permissions: + contents: read + pull-requests: read + env: + DEFAULT_BRANCH: ${{ github.event.repository.default_branch }} + GH_AW_ASSETS_ALLOWED_EXTS: "" + GH_AW_ASSETS_BRANCH: "" + GH_AW_ASSETS_MAX_SIZE_KB: 0 + GH_AW_MCP_LOG_DIR: /tmp/gh-aw/mcp-logs/safeoutputs + GH_AW_SAFE_OUTPUTS: /opt/gh-aw/safeoutputs/outputs.jsonl + GH_AW_SAFE_OUTPUTS_CONFIG_PATH: /opt/gh-aw/safeoutputs/config.json + GH_AW_SAFE_OUTPUTS_TOOLS_PATH: /opt/gh-aw/safeoutputs/tools.json + GH_AW_WORKFLOW_ID_SANITIZED: securityreview + outputs: + checkout_pr_success: ${{ steps.checkout-pr.outputs.checkout_pr_success || 'true' }} + has_patch: ${{ steps.collect_output.outputs.has_patch }} + model: ${{ steps.generate_aw_info.outputs.model }} + output: ${{ steps.collect_output.outputs.output }} + output_types: ${{ steps.collect_output.outputs.output_types }} + secret_verification_result: ${{ steps.validate-secret.outputs.verification_result }} + steps: + - name: Setup Scripts + uses: github/gh-aw/actions/setup@f88ec26c65cc20ebb8ceabe809c9153385945bfe # v0.46.0 + with: + destination: /opt/gh-aw/actions + - name: Checkout repository + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + persist-credentials: false + - name: Merge remote .github folder + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_AGENT_FILE: ".github/agents/security-reviewer.agent.md" + GH_AW_AGENT_IMPORT_SPEC: "../agents/security-reviewer.agent.md" + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + 
const { main } = require('/opt/gh-aw/actions/merge_remote_agent_github_folder.cjs'); + await main(); + - name: Create gh-aw temp directory + run: bash /opt/gh-aw/actions/create_gh_aw_tmp_dir.sh + - name: Configure Git credentials + env: + REPO_NAME: ${{ github.repository }} + SERVER_URL: ${{ github.server_url }} + run: | + git config --global user.email "github-actions[bot]@users.noreply.github.com" + git config --global user.name "github-actions[bot]" + # Re-authenticate git with GitHub token + SERVER_URL_STRIPPED="${SERVER_URL#https://}" + git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git" + echo "Git configured with standard GitHub Actions identity" + - name: Checkout PR branch + id: checkout-pr + if: | + github.event.pull_request + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + with: + github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/checkout_pr_branch.cjs'); + await main(); + - name: Generate agentic run info + id: generate_aw_info + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + script: | + const fs = require('fs'); + + const awInfo = { + engine_id: "copilot", + engine_name: "GitHub Copilot CLI", + model: process.env.GH_AW_MODEL_AGENT_COPILOT || "", + version: "", + agent_version: "0.0.410", + cli_version: "v0.46.0", + workflow_name: "Security Review", + experimental: false, + supports_tools_allowlist: true, + run_id: context.runId, + run_number: context.runNumber, + run_attempt: process.env.GITHUB_RUN_ATTEMPT, + repository: context.repo.owner + '/' + context.repo.repo, + ref: 
context.ref, + sha: context.sha, + actor: context.actor, + event_name: context.eventName, + staged: false, + allowed_domains: ["defaults"], + firewall_enabled: true, + awf_version: "v0.20.0", + awmg_version: "v0.1.4", + steps: { + firewall: "squid" + }, + created_at: new Date().toISOString() + }; + + // Write to /tmp/gh-aw directory to avoid inclusion in PR + const tmpPath = '/tmp/gh-aw/aw_info.json'; + fs.writeFileSync(tmpPath, JSON.stringify(awInfo, null, 2)); + console.log('Generated aw_info.json at:', tmpPath); + console.log(JSON.stringify(awInfo, null, 2)); + + // Set model as output for reuse in other steps/jobs + core.setOutput('model', awInfo.model); + - name: Validate COPILOT_GITHUB_TOKEN secret + id: validate-secret + run: /opt/gh-aw/actions/validate_multi_secret.sh COPILOT_GITHUB_TOKEN 'GitHub Copilot CLI' https://github.github.com/gh-aw/reference/engines/#github-copilot-default + env: + COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }} + - name: Install GitHub Copilot CLI + run: /opt/gh-aw/actions/install_copilot_cli.sh 0.0.410 + - name: Install awf binary + run: bash /opt/gh-aw/actions/install_awf_binary.sh v0.20.0 + - name: Determine automatic lockdown mode for GitHub MCP Server + id: determine-automatic-lockdown + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_GITHUB_TOKEN: ${{ secrets.GH_AW_GITHUB_TOKEN }} + GH_AW_GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN }} + with: + script: | + const determineAutomaticLockdown = require('/opt/gh-aw/actions/determine_automatic_lockdown.cjs'); + await determineAutomaticLockdown(github, context, core); + - name: Download container images + run: bash /opt/gh-aw/actions/download_docker_images.sh ghcr.io/github/gh-aw-firewall/agent:0.20.0 ghcr.io/github/gh-aw-firewall/api-proxy:0.20.0 ghcr.io/github/gh-aw-firewall/squid:0.20.0 ghcr.io/github/gh-aw-mcpg:v0.1.4 ghcr.io/github/github-mcp-server:v0.30.3 node:lts-alpine + - name: Write Safe Outputs 
Config + run: | + mkdir -p /opt/gh-aw/safeoutputs + mkdir -p /tmp/gh-aw/safeoutputs + mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs + cat > /opt/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_EOF' + {"add_reviewer":{"max":1,"reviewers":["copilot"]},"create_pull_request_review_comment":{"max":20},"missing_data":{},"missing_tool":{},"noop":{"max":1},"submit_pull_request_review":{"max":1}} + GH_AW_SAFE_OUTPUTS_CONFIG_EOF + cat > /opt/gh-aw/safeoutputs/tools.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_EOF' + [ + { + "description": "Create a review comment on a specific line of code in a pull request. Use this for inline code review feedback, suggestions, or questions about specific code changes. For general PR comments not tied to specific lines, use add_comment instead. CONSTRAINTS: Maximum 20 review comment(s) can be created. Comments will be on the RIGHT side of the diff.", + "inputSchema": { + "additionalProperties": false, + "properties": { + "body": { + "description": "Review comment content in Markdown. Provide specific, actionable feedback about the code at this location.", + "type": "string" + }, + "line": { + "description": "Line number for the comment. For single-line comments, this is the target line. For multi-line comments, this is the ending line.", + "type": [ + "number", + "string" + ] + }, + "path": { + "description": "File path relative to the repository root (e.g., 'src/auth/login.js'). Must be a file that was changed in the PR.", + "type": "string" + }, + "side": { + "description": "Side of the diff to comment on: RIGHT for the new version (additions), LEFT for the old version (deletions). Defaults to RIGHT.", + "enum": [ + "LEFT", + "RIGHT" + ], + "type": "string" + }, + "start_line": { + "description": "Starting line number for multi-line comments. When set, the comment spans from start_line to line. 
Omit for single-line comments.", + "type": [ + "number", + "string" + ] + } + }, + "required": [ + "path", + "line", + "body" + ], + "type": "object" + }, + "name": "create_pull_request_review_comment" + }, + { + "description": "Submit a pull request review with a status decision. All create_pull_request_review_comment outputs are automatically collected and included as inline comments in this review. Use APPROVE to approve the PR, REQUEST_CHANGES to request changes, or COMMENT for general feedback without a decision. If you don't call this tool, review comments are still submitted as a COMMENT review. CONSTRAINTS: Maximum 1 review(s) can be submitted.", + "inputSchema": { + "additionalProperties": false, + "properties": { + "body": { + "description": "Overall review summary in Markdown. Provide a high-level assessment of the changes. Required for REQUEST_CHANGES; optional for APPROVE and COMMENT.", + "type": "string" + }, + "event": { + "description": "Review decision: APPROVE to approve the pull request, REQUEST_CHANGES to formally request changes before merging, or COMMENT for general feedback without a formal decision. Defaults to COMMENT when omitted.", + "enum": [ + "APPROVE", + "REQUEST_CHANGES", + "COMMENT" + ], + "type": "string" + } + }, + "type": "object" + }, + "name": "submit_pull_request_review" + }, + { + "description": "Add reviewers to a GitHub pull request. Reviewers receive notifications and can approve or request changes. Use 'copilot' as a reviewer name to request the Copilot PR review bot. CONSTRAINTS: Maximum 1 reviewer(s) can be added.", + "inputSchema": { + "additionalProperties": false, + "properties": { + "pull_request_number": { + "description": "Pull request number to add reviewers to. This is the numeric ID from the GitHub URL (e.g., 876 in github.com/owner/repo/pull/876). 
If omitted, adds reviewers to the PR that triggered this workflow.", + "type": [ + "number", + "string" + ] + }, + "reviewers": { + "description": "GitHub usernames to add as reviewers (e.g., ['octocat', 'copilot']). Users must have access to the repository.", + "items": { + "type": "string" + }, + "type": "array" + } + }, + "required": [ + "reviewers" + ], + "type": "object" + }, + "name": "add_reviewer" + }, + { + "description": "Report that a tool or capability needed to complete the task is not available, or share any information you deem important about missing functionality or limitations. Use this when you cannot accomplish what was requested because the required functionality is missing or access is restricted.", + "inputSchema": { + "additionalProperties": false, + "properties": { + "alternatives": { + "description": "Any workarounds, manual steps, or alternative approaches the user could take (max 256 characters).", + "type": "string" + }, + "reason": { + "description": "Explanation of why this tool is needed or what information you want to share about the limitation (max 256 characters).", + "type": "string" + }, + "tool": { + "description": "Optional: Name or description of the missing tool or capability (max 128 characters). Be specific about what functionality is needed.", + "type": "string" + } + }, + "required": [ + "reason" + ], + "type": "object" + }, + "name": "missing_tool" + }, + { + "description": "Log a transparency message when no significant actions are needed. Use this to confirm workflow completion and provide visibility when analysis is complete but no changes or outputs are required (e.g., 'No issues found', 'All checks passed'). This ensures the workflow produces human-visible output even when no other actions are taken.", + "inputSchema": { + "additionalProperties": false, + "properties": { + "message": { + "description": "Status or completion message to log. 
Should explain what was analyzed and the outcome (e.g., 'Code review complete - no issues found', 'Analysis complete - all tests passing').", + "type": "string" + } + }, + "required": [ + "message" + ], + "type": "object" + }, + "name": "noop" + }, + { + "description": "Report that data or information needed to complete the task is not available. Use this when you cannot accomplish what was requested because required data, context, or information is missing.", + "inputSchema": { + "additionalProperties": false, + "properties": { + "alternatives": { + "description": "Any workarounds, manual steps, or alternative approaches the user could take (max 256 characters).", + "type": "string" + }, + "context": { + "description": "Additional context about the missing data or where it should come from (max 256 characters).", + "type": "string" + }, + "data_type": { + "description": "Type or description of the missing data or information (max 128 characters). Be specific about what data is needed.", + "type": "string" + }, + "reason": { + "description": "Explanation of why this data is needed to complete the task (max 256 characters).", + "type": "string" + } + }, + "required": [], + "type": "object" + }, + "name": "missing_data" + } + ] + GH_AW_SAFE_OUTPUTS_TOOLS_EOF + cat > /opt/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_EOF' + { + "add_reviewer": { + "defaultMax": 3, + "fields": { + "pull_request_number": { + "issueOrPRNumber": true + }, + "reviewers": { + "required": true, + "type": "array", + "itemType": "string", + "itemSanitize": true, + "itemMaxLength": 39 + } + } + }, + "create_pull_request_review_comment": { + "defaultMax": 1, + "fields": { + "body": { + "required": true, + "type": "string", + "sanitize": true, + "maxLength": 65000 + }, + "line": { + "required": true, + "positiveInteger": true + }, + "path": { + "required": true, + "type": "string" + }, + "side": { + "type": "string", + "enum": [ + "LEFT", + "RIGHT" + ] + }, + "start_line": { 
+ "optionalPositiveInteger": true + } + }, + "customValidation": "startLineLessOrEqualLine" + }, + "missing_tool": { + "defaultMax": 20, + "fields": { + "alternatives": { + "type": "string", + "sanitize": true, + "maxLength": 512 + }, + "reason": { + "required": true, + "type": "string", + "sanitize": true, + "maxLength": 256 + }, + "tool": { + "type": "string", + "sanitize": true, + "maxLength": 128 + } + } + }, + "noop": { + "defaultMax": 1, + "fields": { + "message": { + "required": true, + "type": "string", + "sanitize": true, + "maxLength": 65000 + } + } + }, + "submit_pull_request_review": { + "defaultMax": 1, + "fields": { + "body": { + "type": "string", + "sanitize": true, + "maxLength": 65000 + }, + "event": { + "type": "string", + "enum": [ + "APPROVE", + "REQUEST_CHANGES", + "COMMENT" + ] + } + } + } + } + GH_AW_SAFE_OUTPUTS_VALIDATION_EOF + - name: Generate Safe Outputs MCP Server Config + id: safe-outputs-config + run: | + # Generate a secure random API key (360 bits of entropy, 40+ chars) + # Mask immediately to prevent timing vulnerabilities + API_KEY=$(openssl rand -base64 45 | tr -d '/+=') + echo "::add-mask::${API_KEY}" + + PORT=3001 + + # Set outputs for next steps + { + echo "safe_outputs_api_key=${API_KEY}" + echo "safe_outputs_port=${PORT}" + } >> "$GITHUB_OUTPUT" + + echo "Safe Outputs MCP server will run on port ${PORT}" + + - name: Start Safe Outputs MCP HTTP Server + id: safe-outputs-start + env: + DEBUG: '*' + GH_AW_SAFE_OUTPUTS_PORT: ${{ steps.safe-outputs-config.outputs.safe_outputs_port }} + GH_AW_SAFE_OUTPUTS_API_KEY: ${{ steps.safe-outputs-config.outputs.safe_outputs_api_key }} + GH_AW_SAFE_OUTPUTS_TOOLS_PATH: /opt/gh-aw/safeoutputs/tools.json + GH_AW_SAFE_OUTPUTS_CONFIG_PATH: /opt/gh-aw/safeoutputs/config.json + GH_AW_MCP_LOG_DIR: /tmp/gh-aw/mcp-logs/safeoutputs + run: | + # Environment variables are set above to prevent template injection + export DEBUG + export GH_AW_SAFE_OUTPUTS_PORT + export GH_AW_SAFE_OUTPUTS_API_KEY + export 
GH_AW_SAFE_OUTPUTS_TOOLS_PATH + export GH_AW_SAFE_OUTPUTS_CONFIG_PATH + export GH_AW_MCP_LOG_DIR + + bash /opt/gh-aw/actions/start_safe_outputs_server.sh + + - name: Start MCP Gateway + id: start-mcp-gateway + env: + GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }} + GH_AW_SAFE_OUTPUTS_API_KEY: ${{ steps.safe-outputs-start.outputs.api_key }} + GH_AW_SAFE_OUTPUTS_PORT: ${{ steps.safe-outputs-start.outputs.port }} + GITHUB_MCP_LOCKDOWN: ${{ steps.determine-automatic-lockdown.outputs.lockdown == 'true' && '1' || '0' }} + GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + run: | + set -eo pipefail + mkdir -p /tmp/gh-aw/mcp-config + + # Export gateway environment variables for MCP config and gateway script + export MCP_GATEWAY_PORT="80" + export MCP_GATEWAY_DOMAIN="host.docker.internal" + MCP_GATEWAY_API_KEY=$(openssl rand -base64 45 | tr -d '/+=') + echo "::add-mask::${MCP_GATEWAY_API_KEY}" + export MCP_GATEWAY_API_KEY + export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" + mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export DEBUG="*" + + export GH_AW_ENGINE="copilot" + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e 
GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.4' + + mkdir -p /home/runner/.copilot + cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh + { + "mcpServers": { + "github": { + "type": "stdio", + "container": "ghcr.io/github/github-mcp-server:v0.30.3", + "env": { + "GITHUB_LOCKDOWN_MODE": "$GITHUB_MCP_LOCKDOWN", + "GITHUB_PERSONAL_ACCESS_TOKEN": "\${GITHUB_MCP_SERVER_TOKEN}", + "GITHUB_READ_ONLY": "1", + "GITHUB_TOOLSETS": "repos,pull_requests" + } + }, + "safeoutputs": { + "type": "http", + "url": "http://host.docker.internal:$GH_AW_SAFE_OUTPUTS_PORT", + "headers": { + "Authorization": "\${GH_AW_SAFE_OUTPUTS_API_KEY}" + } + } + }, + "gateway": { + "port": $MCP_GATEWAY_PORT, + "domain": "${MCP_GATEWAY_DOMAIN}", + "apiKey": "${MCP_GATEWAY_API_KEY}", + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + } + } + GH_AW_MCP_CONFIG_EOF + - name: Generate workflow overview + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + script: | + const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); + await generateWorkflowOverview(core); + - name: Download prompt artifact + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + with: + name: prompt + path: /tmp/gh-aw/aw-prompts + - name: Clean git credentials + run: bash /opt/gh-aw/actions/clean_git_credentials.sh + - name: Execute GitHub Copilot CLI + id: agentic_execution + # Copilot CLI tool arguments (sorted): + timeout-minutes: 20 + run: | + set -o pipefail + sudo -E awf --env-all --container-workdir "${GITHUB_WORKSPACE}" --allow-domains 
api.business.githubcopilot.com,api.enterprise.githubcopilot.com,api.github.com,api.githubcopilot.com,api.individual.githubcopilot.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,github.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,ppa.launchpad.net,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,telemetry.enterprise.githubcopilot.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --enable-host-access --image-tag 0.20.0 --skip-pull --enable-api-proxy \ + -- /bin/bash -c '/usr/local/bin/copilot --add-dir /tmp/gh-aw/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --add-dir "${GITHUB_WORKSPACE}" --disable-builtin-mcps --allow-all-tools --allow-all-paths --share /tmp/gh-aw/sandbox/agent/logs/conversation.md --prompt "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"${GH_AW_MODEL_AGENT_COPILOT:+ --model "$GH_AW_MODEL_AGENT_COPILOT"}' 2>&1 | tee -a /tmp/gh-aw/agent-stdio.log + env: + COPILOT_AGENT_RUNNER_TYPE: STANDALONE + COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }} + GH_AW_MCP_CONFIG: /home/runner/.copilot/mcp-config.json + GH_AW_MODEL_AGENT_COPILOT: ${{ vars.GH_AW_MODEL_AGENT_COPILOT || '' }} + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }} + GITHUB_HEAD_REF: ${{ github.head_ref }} + GITHUB_REF_NAME: ${{ github.ref_name }} + GITHUB_STEP_SUMMARY: ${{ env.GITHUB_STEP_SUMMARY }} + GITHUB_WORKSPACE: ${{ github.workspace }} + XDG_CONFIG_HOME: /home/runner + - name: 
Configure Git credentials + env: + REPO_NAME: ${{ github.repository }} + SERVER_URL: ${{ github.server_url }} + run: | + git config --global user.email "github-actions[bot]@users.noreply.github.com" + git config --global user.name "github-actions[bot]" + # Re-authenticate git with GitHub token + SERVER_URL_STRIPPED="${SERVER_URL#https://}" + git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git" + echo "Git configured with standard GitHub Actions identity" + - name: Copy Copilot session state files to logs + if: always() + continue-on-error: true + run: | + # Copy Copilot session state files to logs folder for artifact collection + # This ensures they are in /tmp/gh-aw/ where secret redaction can scan them + SESSION_STATE_DIR="$HOME/.copilot/session-state" + LOGS_DIR="/tmp/gh-aw/sandbox/agent/logs" + + if [ -d "$SESSION_STATE_DIR" ]; then + echo "Copying Copilot session state files from $SESSION_STATE_DIR to $LOGS_DIR" + mkdir -p "$LOGS_DIR" + cp -v "$SESSION_STATE_DIR"/*.jsonl "$LOGS_DIR/" 2>/dev/null || true + echo "Session state files copied successfully" + else + echo "No session-state directory found at $SESSION_STATE_DIR" + fi + - name: Stop MCP Gateway + if: always() + continue-on-error: true + env: + MCP_GATEWAY_PORT: ${{ steps.start-mcp-gateway.outputs.gateway-port }} + MCP_GATEWAY_API_KEY: ${{ steps.start-mcp-gateway.outputs.gateway-api-key }} + GATEWAY_PID: ${{ steps.start-mcp-gateway.outputs.gateway-pid }} + run: | + bash /opt/gh-aw/actions/stop_mcp_gateway.sh "$GATEWAY_PID" + - name: Redact secrets in logs + if: always() + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/redact_secrets.cjs'); + await main(); + env: + GH_AW_SECRET_NAMES: 
'COPILOT_GITHUB_TOKEN,GH_AW_GITHUB_MCP_SERVER_TOKEN,GH_AW_GITHUB_TOKEN,GITHUB_TOKEN' + SECRET_COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }} + SECRET_GH_AW_GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN }} + SECRET_GH_AW_GITHUB_TOKEN: ${{ secrets.GH_AW_GITHUB_TOKEN }} + SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Upload Safe Outputs + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + with: + name: safe-output + path: ${{ env.GH_AW_SAFE_OUTPUTS }} + if-no-files-found: warn + - name: Ingest agent output + id: collect_output + if: always() + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }} + GH_AW_ALLOWED_DOMAINS: "api.business.githubcopilot.com,api.enterprise.githubcopilot.com,api.github.com,api.githubcopilot.com,api.individual.githubcopilot.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,github.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,ppa.launchpad.net,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,telemetry.enterprise.githubcopilot.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com" + GITHUB_SERVER_URL: ${{ github.server_url }} + GITHUB_API_URL: ${{ github.api_url }} + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/collect_ndjson_output.cjs'); + await main(); + - name: 
Upload sanitized agent output + if: always() && env.GH_AW_AGENT_OUTPUT + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + with: + name: agent-output + path: ${{ env.GH_AW_AGENT_OUTPUT }} + if-no-files-found: warn + - name: Upload engine output files + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + with: + name: agent_outputs + path: | + /tmp/gh-aw/sandbox/agent/logs/ + /tmp/gh-aw/redacted-urls.log + if-no-files-found: ignore + - name: Parse agent logs for step summary + if: always() + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_AGENT_OUTPUT: /tmp/gh-aw/sandbox/agent/logs/ + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/parse_copilot_log.cjs'); + await main(); + - name: Parse MCP Gateway logs for step summary + if: always() + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/parse_mcp_gateway_log.cjs'); + await main(); + - name: Print firewall logs + if: always() + continue-on-error: true + env: + AWF_LOGS_DIR: /tmp/gh-aw/sandbox/firewall/logs + run: | + # Fix permissions on firewall logs so they can be uploaded as artifacts + # AWF runs with sudo, creating files owned by root + sudo chmod -R a+r /tmp/gh-aw/sandbox/firewall/logs 2>/dev/null || true + # Only run awf logs summary if awf command exists (it may not be installed if workflow failed before install step) + if command -v awf &> /dev/null; then + awf logs summary | tee -a "$GITHUB_STEP_SUMMARY" + else + echo 'AWF binary not installed, skipping firewall log summary' + fi + - name: Upload agent artifacts + if: always() + continue-on-error: true + uses: 
actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + with: + name: agent-artifacts + path: | + /tmp/gh-aw/aw-prompts/prompt.txt + /tmp/gh-aw/aw_info.json + /tmp/gh-aw/mcp-logs/ + /tmp/gh-aw/sandbox/firewall/logs/ + /tmp/gh-aw/agent-stdio.log + /tmp/gh-aw/agent/ + if-no-files-found: ignore + + conclusion: + needs: + - activation + - agent + - detection + - safe_outputs + if: (always()) && (needs.agent.result != 'skipped') + runs-on: ubuntu-slim + permissions: + contents: read + pull-requests: write + outputs: + noop_message: ${{ steps.noop.outputs.noop_message }} + tools_reported: ${{ steps.missing_tool.outputs.tools_reported }} + total_count: ${{ steps.missing_tool.outputs.total_count }} + steps: + - name: Setup Scripts + uses: github/gh-aw/actions/setup@f88ec26c65cc20ebb8ceabe809c9153385945bfe # v0.46.0 + with: + destination: /opt/gh-aw/actions + - name: Download agent output artifact + continue-on-error: true + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + with: + name: agent-output + path: /tmp/gh-aw/safeoutputs/ + - name: Setup agent output environment variable + run: | + mkdir -p /tmp/gh-aw/safeoutputs/ + find "/tmp/gh-aw/safeoutputs/" -type f -print + echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV" + - name: Process No-Op Messages + id: noop + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }} + GH_AW_NOOP_MAX: 1 + GH_AW_WORKFLOW_NAME: "Security Review" + with: + github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/noop.cjs'); + await main(); + - name: Record Missing Tool + id: missing_tool + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + 
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }} + GH_AW_WORKFLOW_NAME: "Security Review" + with: + github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/missing_tool.cjs'); + await main(); + - name: Handle Agent Failure + id: handle_agent_failure + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }} + GH_AW_WORKFLOW_NAME: "Security Review" + GH_AW_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + GH_AW_AGENT_CONCLUSION: ${{ needs.agent.result }} + GH_AW_WORKFLOW_ID: "security-review" + GH_AW_SECRET_VERIFICATION_RESULT: ${{ needs.agent.outputs.secret_verification_result }} + GH_AW_CHECKOUT_PR_SUCCESS: ${{ needs.agent.outputs.checkout_pr_success }} + with: + github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/handle_agent_failure.cjs'); + await main(); + - name: Handle No-Op Message + id: handle_noop_message + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }} + GH_AW_WORKFLOW_NAME: "Security Review" + GH_AW_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + GH_AW_AGENT_CONCLUSION: ${{ needs.agent.result }} + GH_AW_NOOP_MESSAGE: ${{ steps.noop.outputs.noop_message }} + GH_AW_NOOP_REPORT_AS_ISSUE: "true" + with: + github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + 
const { main } = require('/opt/gh-aw/actions/handle_noop_message.cjs'); + await main(); + + detection: + needs: agent + if: needs.agent.outputs.output_types != '' || needs.agent.outputs.has_patch == 'true' + runs-on: ubuntu-latest + permissions: {} + timeout-minutes: 10 + outputs: + success: ${{ steps.parse_results.outputs.success }} + steps: + - name: Setup Scripts + uses: github/gh-aw/actions/setup@f88ec26c65cc20ebb8ceabe809c9153385945bfe # v0.46.0 + with: + destination: /opt/gh-aw/actions + - name: Download agent artifacts + continue-on-error: true + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + with: + name: agent-artifacts + path: /tmp/gh-aw/threat-detection/ + - name: Download agent output artifact + continue-on-error: true + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + with: + name: agent-output + path: /tmp/gh-aw/threat-detection/ + - name: Echo agent output types + env: + AGENT_OUTPUT_TYPES: ${{ needs.agent.outputs.output_types }} + run: | + echo "Agent output-types: $AGENT_OUTPUT_TYPES" + - name: Setup threat detection + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + WORKFLOW_NAME: "Security Review" + WORKFLOW_DESCRIPTION: "Automated security review for pull requests. Analyzes changed files against 15 security posture categories and posts inline review comments on findings, then requests Copilot code review." 
+ HAS_PATCH: ${{ needs.agent.outputs.has_patch }} + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/setup_threat_detection.cjs'); + await main(); + - name: Ensure threat-detection directory and log + run: | + mkdir -p /tmp/gh-aw/threat-detection + touch /tmp/gh-aw/threat-detection/detection.log + - name: Validate COPILOT_GITHUB_TOKEN secret + id: validate-secret + run: /opt/gh-aw/actions/validate_multi_secret.sh COPILOT_GITHUB_TOKEN 'GitHub Copilot CLI' https://github.github.com/gh-aw/reference/engines/#github-copilot-default + env: + COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }} + - name: Install GitHub Copilot CLI + run: /opt/gh-aw/actions/install_copilot_cli.sh 0.0.410 + - name: Execute GitHub Copilot CLI + id: agentic_execution + # Copilot CLI tool arguments (sorted): + # --allow-tool shell(cat) + # --allow-tool shell(grep) + # --allow-tool shell(head) + # --allow-tool shell(jq) + # --allow-tool shell(ls) + # --allow-tool shell(tail) + # --allow-tool shell(wc) + timeout-minutes: 20 + run: | + set -o pipefail + COPILOT_CLI_INSTRUCTION="$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" + mkdir -p /tmp/ + mkdir -p /tmp/gh-aw/ + mkdir -p /tmp/gh-aw/agent/ + mkdir -p /tmp/gh-aw/sandbox/agent/logs/ + copilot --add-dir /tmp/ --add-dir /tmp/gh-aw/ --add-dir /tmp/gh-aw/agent/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --disable-builtin-mcps --allow-tool 'shell(cat)' --allow-tool 'shell(grep)' --allow-tool 'shell(head)' --allow-tool 'shell(jq)' --allow-tool 'shell(ls)' --allow-tool 'shell(tail)' --allow-tool 'shell(wc)' --share /tmp/gh-aw/sandbox/agent/logs/conversation.md --prompt "$COPILOT_CLI_INSTRUCTION"${GH_AW_MODEL_DETECTION_COPILOT:+ --model "$GH_AW_MODEL_DETECTION_COPILOT"} 2>&1 | tee /tmp/gh-aw/threat-detection/detection.log + env: + COPILOT_AGENT_RUNNER_TYPE: STANDALONE + COPILOT_GITHUB_TOKEN: ${{ 
secrets.COPILOT_GITHUB_TOKEN }} + GH_AW_MODEL_DETECTION_COPILOT: ${{ vars.GH_AW_MODEL_DETECTION_COPILOT || '' }} + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + GITHUB_HEAD_REF: ${{ github.head_ref }} + GITHUB_REF_NAME: ${{ github.ref_name }} + GITHUB_STEP_SUMMARY: ${{ env.GITHUB_STEP_SUMMARY }} + GITHUB_WORKSPACE: ${{ github.workspace }} + XDG_CONFIG_HOME: /home/runner + - name: Parse threat detection results + id: parse_results + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/parse_threat_detection_results.cjs'); + await main(); + - name: Upload threat detection log + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + with: + name: threat-detection.log + path: /tmp/gh-aw/threat-detection/detection.log + if-no-files-found: ignore + + pre_activation: + if: (github.event_name != 'pull_request') || (github.event.pull_request.head.repo.id == github.repository_id) + runs-on: ubuntu-slim + outputs: + activated: ${{ steps.check_membership.outputs.is_team_member == 'true' }} + steps: + - name: Setup Scripts + uses: github/gh-aw/actions/setup@f88ec26c65cc20ebb8ceabe809c9153385945bfe # v0.46.0 + with: + destination: /opt/gh-aw/actions + - name: Check team membership for workflow + id: check_membership + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_REQUIRED_ROLES: admin,maintainer,write + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/check_membership.cjs'); + await main(); + + safe_outputs: + needs: + - agent + - detection + if: ((!cancelled()) && (needs.agent.result != 'skipped')) && 
(needs.detection.outputs.success == 'true') + runs-on: ubuntu-slim + permissions: + contents: read + pull-requests: write + timeout-minutes: 15 + env: + GH_AW_ENGINE_ID: "copilot" + GH_AW_WORKFLOW_ID: "security-review" + GH_AW_WORKFLOW_NAME: "Security Review" + outputs: + add_reviewer_reviewers_added: ${{ steps.process_safe_outputs.outputs.reviewers_added }} + create_discussion_error_count: ${{ steps.process_safe_outputs.outputs.create_discussion_error_count }} + create_discussion_errors: ${{ steps.process_safe_outputs.outputs.create_discussion_errors }} + process_safe_outputs_processed_count: ${{ steps.process_safe_outputs.outputs.processed_count }} + process_safe_outputs_temporary_id_map: ${{ steps.process_safe_outputs.outputs.temporary_id_map }} + steps: + - name: Setup Scripts + uses: github/gh-aw/actions/setup@f88ec26c65cc20ebb8ceabe809c9153385945bfe # v0.46.0 + with: + destination: /opt/gh-aw/actions + - name: Download agent output artifact + continue-on-error: true + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + with: + name: agent-output + path: /tmp/gh-aw/safeoutputs/ + - name: Setup agent output environment variable + run: | + mkdir -p /tmp/gh-aw/safeoutputs/ + find "/tmp/gh-aw/safeoutputs/" -type f -print + echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV" + - name: Process Safe Outputs + id: process_safe_outputs + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }} + GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG: "{\"create_pull_request_review_comment\":{\"max\":20,\"side\":\"RIGHT\"},\"missing_data\":{},\"missing_tool\":{},\"submit_pull_request_review\":{\"footer\":\"if-body\",\"max\":1}}" + with: + github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { 
main } = require('/opt/gh-aw/actions/safe_output_handler_manager.cjs'); + await main(); diff --git a/.github/workflows/security-review.md b/.github/workflows/security-review.md new file mode 100644 index 0000000..26dfdec --- /dev/null +++ b/.github/workflows/security-review.md @@ -0,0 +1,78 @@ +--- +description: > + Automated security review for pull requests. Analyzes changed files against + 15 security posture categories and posts inline review comments on findings, + then requests Copilot code review. + +on: + pull_request: + types: [opened, synchronize] + +imports: + - ../agents/security-reviewer.agent.md + +permissions: + contents: read + pull-requests: read + +tools: + github: + toolsets: [repos, pull_requests] + +safe-outputs: + create-pull-request-review-comment: + max: 20 + submit-pull-request-review: + max: 1 + footer: "if-body" + add-reviewer: + reviewers: [copilot] + max: 1 +--- + +# Security Review + +Review the code changes in pull request +#${{ github.event.pull_request.number }} using the imported security review +agent instructions. + +## Instructions + +1. **Fetch the pull request diff.** Read the pull request details and all + changed files for PR #${{ github.event.pull_request.number }}. + +2. **Review every changed file** against all 15 security posture categories + from the imported agent instructions. Focus only on the lines that were + added or modified in the diff — do not flag pre-existing code that was not + touched. + +3. **Post inline review comments** on specific code lines where you find + security issues. Each comment must include: + - The security category (e.g., "Input Validation", "Secrets") + - Severity: critical, high, medium, low, or informational + - A clear description of the issue and why it matters + - A concrete, actionable recommendation or code fix + +4. 
**Submit the review.** After posting all inline comments: + - If you found any **critical** or **high** severity issues, submit the + review with `REQUEST_CHANGES` and a summary body listing the top findings. + - If you found only **medium** or **low** issues, submit with `COMMENT` and + a brief summary. + - If no issues were found, submit with `COMMENT` and a body stating the + changes look secure. + +5. **Request Copilot review.** After submitting the security review, add + `copilot` as a reviewer on the pull request for an additional code quality + review. + +## Review Guidelines + +- **Only review changed lines.** Do not flag pre-existing issues in untouched + code. +- **Be specific and actionable.** Each finding must include a concrete fix. +- **Prioritize by severity.** Focus on critical and high issues first. +- **Use the project context.** This is a Python monorepo using Ruff, Pyright + strict mode, Bandit, and pytest. The project follows secure-by-default + principles documented in `CODING_STANDARDS.md`. +- **Do not produce false positives.** If you are unsure whether something is a + real issue, state your uncertainty and classify it as informational. diff --git a/.gitignore b/.gitignore index e2f6850..fbf2879 100644 --- a/.gitignore +++ b/.gitignore @@ -126,7 +126,6 @@ ipython_config.py # in the .venv directory. It is recommended not to include this directory in version control. .pixi .uv/ -uv.lock # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm __pypackages__/ diff --git a/CODING_STANDARDS.md b/CODING_STANDARDS.md index f73d4f1..b74f2bf 100644 --- a/CODING_STANDARDS.md +++ b/CODING_STANDARDS.md @@ -38,6 +38,10 @@ For methods implementing integration with external services, use descriptive pre Always respect the [KISS principle](https://en.wikipedia.org/wiki/KISS_principle). Write code that is easy to read and understand. Avoid clever tricks or complex constructs that may confuse future maintainers. 
+**Adopt a secure-by-default approach.** + +Treat all external inputs as untrusted. Validate and sanitize at the boundary before use (environment variables, configuration values, etc.). Never hardcode or log secrets, tokens, or credentials in source code; load them from secret stores or environment variables. Prefer the principle of least privilege — request only the permissions, scopes, and access levels that are strictly necessary. See [Security](#security) for detailed practices and tooling. + **Document intent, not mechanics.** Document non-obvious or surprising behavior with comments and docstrings. Explain the "why" behind decisions, not just the "what". Always update public-facing docs when behavior, configuration, or defaults change. @@ -48,16 +52,7 @@ When implementing features, consider edge cases and failure modes. Validate inpu **Treat configuration as a public contract.** -Treat configuration as part of the public contract: validate it, document defaults, and fail fast on invalid values. - -**Handle configuration and secrets safely.** - -Never hardcode secrets, tokens, credentials, or service endpoints in source code. -Load them from environment variables, secret stores, or configuration objects instead. - -Document all required configuration keys and environment variables in README files or public documentation. Provide defaults only when they are safe and explicitly documented. - -When logging, never emit secrets or sensitive data. Mask or strip confidential fields, and avoid logging full request or response payloads if they may contain secrets or personally identifiable information (PII). +Treat configuration as part of the public contract: validate it, document defaults, and fail fast on invalid values. Document all required configuration keys and environment variables in README files or public documentation. Provide defaults only when they are safe and explicitly documented. 
**Follow the Clean Code Essentials.** @@ -89,6 +84,23 @@ def create_user(email: str) -> User: over deeply nested `if/else` blocks that obscure the happy path. +For repeated validation concerns on public APIs, a helper or a decorator can keep the happy path pristine while enforcing the same policy: + +```python +@require_non_blank_strings("username", "email") +def create_user(username: str, email: str) -> User: + return User(username=username, email=email) +``` + +or + +```python +def create_user(username: str, email: str) -> User: + validate_string_is_not_blank(value=username, parameter_name="username") + validate_string_is_not_blank(value=email, parameter_name="email") + return User(username=username, email=email) +``` + **Avoid boolean mode flags.** Avoid boolean mode flags in APIs because they make call sites ambiguous and encourage feature creep. Feature creep is the gradual, unchecked addition of new capabilities or options to a function, method, or API, often without a clear need or cohesive design, increasing complexity and reducing clarity over time. @@ -122,6 +134,29 @@ Make error messages actionable and safe. Errors should explain what failed, incl For example, prefer `ValueError("unsupported region 'eu-west-9'; expected one of: ...")` over generic messages like "invalid input". +**Design focused exception classes.** + +When built-in exceptions (`ValueError`, `TypeError`, `KeyError`) are not precise enough, create project-specific exceptions that inherit from the closest standard base. Keep exception hierarchies shallow — one level of custom classes inheriting from builtins is usually sufficient. + +Each custom exception should: +- Inherit from the most appropriate built-in (`ValueError`, `TypeError`, etc.). +- Accept structured context in its constructor and format a clear message in `__init__`. +- Use `__slots__ = ()` to keep instances lightweight. +- Live in a dedicated `errors` module within the relevant package, re-exported from `__init__.py`. 
+ +```python +class EmptyStringError(ValueError): + """Raised when a string is empty after trimming whitespace.""" + + __slots__ = () + + def __init__(self, parameter: str) -> None: + """Initialize the error with parameter context.""" + super().__init__(f"param '{parameter}' must be non-empty.") +``` + +Avoid deep hierarchies or generic "catch-all" base exceptions per agent unless there is a clear need for blanket handling. Callers should be able to catch the specific exception they care about without importing an entire tree. + ## Testing Conventions **Test behavior, not implementation.** @@ -180,7 +215,7 @@ We use [Ruff](https://github.com/astral-sh/ruff) for both linting and formatting **Rules enforced:** - See: [pyproject.toml](./pyproject.toml) `[tool.ruff.lint]` -- [Naming (N)](https://docs.astral.sh/ruff/rules/#pep8-naming-n) — all names follows [PEP8](https://www.python.org/dev/peps/pep-0008/) naming conventions (snake_case functions/vars, CapWords classes, UPPER_SNAKE constants). +- [Naming (N)](https://docs.astral.sh/ruff/rules/#pep8-naming-n) — all names follow [PEP8](https://www.python.org/dev/peps/pep-0008/) naming conventions (snake_case functions/vars, CapWords classes, UPPER_SNAKE constants). - Imports: - [I (isort)](https://docs.astral.sh/ruff/rules/#isort-i) — imports are sorted/grouped. - [ICN (import conventions)](https://docs.astral.sh/ruff/rules/#flake8-import-conventions-icn) — prefer consistent relative vs absolute imports and avoid pointless aliases. @@ -199,10 +234,10 @@ We use [Ruff](https://github.com/astral-sh/ruff) for both linting and formatting - [Pylint-style (PLC/PLE/PLR/PLW)](https://docs.astral.sh/ruff/rules/#pylint-pl) — [Pylint](https://pylint.pycqa.org/) checks for code smell, complexity, bad builtins, etc. - [Bug risks (B)](https://docs.astral.sh/ruff/rules/#flake8-bugbear-b) — checks common bug patterns and potential design issues with bug bear. 
-- [Datetime TZ (DTZ)](https://docs.astral.sh/ruff/rules/#flake8-datetimez-dtz) — datetime should always used with timezone. -- [Implicit string concat (ISC)](https://docs.astral.sh/ruff/rules/#flake8-implicit-str-concat-isc) — checks implicit explicit string concatenation issues. +- [Datetime TZ (DTZ)](https://docs.astral.sh/ruff/rules/#flake8-datetimez-dtz) — datetime should always be used with timezone. +- [Implicit string concat (ISC)](https://docs.astral.sh/ruff/rules/#flake8-implicit-str-concat-isc) — checks for implicit string concatenation issues. - [pygrep-hooks (PGH)](https://docs.astral.sh/ruff/rules/#pygrep-hooks-pgh) — checks `noqa` and `type: ignore` annotations. Checks also invalid mock access. -- [Pytest style (PT)](https://docs.astral.sh/ruff/rules/#flake8-pytest-style-pt) — checks commom issues and inconsistencies in pytest-based tests. +- [Pytest style (PT)](https://docs.astral.sh/ruff/rules/#flake8-pytest-style-pt) — checks common issues and inconsistencies in pytest-based tests. - [Security (S)](https://docs.astral.sh/ruff/rules/#flake8-bandit-s) — use [Bandit](https://bandit.readthedocs.io/en/latest/) to find some security issues. - [TODO/FIXME hygiene (TD)](https://docs.astral.sh/ruff/rules/#td-flake8-todos) — ensures TODO are properly formatted and linked to an issue. - [FIX](https://docs.astral.sh/ruff/rules/#flake8-fixme-fix) — flags FIX/FIXME/XXX/TODO comment patterns. @@ -289,6 +324,34 @@ Keep imports sorted by Ruff, which groups them as standard library, third-party, Re-export public symbols in each agent's `__init__.py` to provide a clean public API, and keep internal modules private. Watch out for circular imports; when they occur, move shared types or utilities into dedicated modules to break the cycle. +## Type Annotations + +**Always annotate public APIs.** + +All public functions, methods, and class attributes must have complete type annotations, including return types. 
Private helpers should also be annotated when it improves clarity. Both Pyright (strict) and Mypy (strict) run in the quality gate, so all annotations must satisfy both checkers. + +**Use `from __future__ import annotations`.** + +Include `from __future__ import annotations` at the top of every module. This enables PEP 604 union syntax (`X | Y`) on Python 3.10+ and defers annotation evaluation, avoiding forward-reference issues. + +**Prefer modern union syntax.** + +Use `X | Y` instead of `Union[X, Y]`, and `X | None` instead of `Optional[X]`. The modern syntax is more readable and consistent with how Python is evolving. + +```python +# ✅ Preferred +def find_user(user_id: str) -> User | None: + ... + +# ❌ Avoid +def find_user(user_id: str) -> Optional[User]: + ... +``` + +**Type narrowing and casts.** + +Prefer `isinstance()` checks or sentinel patterns for type narrowing over `typing.cast()`. Use `cast()` only when the type checker cannot infer the correct type and you are certain of the runtime type. + ## Package Structure and Public API See [DEVELOPMENT.md](DEVELOPMENT.md) for the full monorepo layout and development guide. @@ -333,9 +396,35 @@ Never block the event loop with CPU-bound work or blocking I/O. Offload such wor Prefer `asyncio.TaskGroup` or `gather` with `return_exceptions=False` unless you have a clear strategy for error aggregation. -## Subprocesses and External Calls +## Security + +This section details the practices that support the [secure-by-default approach](#general-coding-conventions) established in the general conventions. + +### Input Validation and Sanitization + +Validate all external inputs at the boundary — user data, configuration values, environment variables, webhook payloads, queue messages, and model/tool outputs. Use guard clauses to reject invalid or unexpected values early with clear error messages. Never pass unsanitized inputs to subprocesses, shell commands, SQL queries, or external API calls. 
-When calling subprocesses, avoid `shell=True` to prevent shell injection vulnerabilities. Instead, pass argument lists directly to `subprocess` functions: +```python +# ✅ Validate external input at the boundary +if event_type not in ALLOWED_EVENTS: + raise UnsupportedEventError(event_type, expected=ALLOWED_EVENTS) +``` + +### Secrets and Credentials + +Never hardcode secrets, tokens, API keys, or credentials in source code, tests, or configuration files. Load them from secret stores or environment variables. Never include secrets in logs, exception messages, or error output. + +```python +# ✅ Load secrets from environment or secret stores +api_key = os.environ["API_KEY"] + +# ❌ Never hardcode secrets +API_KEY = "sk-abc123..." # exposed in source control +``` + +### Subprocess Safety + +When calling subprocesses, avoid `shell=True` to prevent shell injection. Pass argument lists directly: ```python # ✅ Preferred @@ -345,7 +434,65 @@ subprocess.run(["git", "status"], check=True) subprocess.run("git status", shell=True) ``` -Validate any untrusted inputs (user-provided data, config/env values, webhooks, queue messages, tool or model outputs) that influence subprocess arguments or external API requests. Set timeouts on network and subprocess calls where feasible to avoid unbounded waits that can hang your application. +Validate any inputs that influence subprocess arguments. Set timeouts on subprocess calls to avoid unbounded waits. + +### Network and HTTP + +Prefer HTTPS for all network communication. Set timeouts and configure retries on HTTP clients. Avoid transmitting sensitive data over plain HTTP. + +### Logging Hygiene + +Never log secrets, tokens, credentials, or PII. Redact sensitive fields before logging. See the [Logging](#logging) section for formatting and level conventions. + +### Dependency Management + +Keep `uv.lock` in sync with `pyproject.toml`; fail builds on drift. Review dependency updates for security implications before merging. 
+ +### Automated Security Tooling + +The project uses multiple layers of automated security scanning: + +- **[Bandit](https://bandit.readthedocs.io/)** runs as part of the quality gate (`uv run poe check`) and via Ruff rule [S](https://docs.astral.sh/ruff/rules/#flake8-bandit-s), performing static analysis to find common security issues in Python code (hardcoded passwords, use of `exec`, insecure hash functions, etc.). +- **[CodeQL](https://codeql.github.com/)** runs on every push and pull request via GitHub Actions ([`.github/workflows/codeql-analysis.yml`](.github/workflows/codeql-analysis.yml)), performing deep semantic analysis to detect vulnerabilities such as injection flaws, path traversals, and insecure data flows. A weekly scheduled scan also runs against the default branch. +- **[Dependabot](https://docs.github.com/en/code-security/dependabot)** monitors Python (pip/uv) and GitHub Actions dependencies weekly, automatically opening pull requests when security updates or new versions are available. Configuration is in [`.github/dependabot.yml`](.github/dependabot.yml). + +### Security Review + +An automated [security review agentic workflow](.github/workflows/security-review.md) runs on every pull request, analyzing changed files against 15 security posture categories and posting inline review comments for any findings. After the security review completes, it automatically requests a Copilot code review for additional coverage. The full security checklist is defined in [`.github/agents/security-reviewer.agent.md`](.github/agents/security-reviewer.agent.md). + +## Logging + +Use the standard `logging` module instead of `print` (enforced by Ruff rule T20). 
Create one logger per module at module level: + +```python +import logging + +logger = logging.getLogger(__name__) +``` + +Use lazy formatting with `%`-style placeholders so arguments are only interpolated when the log level is enabled: + +```python +# ✅ Preferred — lazy interpolation +logger.info("Processing user %s", user_id) + +# ❌ Avoid — eager f-string evaluation +logger.info(f"Processing user {user_id}") +``` + +Choose log levels intentionally: +- `DEBUG` — detailed diagnostic information useful during development. +- `INFO` — confirmation that things are working as expected. +- `WARNING` — something unexpected happened but the application continues. +- `ERROR` — a failure that prevents a specific operation from completing. +- `CRITICAL` — a failure that may prevent the application from continuing. + +Guard expensive log construction behind a level check when the arguments themselves are costly to compute: + +```python +if logger.isEnabledFor(logging.DEBUG): + logger.debug("Payload details: %s", expensive_serialize(payload)) +``` ## Performance Considerations @@ -373,20 +520,6 @@ def parameters(self) -> dict[str, Any]: return self.input_model.model_json_schema() ``` -### Prefer Attribute Access Over isinstance() - -When checking types in hot paths, prefer checking a `type` attribute (fast string comparison) over `isinstance()` (slower due to method resolution order traversal): - -```python -# ✅ Preferred - type attribute comparison -if content.type == "function_call": - # handle function call - -# ❌ Avoid in hot paths - isinstance() is slower -if isinstance(content, FunctionCallContent): - # handle function call -``` - ### Avoid Redundant Serialization When the same data needs to be used in multiple places, compute it once and reuse it: diff --git a/DEVELOPMENT.md b/DEVELOPMENT.md index a35127f..5464a2e 100644 --- a/DEVELOPMENT.md +++ b/DEVELOPMENT.md @@ -124,7 +124,28 @@ This solution is a monorepo hosting multiple Python-based agents. 
Each agent is ``` Repo root -├─ .github/ # workflows, instructions, templates +├─ .github/ # GitHub configuration and automation +│ ├─ workflows/ # GitHub Actions workflows +│ │ ├─ checks.yml # lint, type-check, test on PRs and pushes +│ │ ├─ docs.yml # build Sphinx docs, deploy to GitHub Pages +│ │ ├─ release.yml # build and publish packages +│ │ ├─ codeql-analysis.yml # CodeQL security scanning +│ │ ├─ security-review.md # agentic workflow (security review) +│ │ └─ security-review.lock.yml # compiled agentic workflow (generated) +│ ├─ agents/ # Copilot custom agents (*.agent.md) +│ │ ├─ security-reviewer.agent.md # security review prompt +│ │ └─ agentic-workflows.agent.md # dispatcher agent (gh aw init) +│ ├─ instructions/ # Copilot custom instructions +│ │ ├─ python.instructions.md # Python coding conventions +│ │ ├─ agents.instructions.md # agent development guidelines +│ │ ├─ docs.instructions.md # documentation conventions +│ │ ├─ agentic-workflows.instructions.md # agentic workflow authoring +│ │ └─ copilot-agents.instructions.md # Copilot agent file format +│ ├─ ISSUE_TEMPLATE/ # issue templates (bug, feature) +│ ├─ pull_request_template.md # PR template +│ ├─ dependabot.yml # Dependabot config +│ ├─ copilot-instructions.md # global Copilot instructions +│ └─ aw/ # agentic workflow lock data (generated) ├─ agents/ │ └─ / │ ├─ src//agents// # agent code (entrypoint, core logic) @@ -153,7 +174,8 @@ Repo root ├─ LICENSE # root license ├─ pyproject.toml # root config, deps, tasks ├─ README.md # project overview -└─ shared_tasks.toml # shared Poe tasks +├─ shared_tasks.toml # shared Poe tasks (included by agents) +└─ dist/ # build output (all agents, gitignored) ``` ### Poe Tasks @@ -300,6 +322,60 @@ Run tests sequentially for each agent using the `test` task of the agent. uv run poe test ``` +#### Build and Publish + +This group of Poe tasks handles building and publishing agent packages. 
Each agent has its own version in its `pyproject.toml`, enabling independent release lifecycles.
+
+##### clean-dist
+
+Remove the `dist/` directory at the workspace root:
+
+```sh
+uv run poe clean-dist
+```
+
+All agents' build artifacts land in the workspace root `dist/` directory, so a single clean removes everything.
+
+##### build
+
+Build all agent packages (cleans `dist/` first):
+
+```sh
+uv run poe build
+```
+
+This is a sequence of:
+
+- [clean-dist](#clean-dist)
+- `build-all-agents` (fans out `uv build` to every agent via `run_tasks_in_agents_if_exists.py`)
+
+##### build-changed
+
+Build only agents with changed files (cleans `dist/` first):
+
+```sh
+uv run poe build-changed
+```
+
+This is a sequence of:
+
+- [clean-dist](#clean-dist)
+- `build-changed-agents` (fans out `uv build` only to agents whose files changed, detected via `run_tasks_in_changed_agents.py`)
+
+This is used by the release workflow so that only agent(s) with bumped versions get built and published.
+
+##### publish
+
+Publish all packages in `dist/` to the configured package index:
+
+```sh
+uv run poe publish
+```
+
+This runs a single `uv publish` from the workspace root, uploading everything in `dist/`. The target index is configured in `pyproject.toml` under `[[tool.uv.index]]` (defaults to GitHub Packages).
+
+Each agent has its own version; the registry rejects duplicate versions, so only agents whose version was bumped actually get uploaded.
+
 ##### pre-commit-check
 
 Run partial checks for pre-commit:
@@ -435,3 +511,53 @@ When working on an agent, follow these steps:
 - Run the agent using: `uv run [args]` from the agent directory or `uv run --package [args]` from the root.
 - Check that the agent is running properly and that the changes are working as expected.
 - Commit and push your changes in a feature branch and open a pull request for review.
+
+### Build, Publish, and Release
+
+Each agent is an independent package with its own version in its `pyproject.toml`, enabling independent release lifecycles. See the [Build and Publish](#build-and-publish) Poe tasks section for detailed task descriptions.
+
+#### Release workflow
+
+The [release workflow](.github/workflows/release.yml) runs on GitHub release events and `workflow_dispatch`. It:
+
+1. Checks out the code and installs dependencies.
+2. Runs `poe build-changed` to build only agents with changes.
+3. Runs `poe publish` to upload the built packages to the configured registry.
+
+To release an agent: bump its version in `agents/<agent-name>/pyproject.toml`, merge to main, and create a GitHub release.
+
+#### Changing the publish target
+
+By default, packages are published to GitHub Packages. To publish to a different registry (e.g., PyPI, a private Artifactory, or Azure Artifacts), update two places:
+
+1. **`pyproject.toml`** — update the `[[tool.uv.index]]` section at the bottom of the file:
+
+   ```toml
+   [[tool.uv.index]]
+   name = "pypi"  # or your registry name
+   url = "https://pypi.org/simple/"
+   publish-url = "https://upload.pypi.org/legacy/"
+   explicit = true
+   ```
+
+2. **`.github/workflows/release.yml`** — update the environment variables in the publish step:
+
+   ```yaml
+   - name: Publish to PyPI
+     env:
+       UV_PUBLISH_URL: https://upload.pypi.org/legacy/
+       UV_PUBLISH_TOKEN: ${{ secrets.PYPI_TOKEN }}
+     run: uv run poe publish
+   ```
+
+   For PyPI, create an API token and store it as a repository secret (`PYPI_TOKEN`). For GitHub Packages, the built-in `GITHUB_TOKEN` is used automatically.
+
+### Documentation
+
+Documentation is built using Sphinx and published to GitHub Pages via the [docs workflow](.github/workflows/docs.yml).
+
+- Install docs deps: `uv run poe docs-install`
+- Build locally: `uv run poe docs`
+- The docs workflow triggers on pushes to `main` when documentation sources, agent source code, or the docs generation script change.
+ +> **Note:** `docs/generated/` and `agents/*/docs/generated/` are produced by CI; do not edit or commit them. diff --git a/README.md b/README.md index 6f22ee6..ddddb56 100644 --- a/README.md +++ b/README.md @@ -1,188 +1,567 @@ # python-agent-template -Security-first template for building and shipping multiple Python agents from one monorepo, derived from the Microsoft Agent Framework. It aims to give newcomers a ready-to-run, batteries-included starting point with guardrails (typing, linting, security, CI, releases) that you can adapt to your org’s standards. +A security-first monorepo template for building, testing, and shipping Python agents — or any AI-generated / "vibe coded" project that needs production-grade guardrails from day one. -> Disclaimer: This template is based on the Microsoft Agent Framework and provided for learning/acceleration. Evaluate and adapt to your organization’s security, compliance, and coding standards before production use. +Whether you are building LLM agents, automation bots, or any Python package, this template gives you a batteries-included starting point: strict typing, multi-layer security scanning, automated CI/CD, and a release pipeline — so you can focus on your code while the guardrails catch mistakes before they reach production. -## What’s inside and why -- **uv + poe**: fast installs and repeatable task runner (fmt/lint/types/tests/bandit). -- **Ruff, Pyright, MyPy, Bandit, PyTest, markdown code fence lint**: code quality and security guardrails. -- **Task fan-out scripts**: run tasks across all agents or only changed agents to keep CI fast. -- **Security automation**: CodeQL scanning and Dependabot for updates; good hygiene baseline. -- **Docs generation**: experimental py2docfx workflow (disabled by default) to emit docfx YAML. -- **Licensing**: each agent can ship its own LICENSE for package publication. 
+> **Disclaimer:** Derived from the [Microsoft Agent Framework](https://github.com/microsoft/agent-framework) for learning and acceleration. Evaluate and adapt to your organization's security, compliance, and coding standards before production use. -## Prerequisites -- Python 3.10–3.13 installed locally (3.13 default for `poe setup`). -- curl available to install uv, or install uv via your package manager: `curl -LsSf https://astral.sh/uv/install.sh | sh`. -- Git for cloning and hooks. +--- -## Getting started (root workspace) -1) Install uv: `curl -LsSf https://astral.sh/uv/install.sh | sh` -2) Install dev deps (workspace-wide): `uv run poe setup` -3) Run full quality gate: `uv run poe check` +## Why this template? -`poe setup` creates/refreshes `.venv`, installs all dev dependencies with uv, and installs pre-commit hooks so staged changes get checked automatically. +AI code assistants and vibe coding accelerate development but can introduce subtle bugs, security issues, and type errors. This template wraps every code change in **six layers of automated checks** — from your editor to production — so generated code gets the same scrutiny as hand-written code. -Local setup quickstart: clone the repo, ensure Python 3.10–3.13 is installed, run `uv run poe setup` to create/refresh `.venv` and install hooks, then `uv run poe check` to validate the workspace. For speed, `python scripts/run_tasks_in_changed_agents.py ` narrows lint/type/test to modified agents. +```mermaid +flowchart TB + subgraph L1["1. Editor"] + direction LR + E1[Pylance type checking] + E2[Ruff auto-format on save] + E3[Copilot custom instructions] + end + + subgraph L2["2. Pre-commit hooks"] + direction LR + H1[Ruff format + lint] + H2[MyPy scoped] + H3[Bandit security] + H4[Whitespace / EOF / config checks] + H5[Markdown fence lint] + H6[uv-lock sync] + end + + subgraph L3["3. 
CI - Quality gate"] + direction LR + C1["Ruff format + lint"] + C2[Pyright strict] + C3[MyPy strict] + C4[Bandit] + C5[PyTest + coverage] + C6[Markdown code lint] + end + + subgraph L4["4. CI - Security Scanning"] + direction LR + S1[CodeQL — SAST for Python + Actions] + S2[Dependabot — dependency updates] + S3[Copilot security review agent — 15 posture categories] + end + + subgraph L5["5. Copilot Review"] + direction LR + CR1[Copilot code review — assigned automatically] + CR2[AI-powered suggestions and comments] + end + + subgraph L6["6. Release"] + direction LR + R1[Build changed agents only] + R2[Publish to GitHub Packages] + end + + L1 --> L2 + L2 --> L3 + L3 --> L4 + L4 --> L5 + L5 --> L6 +``` + +Each layer catches different classes of issues: + +| Layer | When it runs | What it catches | +| --- | --- | --- | +| **Editor** | As you type | Type errors, formatting, AI-aware context via custom instructions | +| **Pre-commit** | On `git commit` (staged files) | Style drift, security anti-patterns, broken configs, stale lockfiles | +| **CI quality gate** | On PR / push | Full repo-wide type safety, test regressions, code quality | +| **CI security** | On PR / push / schedule | Dataflow vulnerabilities, outdated dependencies, security posture gaps | +| **Copilot Review** | On PR (after security scan) | AI-powered code review with suggestions and inline comments | +| **Release** | On GitHub release | Builds and publishes only changed agents to the package registry | + +--- + +## Getting started + +### Prerequisites + +- Python 3.10–3.13 (3.13 recommended). +- [uv](https://docs.astral.sh/uv/) for environment and dependency management. +- Git for version control and hooks. + +### Quick setup -## Tasks: check vs pre-commit -- `poe check` (full suite, repo-wide): - - Ruff format (`fmt`), Ruff lint (`lint`), Pyright, MyPy, Bandit, PyTest, Markdown code fence lint. - - Runs across all agents. Use for CI and pre-merge confidence; catches issues outside your current diff. 
-- Pre-commit hooks (fast, staged-only): - - Ruff format+lint, scoped MyPy, Bandit (via hook), trailing whitespace/EOF fixers, markdown fence checks on staged files. - - Purpose: keep diffs clean and reduce CI churn. Because it only sees staged files, it is fast but not a substitute for `poe check`. +```sh +# 1. Install uv +curl -LsSf https://astral.sh/uv/install.sh | sh -Why staged-only for pre-commit: speed and focus on what you are changing. Why still run full checks in CI: to catch regressions in untouched files, ensure type/safety coverage repo-wide, and validate tests end-to-end. +# 2. Clone and set up +git clone && cd +uv run poe setup + +# 3. Run the full quality gate +uv run poe check +``` + +`poe setup` creates `.venv/`, installs all dev dependencies, and installs pre-commit hooks. `poe check` runs the full quality gate (format, lint, type checks, security, tests, markdown lint) across the entire workspace. + +--- + +## Repository layout + +``` +Repo root +├─ .github/ # GitHub configuration and automation +│ ├─ workflows/ # GitHub Actions workflows +│ │ ├─ checks.yml # lint, type-check, test on PRs and pushes +│ │ ├─ docs.yml # build Sphinx docs, deploy to GitHub Pages +│ │ ├─ release.yml # build and publish packages +│ │ ├─ codeql-analysis.yml # CodeQL security scanning +│ │ ├─ security-review.md # agentic workflow (security review) +│ │ └─ security-review.lock.yml # compiled agentic workflow (generated) +│ ├─ agents/ # Copilot custom agents (*.agent.md) +│ │ ├─ security-reviewer.agent.md # security reviewer agent +│ │ └─ agentic-workflows.agent.md # dispatcher agent (gh aw init) +│ ├─ instructions/ # Copilot custom instructions +│ │ ├─ python.instructions.md # Python coding conventions +│ │ ├─ agents.instructions.md # agent development guidelines +│ │ ├─ docs.instructions.md # documentation conventions +│ │ ├─ agentic-workflows.instructions.md # agentic workflow authoring +│ │ └─ copilot-agents.instructions.md # Copilot agent file format +│ ├─ 
ISSUE_TEMPLATE/ # issue templates (bug, feature) +│ ├─ pull_request_template.md # PR template +│ ├─ dependabot.yml # Dependabot config +│ ├─ copilot-instructions.md # global Copilot instructions +│ └─ aw/ # agentic workflow lock data (generated) +├─ agents/ +│ └─ / +│ ├─ src//agents// # agent code +│ ├─ tests/ # agent tests +│ ├─ docs/source/ # Sphinx sources +│ ├─ Dockerfile # container image +│ ├─ pyproject.toml # agent config, deps, version +│ └─ LICENSE # agent-specific license +├─ docs/ # unified Sphinx sources + output +├─ scripts/ # shared helpers for tasks/CI +├─ pyproject.toml # root config, deps, poe tasks +├─ shared_tasks.toml # poe tasks shared by all agents +└─ .pre-commit-config.yaml # pre-commit hook definitions +``` -### Pre-commit details -- Install once per clone: `uv run poe pre-commit-install` (adds hooks to `.git/hooks`). -- Run manually on all files: `uv run pre-commit run --all-files` (useful before large refactors or in CI if desired). -- Hook set (from `.pre-commit-config.yaml`): Ruff format + Ruff lint, MyPy (scoped), Bandit, trailing-whitespace/EOF fixers, markdown fenced-code checker, config validators, uv lock refresher. -- If you must skip briefly, prefer `SKIP=hookname pre-commit run` instead of disabling globally. +### Scripts (`scripts/`) -### Task flow diagrams +| Script | Purpose | +| --- | --- | +| `run_tasks_in_agents_if_exists.py` | Fans out a Poe task (fmt, lint, build, ...) to every agent that defines it | +| `run_tasks_in_changed_agents.py` | Same, but only for agents with changed files — used by `build-changed` and CI | +| `check_md_code_blocks.py` | Validates Python code blocks in markdown files | +| `generate_docs.py` | Builds unified and per-agent Sphinx documentation | -Check (repo-wide `poe check`): +--- + +## Quality gates in detail + +### Poe tasks — your single entry point + +All quality checks, builds, and operations are accessed through [Poe the Poet](https://poethepoet.natn.io/) tasks. 
Run `uv run poe ` from the repo root. + +#### `poe check` — full quality gate + +Runs the complete quality pipeline sequentially. Use before pushing or merging. ```mermaid flowchart LR - A[poe check] --> B[Ruff format] - B --> C[Ruff lint] - C --> D[Pyright] - D --> E[MyPy] - E --> F[Bandit] - F --> G[PyTest + coverage] - G --> H[Markdown code fence lint] + A["poe check"] --> B["lock-verify"] + B --> C["Ruff format"] + C --> D["Ruff lint"] + D --> E["Pyright
(strict)"] + E --> F["MyPy
(strict)"] + F --> G["Bandit
(security)"] + G --> H["PyTest
(+ coverage)"] + H --> I["Markdown
code lint"] ``` -Pre-commit check task (`poe pre-commit-check`, staged-aware): +#### `poe pre-commit-check` — fast staged-only checks + +Runs a subset of checks scoped to staged files. Triggered automatically by pre-commit hooks. ```mermaid flowchart LR - P[poe pre-commit-check] --> P1[Ruff format] - P1 --> P2[Ruff lint] - P2 --> P3[Pyright staged] - P3 --> P4[Markdown code fence lint] + P["poe pre-commit-check
(staged files)"] --> P1["Ruff
format"] + P1 --> P2["Ruff
lint"] + P2 --> P3["Pyright
(staged)"] + P3 --> P4["Markdown
code lint
(staged)"] ``` -Pre-commit hook pipeline (on `git commit`): +#### `poe build` and `poe build-changed` — build pipeline ```mermaid flowchart LR - C[git commit] --> H1[pre-commit framework] - H1 --> H2[Whitespace/EOF/line endings] - H2 --> H3[Config checks YAML/TOML/JSON] - H3 --> H4[pyupgrade] - H4 --> H5[Ruff format + Ruff lint] - H5 --> H6[MyPy scoped] - H6 --> H7[Bandit] - H7 --> H8[Markdown fence check] - H8 --> H9[nbQA notebook parse] - H9 --> H10[uv-lock update if manifests change] + B1["poe build"] --> B2["clean-dist
(rm -rf dist/)"] + B2 --> B3["build-all-agents
(uv build per agent)"] + B3 --> B4["dist/
*.whl + *.tar.gz"] + + BC1["poe build-changed"] --> BC2["clean-dist
(rm -rf dist/)"] + BC2 --> BC3["build-changed-agents
(only modified agents)"] + BC3 --> B4 ``` -## Repository layout -- `agents/` — each agent as a package (e.g., `agent1/`). -- `scripts/` — task fan-out and helper scripts (e.g., run tasks across agents, check markdown code blocks). -- `.github/workflows/` — CI (checks, release, CodeQL) and automation. -- `.pre-commit-config.yaml` — local hook definitions. -- `pyproject.toml` — shared config for uv, ruff, mypy, pyright, bandit, poe tasks. -- `docs/` — output/placeholder; doc generation is experimental. - -Scripts explained (`scripts/`) -- `run_tasks_in_agents_if_exists.py`: runs a given task (fmt/lint/pyright/mypy/bandit/test) in every agent that defines it, so `poe check` can fan out safely even if some agents lack tasks. -- `run_tasks_in_changed_agents.py`: detects which agents changed relative to the target branch and runs the requested task only there; use for fast local/PR lint/type passes. -- `check_md_code_blocks.py`: validates fenced code blocks in README files; helps keep docs runnable. - -Task catalog (root `poe` tasks) -- `poe setup`: create/refresh `.venv`, install deps, install pre-commit hooks (uses `poe venv`, `install`, `pre-commit-install`). -- `poe venv`: `uv venv --clear --python `; default 3.13, override with `-p/--python`. -- `poe install`: `uv sync --all-extras --dev` (docs group excluded by default). -- `poe pre-commit-install`: install and refresh hooks. -- `poe fmt`: Ruff format. -- `poe lint`: Ruff lint. -- `poe pyright`: strict Pyright. -- `poe mypy`: strict MyPy. -- `poe bandit`: Bandit security scan (fans out to agents + scripts). -- `poe bandit-agents`: Bandit against all agents (fan-out via `run_tasks_in_agents_if_exists`). -- `poe bandit-scripts`: Bandit against the `scripts/` tree. -- `poe test`: PyTest + coverage. -- `poe markdown-code-lint`: fenced-code checks in READMEs. -- `poe check`: bundle that runs fmt, lint, pyright, mypy, bandit, test, markdown-code-lint. 
- -Bundled task contents (what runs where) -- `poe setup`: (1) create/refresh `.venv`, (2) `uv sync --all-extras --dev`, (3) install pre-commit hooks. Use once per clone or after Python version changes. -- `poe check`: Ruff format → Ruff lint → Pyright → MyPy → Bandit → PyTest + coverage → markdown code fence lint. Use before merge/CI to cover the full workspace. -- Pre-commit hook run (staged files only): Ruff format + Ruff lint, scoped MyPy, trailing-whitespace/EOF fixes, markdown fence checks; install with `uv run poe pre-commit-install`. Fast hygiene, not a replacement for `poe check`. - -### What Ruff, Pyright, and MyPy check - -### Detailed checks (Ruff, Pyright, MyPy) - -- Ruff - - Format: Black-like formatter, import sorting; keeps 120-col width and normalizes strings/spacing. - - Lint (selected families): pycodestyle E/W, pyflakes F (unused imports/vars, undefined names), bugbear B (risky patterns), pyupgrade UP (modern syntax), pylint PLC/PLE/PLR/PLW (naming, refactors, errors, warnings), Bandit S (security), pytest PT, return rules RET, async ASYNC, datetime TZ, string concat ISC, simplify SIM, quotes Q, exceptions TRY, todo TD/FIX, naming N, docstyle D (Google convention), import conventions ICN/I, pydantic guards PGH, debugger T100. - - Per-file relaxations: tests allow assert-raises constant (`S101`) and magic numbers (`PLR2004`); notebooks skip copyright and long-line checks. -- Pyright (strict) - - Coverage: `agents` and `scripts`, strict mode, unused imports reported; tests and venv paths excluded. - - Catches: incorrect call signatures, bad attribute access, incompatible unions/Optionals, missing/invalid imports, unreachable code, mismatched overloads, missing type annotations, and unsafe narrowing; includes `scripts` via `extraPaths` so helper scripts must stay typed. -- MyPy (strict) - - Coverage: `agents` and `scripts`, strict + pydantic plugin; disallow untyped defs/decorators, no implicit Optional, warn on return Any, show error codes. 
- - Catches: type mismatches, Optional misuse, protocol/interface violations, missing annotations, decorator typing gaps; pydantic plugin enforces typed fields and forbid-extra in __init__. - -Tip: Run `poe lint`/`poe pyright`/`poe mypy` individually during development; `poe check` runs them all before tests and docs lint. - -## Using this template for new agents -1) Copy `agents/agent1` to `agents/`. -2) Update metadata in `agents//pyproject.toml` (name, description, URLs, deps). -3) Implement your code under `src//` and extend `tests/`. -4) If you will publish the agent, place a `LICENSE` file in the agent directory and use `license-files = ["LICENSE"]` so wheels/sdists include it. -5) Run `uv run poe check`. +### Pre-commit hooks — on every `git commit` -## Virtualenv setup and cleanup -- Create fresh env and install: `uv run poe setup` (runs `poe venv` → `uv sync` → pre-commit install). Default python is 3.13; override with `-p/--python`. -- Manual fallback if needed: `uv venv --python 3.13 && uv sync --all-extras --dev`. -- Clean everything: remove `.venv` and caches with `rm -rf .venv .pytest_cache .ruff_cache .mypy_cache __pycache__ agents/**/{.pytest_cache,.ruff_cache,.mypy_cache,__pycache__}`. - -## Documentation (experimental) -- Scripts use py2docfx to emit docfx YAML into `docs/`. The docs tasks are commented out by default; install docs deps with `uv sync --group docs` if you want to experiment. Expect rough edges. - -## Tooling reference (what/where/why) - -Local + CI (from `pyproject.toml` and `.pre-commit-config.yaml`) - -| Tool / service | Where it runs | What it does | Why it matters | Docs | -| --- | --- | --- | --- | --- | -| uv | Local + CI | Fast Python installer/resolver and executor for reproducible envs and tasks. | Keeps dependency installs deterministic and quick, so developers actually run checks. 
| [uv docs](https://docs.astral.sh/uv/) | -| Poe the Poet | Local + CI | Task runner that fans commands to all agents and provides `poe check`/`poe pre-commit-check`. | One entry point for fmt/lint/types/tests/security, reducing configuration drift. | [Poe docs](https://poethepoet.natn.io/) | -| Ruff (format + lint) | Local, pre-commit, CI | Auto-formats and lints Python/imports/docstrings; flags dead code, unsafe patterns, and some security issues. | Removes style noise from reviews and catches correctness issues early. | [Ruff docs](https://docs.astral.sh/ruff/) | -| Pyright (strict) | Local + CI | Fast static type checker with precise inference. | Prevents type regressions and interface drift; great developer ergonomics. | [Pyright docs](https://microsoft.github.io/pyright/) | -| MyPy (strict) | Local + CI (scoped in pre-commit) | Second static type checker with a different inference engine and plugin support (pydantic). | Adds coverage where Pyright differs; reduces blind spots by double-checking types. | [MyPy docs](https://mypy.readthedocs.io/en/stable/) | -| Bandit | Local, pre-commit, CI | Security static analysis for Python. | Flags risky calls (eval, weak crypto, subprocess misuse) before merge. | [Bandit docs](https://bandit.readthedocs.io/en/latest/) | -| PyTest + pytest-cov | Local + CI | Runs tests with coverage reporting. | Proves behavior still works; coverage highlights untested risk. | [PyTest](https://docs.pytest.org/en/latest/), [pytest-cov](https://pytest-cov.readthedocs.io/en/latest/) | -| Markdown code fence lint | Local + CI | Checks fenced code blocks in project READMEs. | Prevents broken snippets and docs drift. | [scripts/check_md_code_blocks.py](scripts/check_md_code_blocks.py) | -| pre-commit framework | Local | Runs the hook set on staged files. | Automates hygiene (format, lint, security) before commits land. 
| [pre-commit docs](https://pre-commit.com/) | -| pre-commit-hooks bundle | Local | Trims whitespace, fixes EOF, normalizes newlines, validates YAML/TOML/JSON, AST checks, forbids debug statements. | Removes common footguns and keeps config files valid. | [pre-commit-hooks](https://github.com/pre-commit/pre-commit-hooks) | -| pyupgrade hook | Local | Rewrites Python syntax to modern 3.10+. | Eliminates legacy syntax and aligns with supported versions. | [pyupgrade](https://github.com/asottile/pyupgrade) | -| nbQA hook | Local | Validates notebook cells parse as Python. | Stops broken notebooks from entering the repo. | [nbQA docs](https://nbqa.readthedocs.io/en/latest/) | -| uv-lock hook | Local | Refreshes `uv.lock` when `pyproject.toml` changes. | Ensures lockfile matches manifests, preventing supply-chain drift. | [uv-pre-commit](https://github.com/astral-sh/uv-pre-commit) | -| Poe pre-commit-check hook | Local | Runs diff-aware fmt/lint/pyright/markdown checks via Poe. | Fast, staged-only guardrail that mirrors CI styling and type rules. | [pyproject.toml](pyproject.toml) | - -Why both Pyright and MyPy: they use different inference engines and plugin ecosystems, so running both raises signal and lowers the chance of missing type errors. - -GitHub-hosted automation (security and updates) - -| Service | What it does | Why it matters | Docs | -| --- | --- | --- | --- | -| CodeQL Analysis | Code scanning for Python and GitHub Actions code. | Finds dataflow and security issues beyond linters/typing. | [CodeQL docs](https://docs.github.com/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/about-codeql-code-scanning) | -| Dependabot | Weekly updates for pip/uv dependencies and GitHub Actions. | Shrinks vulnerability exposure windows and keeps CI runners current. | [Dependabot docs](https://docs.github.com/code-security/dependabot/dependabot-version-updates) | +Installed via `poe setup` (or `poe pre-commit-install`). 
Runs on staged files only for speed. + +```mermaid +flowchart TD + GC["git commit"] --> PF["pre-commit framework"] + PF --> S1["Whitespace / EOF
line endings"] + S1 --> S2["Config validation
YAML · TOML · JSON"] + S2 --> S3["AST check
(syntax errors)"] + S3 --> S4["pyupgrade
(modern Python 3.10+)"] + S4 --> S5["Ruff format + lint"] + S5 --> S6["MyPy
(scoped to staged)"] + S6 --> S7["Bandit
(security scan)"] + S7 --> S8["Markdown fence
code check"] + S8 --> S9["nbQA
(notebook parse)"] + S9 --> S10["uv-lock sync
(if manifests changed)"] + S10 --> S11["poe pre-commit-check
(Pyright staged)"] +``` + +### CI workflows — on every PR and push + +```mermaid +flowchart TD + subgraph trigger["Trigger: pull_request / push"] + direction LR + T1["PR opened / sync"] + T2["Push to main,
feature*, fix*"] + end + + trigger --> CW["checks.yml
Python 3.10–3.13 matrix"] + trigger --> CQ["codeql-analysis.yml
CodeQL SAST"] + trigger --> SR["security-review.md
Copilot security agent"] + + CW --> CW1["uv sync"] + CW1 --> CW2["poe check
(full quality gate)"] + + CQ --> CQ1["CodeQL init
(Python + Actions)"] + CQ1 --> CQ2["Autobuild"] + CQ2 --> CQ3["CodeQL analyze"] + + SR --> SR1["Read PR diff"] + SR1 --> SR2["Review 15 security
posture categories"] + SR2 --> SR3["Post inline review
comments"] + SR3 --> SR4["Submit review
(REQUEST_CHANGES
or COMMENT)"] + SR4 --> SR5["Assign Copilot
as PR reviewer"] +``` + +### Release workflow — on GitHub release + +```mermaid +flowchart LR + R0["GitHub release
(published)"] --> R1["Checkout +
uv setup"] + R1 --> R2["uv sync
--all-extras --dev"] + R2 --> R3["poe build-changed
(changed agents only)"] + R3 --> R4["poe publish
(uv publish → dist/)"] + R4 --> R5["GitHub Packages
(or configured registry)"] +``` + +### Docs workflow — on push to main + +```mermaid +flowchart LR + D0["Push to main
(docs/agents/scripts changed)"] --> D1["Install docs deps"] + D1 --> D2["Generate Sphinx docs
(unified + per-agent)"] + D2 --> D3["Deploy to
GitHub Pages"] +``` + +### Continuous security — always-on protection + +```mermaid +flowchart TD + subgraph always["Always-on security"] + direction TB + DEP["Dependabot
Weekly dependency updates
(pip/uv + GitHub Actions)"] + CQL["CodeQL
Scheduled weekly scan
(Monday 01:45 UTC)"] + BP["Branch protection
Required checks · Signed commits
Auto-merge for trusted bots"] + end + + subgraph pr["On every PR"] + direction TB + SR["Copilot security agent
15 posture categories
Inline review comments"] + CHECKS["Quality gate
Ruff · Pyright · MyPy
Bandit · Tests"] + end + + always --> pr +``` + +--- + +## Task reference + +### Setup tasks + +| Task | What it does | +| --- | --- | +| `poe setup` | Create `.venv/`, install deps, install pre-commit hooks | +| `poe venv` | Create/refresh `.venv/` (default Python 3.13, override with `-p`) | +| `poe install` | `uv sync --all-extras --dev` (docs group excluded) | +| `poe pre-commit-install` | Install pre-commit hooks into `.git/hooks` | + +### Quality tasks + +| Task | What it does | +| --- | --- | +| `poe fmt` | Ruff format (Black-like, 120-col, import sorting) | +| `poe lint` | Ruff lint (pycodestyle, pyflakes, bugbear, pylint, Bandit rules, ...) | +| `poe pyright` | Pyright strict type checking | +| `poe mypy` | MyPy strict type checking (+ pydantic plugin) | +| `poe bandit` | Bandit security scan (fans out to agents + scripts) | +| `poe test` | PyTest + coverage across all agents | +| `poe markdown-code-lint` | Lint Python code blocks in markdown files | +| `poe check` | Full quality gate: all of the above in sequence | +| `poe pre-commit-check` | Fast staged-only subset (fmt, lint, pyright, markdown lint) | + +### Build and publish tasks + +| Task | What it does | +| --- | --- | +| `poe clean-dist` | Remove `dist/` directory | +| `poe build` | Clean dist, then build **all** agent packages | +| `poe build-changed` | Clean dist, then build only **changed** agent packages | +| `poe publish` | Upload everything in `dist/` to the package registry | + +### Documentation tasks + +| Task | What it does | +| --- | --- | +| `poe docs-install` | Install Sphinx and documentation dependencies | +| `poe docs` | Build unified + per-agent documentation | + +--- + +## Using this template + +### Step 1: Create a new agent + +```sh +# Copy the template agent +cp -r agents/agent1 agents/ +``` + +### Step 2: Configure the agent + +Edit `agents//pyproject.toml`: +- Update `name`, `description`, `version`, and `urls`. +- Adjust `tool.flit.module` to match the agent's namespace. 
+- Add agent-specific dependencies.
+
+### Step 3: Implement and test
+
+- Write code under `agents/<agent-name>/src/python_agent_template/agents/<agent-name>/`.
+- Write tests under `agents/<agent-name>/tests/`.
+- Run checks: `uv run poe -C agents/<agent-name> check` or `uv run poe check` from root.
+
+### Step 4: Run the agent
+
+```sh
+# From agent directory
+uv run <entry-point> [args]
+
+# From workspace root
+uv run --package <agent-name> <entry-point> [args]
+```
+
+### Step 5: Release
+
+1. Bump the version in `agents/<agent-name>/pyproject.toml`.
+2. Merge to main.
+3. Create a GitHub release — the release workflow builds and publishes automatically.
+
+---
+
+## Build, publish, and release
+
+Each agent is an independent package with its own version, enabling independent SDLC lifecycles. All build artifacts land in the workspace root `dist/` directory.
+
+- `poe build` — cleans `dist/` and builds **all** agent packages.
+- `poe build-changed` — cleans `dist/` and builds only agents with **changed files**.
+- `poe publish` — uploads everything in `dist/`. The registry rejects duplicate versions, so only agents with bumped versions actually get uploaded.
+
+The [release workflow](.github/workflows/release.yml) runs on GitHub release events and `workflow_dispatch`. It uses `build-changed` → `publish` so only modified agents are built and published.
+
+### Changing the publish target
+
+By default, packages are published to **GitHub Packages**. To publish to a different registry (PyPI, Artifactory, Azure Artifacts, etc.), update two places:
+
+1. **`pyproject.toml`** — update the `[[tool.uv.index]]` section:
+
+   ```toml
+   [[tool.uv.index]]
+   name = "pypi" # or your registry name
+   url = "https://pypi.org/simple/"
+   publish-url = "https://upload.pypi.org/legacy/"
+   explicit = true
+   ```
+
+2. 
**`.github/workflows/release.yml`** — update the publish step environment variables: + + ```yaml + - name: Publish to PyPI + env: + UV_PUBLISH_URL: https://upload.pypi.org/legacy/ + UV_PUBLISH_TOKEN: ${{ secrets.PYPI_TOKEN }} + run: uv run poe publish + ``` + + For PyPI, create an API token and store it as a repository secret (`PYPI_TOKEN`). For GitHub Packages, the built-in `GITHUB_TOKEN` is used automatically. + +--- + +## What the checks catch + +### Ruff (format + lint) + +- **Format:** Black-like formatter, import sorting, 120-col width, normalized strings/spacing. +- **Lint families:** pycodestyle (E/W), pyflakes (F), bugbear (B), pyupgrade (UP), pylint (PLC/PLE/PLR/PLW), Bandit (S), pytest (PT), return rules (RET), async (ASYNC), datetime (TZ), ISC, SIM, quotes (Q), exceptions (TRY), todos (TD/FIX), naming (N), docstyle (D, Google convention), imports (ICN/I), pydantic (PGH), debugger (T100). +- **Relaxations:** tests allow `assert` (`S101`) and magic numbers (`PLR2004`); notebooks skip copyright and long-line checks. + +### Pyright (strict) + +- Covers `agents/` and `scripts/`, strict mode, unused imports reported. +- Catches: incorrect signatures, bad attribute access, incompatible unions/Optionals, missing imports, unreachable code, missing type annotations, unsafe narrowing. + +### MyPy (strict) + +- Covers `agents/` and `scripts/`, strict + pydantic plugin. +- Catches: type mismatches, Optional misuse, protocol violations, missing annotations, decorator typing gaps. + +> **Why both Pyright and MyPy?** They use different inference engines and plugin ecosystems. Running both raises signal and lowers the chance of missing type errors — critical when working with AI-generated code. + +--- + +## Agentic workflows + +The repository includes a [GitHub Agentic Workflow](https://github.github.com/gh-aw/) that automates security review on every pull request. 
+ +### Security review agent + +A Copilot custom agent defined in [`.github/agents/security-reviewer.agent.md`](.github/agents/security-reviewer.agent.md) contains the security review prompt — 15 security posture categories (input validation, secrets, subprocess safety, network security, authentication, logging hygiene, error handling, dependency security, file system safety, cryptography, configuration, concurrency, container security, CI/CD, and test coverage) with detailed checklists for each. + +### Security review workflow + +The agentic workflow at [`.github/workflows/security-review.md`](.github/workflows/security-review.md) imports the security review agent and runs on every `pull_request` event (`opened`, `synchronize`). It: + +1. Reads the pull request diff. +2. Reviews changed files against all 15 security posture categories. +3. Posts inline review comments on specific code lines where issues are found. +4. Submits a consolidated review (`REQUEST_CHANGES` for critical/high, `COMMENT` otherwise). +5. Requests Copilot as a reviewer for additional code quality coverage. + +### Compiling agentic workflows + +Agentic workflow `.md` files must be compiled into GitHub Actions `.lock.yml` files before they can run: + +```bash +# Install the extension (once) +gh extension install github/gh-aw + +# Compile all workflows (generates .github/workflows/*.lock.yml) +gh aw compile + +# Compile a specific workflow +gh aw compile security-review +``` + +Commit both the `.md` source and the generated `.lock.yml` file. Only frontmatter changes require recompilation — edits to the markdown body take effect at runtime without recompiling. + +Configure a `COPILOT_GITHUB_TOKEN` secret in your repository settings (Settings → Secrets and variables → Actions). See the [gh-aw authorization docs](https://github.github.com/gh-aw/reference/auth/) for details. 
+ +### Copilot custom instructions + +The `.github/instructions/` directory contains context-aware instructions that guide Copilot when editing specific file types: + +| File | Applies to | Purpose | +| --- | --- | --- | +| `python.instructions.md` | `**/*.{py,ipynb}` | Python coding conventions, typing, docstrings | +| `agents.instructions.md` | `agents/**/*` | Agent development guidelines and namespace rules | +| `docs.instructions.md` | `docs/**/*`, `agents/*/docs/**/*` | Documentation conventions | +| `agentic-workflows.instructions.md` | `.github/workflows/*.md` | Agentic workflow authoring rules | +| `copilot-agents.instructions.md` | `.github/agents/*.agent.md` | Agent file format and naming conventions | + +--- + +## Documentation + +Documentation is built using Sphinx and published to GitHub Pages via the [docs workflow](.github/workflows/docs.yml). + +```sh +# Install docs dependencies +uv run poe docs-install + +# Build locally +uv run poe docs +``` + +The docs workflow triggers on pushes to `main` when documentation sources, agent source code, or the docs generation script change. + +> **Note:** `docs/generated/` and `agents/*/docs/generated/` are produced by CI; do not edit or commit them. + +--- ## Security and automation -- **Dependabot**: keeps pip/uv and GitHub Actions up to date. Alternatives: Renovate, Snyk, Mend. Important to run some updater to shrink vulnerability exposure windows. -- **CodeQL**: SAST/code scanning for Python and GitHub Actions. Alternatives: semgrep, commercial SAST. Important to have at least one scanner in place. -- **Branch protection/rulesets and auto-fix**: enforce required checks, signed commits, and allow trusted bots (e.g., Dependabot) to auto-merge with autofix where policy allows. 
+ +| Mechanism | What it does | Why it matters | +| --- | --- | --- | +| **Dependabot** | Weekly updates for pip/uv dependencies and GitHub Actions | Shrinks vulnerability exposure windows | +| **CodeQL** | SAST/code scanning for Python and GitHub Actions | Finds dataflow and security issues beyond linters | +| **Copilot security agent** | AI-powered reviews against 15 security posture categories | Catches issues that static analysis misses | +| **Branch protection** | Required checks, signed commits, auto-merge for trusted bots | Prevents unverified code from reaching main | +| **Pre-commit hooks** | Staged-file checks before every commit | Catches issues at the earliest possible point | +| **Dual type checkers** | Pyright + MyPy with different inference engines | Maximal type safety for AI-generated code | + +--- + +## Tooling reference + +### Local + CI tools + +| Tool | Where | What it does | Docs | +| --- | --- | --- | --- | +| uv | Local + CI | Fast Python installer/resolver, reproducible envs | [uv docs](https://docs.astral.sh/uv/) | +| Poe the Poet | Local + CI | Task runner, fan-out to agents | [Poe docs](https://poethepoet.natn.io/) | +| Ruff | Local + CI | Format + lint (single fast tool) | [Ruff docs](https://docs.astral.sh/ruff/) | +| Pyright | Local + CI | Strict static type checker | [Pyright docs](https://microsoft.github.io/pyright/) | +| MyPy | Local + CI | Strict type checker + pydantic plugin | [MyPy docs](https://mypy.readthedocs.io/en/stable/) | +| Bandit | Local + CI | Python security static analysis | [Bandit docs](https://bandit.readthedocs.io/en/latest/) | +| PyTest | Local + CI | Tests + coverage | [PyTest docs](https://docs.pytest.org/en/latest/) | +| pre-commit | Local | Hook framework for staged-file checks | [pre-commit docs](https://pre-commit.com/) | + +### GitHub-hosted automation + +| Service | What it does | Docs | +| --- | --- | --- | +| CodeQL Analysis | Code scanning for Python and GitHub Actions | [CodeQL 
docs](https://docs.github.com/code-security/code-scanning) | +| Dependabot | Weekly dependency and Actions updates | [Dependabot docs](https://docs.github.com/code-security/dependabot) | +| Copilot security review | Agentic AI security review on PRs | [gh-aw docs](https://github.github.com/gh-aw/) | + +--- + +## Virtualenv setup and cleanup + +```sh +# Create fresh env and install everything +uv run poe setup + +# Specify a Python version +uv run poe setup --python 3.12 + +# Manual fallback +uv venv --python 3.13 && uv sync --all-extras --dev + +# Clean everything +rm -rf .venv .pytest_cache .ruff_cache .mypy_cache __pycache__ \ + agents/**/{.pytest_cache,.ruff_cache,.mypy_cache,__pycache__} +``` ## Copyright option -- Ruff copyright enforcement is available but disabled. If your org requires it, enable the `flake8-copyright` block in `pyproject.toml` and add headers. Leave it off to avoid breaking contributions until ready. + +Ruff copyright enforcement is available but disabled. If your org requires it, enable the `flake8-copyright` block in `pyproject.toml` and add headers. Leave it off to avoid breaking contributions until ready. 
diff --git a/agents/agent1/src/python_agent_template/agents/agent1/__init__.py b/agents/agent1/src/python_agent_template/agents/agent1/__init__.py index 61b3cdc..6701ebb 100644 --- a/agents/agent1/src/python_agent_template/agents/agent1/__init__.py +++ b/agents/agent1/src/python_agent_template/agents/agent1/__init__.py @@ -1,5 +1,5 @@ """agent1 package exports.""" -from .agent import AgentConfig, ExampleAgent, MissingNameError +from .agent import AgentConfig, ExampleAgent -__all__ = ["AgentConfig", "ExampleAgent", "MissingNameError"] +__all__ = ["AgentConfig", "ExampleAgent"] diff --git a/agents/agent1/src/python_agent_template/agents/agent1/agent.py b/agents/agent1/src/python_agent_template/agents/agent1/agent.py index a4eb021..363ec7c 100644 --- a/agents/agent1/src/python_agent_template/agents/agent1/agent.py +++ b/agents/agent1/src/python_agent_template/agents/agent1/agent.py @@ -4,6 +4,8 @@ from dataclasses import dataclass +from .validators.blank_string_validator import require_non_blank_strings + @dataclass class AgentConfig: @@ -12,14 +14,6 @@ class AgentConfig: greeting: str = "hello" -class MissingNameError(ValueError): - """Raised when a name argument is missing.""" - - def __init__(self) -> None: - """Initialize the missing-name error with a default message.""" - super().__init__("name required") - - class ExampleAgent: """Simple greeter agent.""" @@ -27,8 +21,7 @@ def __init__(self, config: AgentConfig | None = None) -> None: """Initialize the agent with optional config.""" self.config = config or AgentConfig() + @require_non_blank_strings("name") def run(self, name: str) -> str: """Return a greeting for the provided name.""" - if not name: - raise MissingNameError return f"{self.config.greeting}, {name}!" 
diff --git a/agents/agent1/src/python_agent_template/agents/agent1/validators/__init__.py b/agents/agent1/src/python_agent_template/agents/agent1/validators/__init__.py new file mode 100644 index 0000000..07497af --- /dev/null +++ b/agents/agent1/src/python_agent_template/agents/agent1/validators/__init__.py @@ -0,0 +1,5 @@ +"""Validator utilities for agent inputs.""" + +from .blank_string_validator import require_non_blank_strings, validate_string_is_not_blank + +__all__ = ["require_non_blank_strings", "validate_string_is_not_blank"] diff --git a/agents/agent1/src/python_agent_template/agents/agent1/validators/blank_string_validator.py b/agents/agent1/src/python_agent_template/agents/agent1/validators/blank_string_validator.py new file mode 100644 index 0000000..b7659b9 --- /dev/null +++ b/agents/agent1/src/python_agent_template/agents/agent1/validators/blank_string_validator.py @@ -0,0 +1,106 @@ +"""Decorator for validating non-empty string parameters.""" + +from __future__ import annotations + +import inspect +from collections.abc import Callable +from functools import wraps +from typing import Any, ParamSpec, TypeVar + +from .errors import EmptyStringError, MissingParameterError, NoneNotAllowedError, StringTypeError + +P = ParamSpec("P") +R = TypeVar("R") + + +def _validate_string_is_not_blank( + value: Any, + parameter_name: str, +) -> None: + """Validate that a string is non-blank. + + Args: + value: The value to validate. + parameter_name: The name of the parameter being validated. + + Raises: + NoneNotAllowedError: If the value is None. + StringTypeError: If the value is not a string. + EmptyStringError: If the string is empty. 
+ """ + if value is None: + raise NoneNotAllowedError(parameter_name) + + if not isinstance(value, str): + raise StringTypeError(parameter_name) + + trimmed = value.strip() + if not trimmed: + raise EmptyStringError(parameter_name) + + +def require_non_blank_strings( + *parameter_names: str, + use_partial_bind: bool = True, +) -> Callable[[Callable[P, R]], Callable[P, R]]: + """Ensure specified parameters are non-blank strings. + + Args: + *parameter_names: Parameter names to validate. + use_partial_bind: When true, uses signature.bind_partial. When false, uses signature.bind to mirror normal call + validation. + + Raises: + MissingParameterError: If a specified parameter is missing from the call. + NoneNotAllowedError: If a parameter value is None. + StringTypeError: If a parameter value is not a string. + EmptyStringError: If a parameter value is empty. + """ + names = tuple(dict.fromkeys(parameter_names)) + + def decorator(func: Callable[P, R]) -> Callable[P, R]: + sig = inspect.signature(func) + binder = sig.bind_partial if use_partial_bind else sig.bind + func_name = func.__name__ + + @wraps(func) + def wrapper(*args: Any, **kwargs: Any) -> R: + bound = binder(*args, **kwargs) + bound.apply_defaults() + + for name in names: + if name not in bound.arguments: + raise MissingParameterError(name, func_name) + + value = bound.arguments[name] + + _validate_string_is_not_blank( + value, + parameter_name=name, + ) + + return func(*bound.args, **bound.kwargs) + + return wrapper + + return decorator + + +def validate_string_is_not_blank( + value: str, + parameter_name: str, +) -> None: + """Validate that a string is non-blank. + + Args: + value: The string to validate. + parameter_name: The name of the parameter being validated. + + Raises: + NoneNotAllowedError: If the value is None. + EmptyStringError: If the string is empty. 
+ """ + _validate_string_is_not_blank( + value, + parameter_name, + ) diff --git a/agents/agent1/src/python_agent_template/agents/agent1/validators/errors/__init__.py b/agents/agent1/src/python_agent_template/agents/agent1/validators/errors/__init__.py new file mode 100644 index 0000000..0be9080 --- /dev/null +++ b/agents/agent1/src/python_agent_template/agents/agent1/validators/errors/__init__.py @@ -0,0 +1,13 @@ +"""Error classes for validator utilities.""" + +from .empty_string_error import EmptyStringError +from .missing_parameter_error import MissingParameterError +from .none_not_allowed_error import NoneNotAllowedError +from .string_type_error import StringTypeError + +__all__ = [ + "EmptyStringError", + "MissingParameterError", + "NoneNotAllowedError", + "StringTypeError", +] diff --git a/agents/agent1/src/python_agent_template/agents/agent1/validators/errors/empty_string_error.py b/agents/agent1/src/python_agent_template/agents/agent1/validators/errors/empty_string_error.py new file mode 100644 index 0000000..3b582e5 --- /dev/null +++ b/agents/agent1/src/python_agent_template/agents/agent1/validators/errors/empty_string_error.py @@ -0,0 +1,12 @@ +"""Exception raised for empty string values.""" + + +class EmptyStringError(ValueError): + """Raised when a string is empty after trimming whitespace.""" + + __slots__ = () + + def __init__(self, parameter: str) -> None: + """Initialize the error with parameter context.""" + message: str = f"param '{parameter}' must be non-empty." 
+ super().__init__(message) diff --git a/agents/agent1/src/python_agent_template/agents/agent1/validators/errors/missing_parameter_error.py b/agents/agent1/src/python_agent_template/agents/agent1/validators/errors/missing_parameter_error.py new file mode 100644 index 0000000..5132940 --- /dev/null +++ b/agents/agent1/src/python_agent_template/agents/agent1/validators/errors/missing_parameter_error.py @@ -0,0 +1,13 @@ +"""Exception raised when a required parameter is missing.""" + +from __future__ import annotations + + +class MissingParameterError(TypeError): + """Raised when a required parameter is missing.""" + + __slots__ = () + + def __init__(self, parameter: str, func_name: str) -> None: + """Initialize the error with parameter context.""" + super().__init__(f"{func_name}(): missing param '{parameter}'.") diff --git a/agents/agent1/src/python_agent_template/agents/agent1/validators/errors/none_not_allowed_error.py b/agents/agent1/src/python_agent_template/agents/agent1/validators/errors/none_not_allowed_error.py new file mode 100644 index 0000000..7c96510 --- /dev/null +++ b/agents/agent1/src/python_agent_template/agents/agent1/validators/errors/none_not_allowed_error.py @@ -0,0 +1,14 @@ +"""Exception raised when None is not allowed for a parameter.""" + +from __future__ import annotations + + +class NoneNotAllowedError(TypeError): + """Raised when a parameter is None but disallowed.""" + + __slots__ = () + + def __init__(self, parameter: str) -> None: + """Initialize the error with parameter context.""" + message: str = f"param '{parameter}' cannot be None." 
+ super().__init__(message) diff --git a/agents/agent1/src/python_agent_template/agents/agent1/validators/errors/string_type_error.py b/agents/agent1/src/python_agent_template/agents/agent1/validators/errors/string_type_error.py new file mode 100644 index 0000000..027b5de --- /dev/null +++ b/agents/agent1/src/python_agent_template/agents/agent1/validators/errors/string_type_error.py @@ -0,0 +1,14 @@ +"""Exception raised when a parameter is not a string.""" + +from __future__ import annotations + + +class StringTypeError(TypeError): + """Raised when a parameter is not a string.""" + + __slots__ = () + + def __init__(self, parameter: str) -> None: + """Initialize the error with parameter context.""" + message: str = f"param '{parameter}' must be str." + super().__init__(message) diff --git a/agents/agent1/tests/test_agent.py b/agents/agent1/tests/test_agent.py index 5bf7aea..15dbd5a 100644 --- a/agents/agent1/tests/test_agent.py +++ b/agents/agent1/tests/test_agent.py @@ -3,7 +3,7 @@ import pytest from python_agent_template.agents.agent1 import AgentConfig, ExampleAgent -from python_agent_template.agents.agent1.agent import MissingNameError +from python_agent_template.agents.agent1.validators.errors import EmptyStringError def test_run_greets_name() -> None: @@ -15,5 +15,12 @@ def test_run_greets_name() -> None: def test_run_requires_name() -> None: """Agent raises when name is missing.""" agent = ExampleAgent() - with pytest.raises(MissingNameError, match="name"): + with pytest.raises(EmptyStringError, match="name"): agent.run("") + + +def test_run_rejects_whitespace_only_name() -> None: + """Agent validates whitespace-only names via decorator guard.""" + agent = ExampleAgent() + with pytest.raises(EmptyStringError, match="name"): + agent.run(" ") diff --git a/agents/agent1/tests/test_blank_string_validator.py b/agents/agent1/tests/test_blank_string_validator.py new file mode 100644 index 0000000..da2bfb5 --- /dev/null +++ 
b/agents/agent1/tests/test_blank_string_validator.py @@ -0,0 +1,71 @@ +"""Tests for blank string validator utilities.""" + +from __future__ import annotations + +import pytest + +from python_agent_template.agents.agent1.validators.blank_string_validator import ( + require_non_blank_strings, + validate_string_is_not_blank, +) +from python_agent_template.agents.agent1.validators.errors import ( + EmptyStringError, + MissingParameterError, + NoneNotAllowedError, + StringTypeError, +) + + +def test_validate_string_is_not_blank_accepts_non_blank() -> None: + """Allows non-blank strings.""" + validate_string_is_not_blank("Ada", "name") + + +def test_validate_string_is_not_blank_rejects_none() -> None: + """Raises when value is None.""" + with pytest.raises(NoneNotAllowedError, match="name"): + validate_string_is_not_blank(None, "name") # type: ignore[arg-type] + + +def test_validate_string_is_not_blank_rejects_empty() -> None: + """Raises when string is empty.""" + with pytest.raises(EmptyStringError, match="name"): + validate_string_is_not_blank("", "name") + + +def test_decorator_raises_missing_parameter() -> None: + """Decorator raises when required arg is missing.""" + + @require_non_blank_strings("first") # type: ignore[untyped-decorator] + def greet(*, first: str) -> str: + return f"hi {first}" + + with pytest.raises(MissingParameterError, match="first"): + greet() # type: ignore[call-arg] + + +def test_decorator_rejects_none_and_non_string() -> None: + """Decorator rejects None and non-string values.""" + + @require_non_blank_strings("first", "last") # type: ignore[untyped-decorator] + def greet(first: str, last: str) -> str: + return f"{first} {last}" + + with pytest.raises(NoneNotAllowedError, match="first"): + greet(None, "Doe") # type: ignore[arg-type] + + with pytest.raises(StringTypeError, match="last"): + greet("John", 123) # type: ignore[arg-type] + + +def test_decorator_rejects_blank_and_allows_deduped_order() -> None: + """Decorator rejects blanks and 
dedupes parameter list order.""" + + @require_non_blank_strings("first", "first", "last") # type: ignore[untyped-decorator] + def greet(first: str, last: str) -> str: + return f"{first} {last}" + + with pytest.raises(EmptyStringError, match="first"): + greet(" ", "Doe") + + assert greet("Ada", "Lovelace") == "Ada Lovelace" diff --git a/docs/manual/agent-guide-template.md b/docs/manual/agent-guide-template.md index 6f677c6..d578d09 100644 --- a/docs/manual/agent-guide-template.md +++ b/docs/manual/agent-guide-template.md @@ -8,6 +8,19 @@ Use this as a blueprint when creating or maintaining any agent in this monorepo. - Namespace per project: `python_agent_template.agents.` with namespace packages (no `__init__.py` at the namespace roots) per PyPA guidance: https://packaging.python.org/en/latest/guides/packaging-namespace-packages/ - Wheel-first images: build a wheel and install it in the runtime image for reproducibility and smaller layers. - Tasks via `uv run` + `poe` for consistent env management. +- Secure-by-default posture: treat secrets as env/secret-store only, validate all inputs, avoid logging sensitive data, and block unsafe shell usage by default. + +## Secure helpers (copy/adapt per agent) +- Validate inputs with simple guard clauses (e.g., reject blank/whitespace for required text fields) close to where data enters your agent. +- Prefer these helpers in agent logic and CLI parsing so new contributors follow the safer path by default. + +## Secure-by-default checklist (include in each agent README/docs) +- Secrets: load from environment/secret store only; never hardcode tokens/keys. +- Input validation: validate CLI/user input with guard clauses; reject blank/whitespace and unexpected values early. +- Logging/PII: avoid logging secrets or user-provided sensitive data; redact when unsure. +- External calls/commands: favor library calls over shell; if shelling out, build argv lists (no `shell=True`). 
+- Quality gate: run `uv run poe check` before pushing to catch format/lint/type/security/test issues. +- Tests: include at least one validation test and one test that guards unsafe command construction so contributors see the expected patterns. ## Structure (replace ``) ``` diff --git a/pyproject.toml b/pyproject.toml index 4e782fd..b549b57 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -210,6 +210,7 @@ mypy-agents = "python scripts/run_tasks_in_agents_if_exists.py mypy" mypy-root = "uv run mypy --config-file pyproject.toml" bandit = "uv run bandit -c pyproject.toml -r agents scripts docs/source" +lock-verify = "uv lock --locked" test = "python scripts/run_tasks_in_agents_if_exists.py test" markdown-code-lint = "uv run python scripts/check_md_code_blocks.py README.md docs/manual/*.md agents/**/README.md .github/instructions/*.md" @@ -217,15 +218,13 @@ pre-commit-install = "uv run pre-commit install --install-hooks --overwrite" install = "uv sync --all-packages --all-extras --dev -U --prerelease=if-necessary-or-explicit --no-group=docs" docs = "uv run python scripts/generate_docs.py" docs-install = "uv sync --all-packages --all-extras --dev -U --prerelease=if-necessary-or-explicit --group docs" -check = ["fmt", "lint", "pyright", "mypy", "bandit", "test", "markdown-code-lint"] -# Optional release/publish helpers (commented out by default) -# clean-dist-agents = "python scripts/run_tasks_in_agents_if_exists.py clean-dist" -# clean-dist-meta = "rm -rf dist" -# clean-dist = ["clean-dist-agents", "clean-dist-meta"] -# build-agents = "python scripts/run_tasks_in_agents_if_exists.py build" -# build-meta = "python -m flit build" -# build = ["build-agents", "build-meta"] -# publish = "uv publish" +check = ["lock-verify", "fmt", "lint", "pyright", "mypy", "bandit", "test", "markdown-code-lint"] +clean-dist = "rm -rf dist" +build-all-agents = "python scripts/run_tasks_in_agents_if_exists.py build" +build-changed-agents = "python scripts/run_tasks_in_changed_agents.py 
build" +build = ["clean-dist", "build-all-agents"] +build-changed = ["clean-dist", "build-changed-agents"] +publish = "uv publish" # Setup and Virtual Environment [tool.poe.tasks.venv] @@ -259,9 +258,9 @@ sequence = [ args = [{ name = "files", default = ".", positional = true, multiple = true }] [[tool.uv.index]] -name = "testpypi" -url = "https://test.pypi.org/simple/" -publish-url = "https://test.pypi.org/legacy/" +name = "github" +url = "https://nuget.pkg.github.com/pmalarme/index.json" +publish-url = "https://nuget.pkg.github.com/pmalarme/upload" explicit = true [build-system] diff --git a/shared_tasks.toml b/shared_tasks.toml index 2340a71..98fc693 100644 --- a/shared_tasks.toml +++ b/shared_tasks.toml @@ -9,12 +9,8 @@ format.ref = "fmt" lint = "ruff check" pyright = "pyright" -# publish builds and uploads the package to the configured index (e.g., GitHub Packages) -publish = "uv publish" - # repo/agent check bundle; when run with -C agents/, scopes to that agent check = ["fmt", "lint", "pyright", "mypy", "bandit", "test"] -clean-dist = "rm -rf dist" build-package = "uv build" build = ["build-package"] diff --git a/uv.lock b/uv.lock new file mode 100644 index 0000000..ae14ce6 --- /dev/null +++ b/uv.lock @@ -0,0 +1,1448 @@ +version = 1 +revision = 3 +requires-python = ">=3.10" +resolution-markers = [ + "python_full_version >= '3.12' and sys_platform == 'linux'", + "python_full_version < '3.12' and sys_platform == 'linux'", + "sys_platform == 'darwin'", + "sys_platform == 'win32'", +] +supported-markers = [ + "sys_platform == 'linux'", + "sys_platform == 'darwin'", + "sys_platform == 'win32'", +] + +[manifest] +members = [ + "agent1", + "python-agent-template", +] + +[[package]] +name = "agent1" +version = "0.1.0" +source = { editable = "agents/agent1" } + +[[package]] +name = "alabaster" +version = "0.7.16" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/c9/3e/13dd8e5ed9094e734ac430b5d0eb4f2bb001708a8b7856cbf8e084e001ba/alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65", size = 23776, upload-time = "2024-01-10T00:56:10.189Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/32/34/d4e1c02d3bee589efb5dfa17f88ea08bdb3e3eac12bc475462aec52ed223/alabaster-0.7.16-py3-none-any.whl", hash = "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92", size = 13511, upload-time = "2024-01-10T00:56:08.388Z" }, +] + +[[package]] +name = "annotated-types" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, +] + +[[package]] +name = "babel" +version = "2.17.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7d/6b/d52e42361e1aa00709585ecc30b3f9684b3ab62530771402248b1b1d6240/babel-2.17.0.tar.gz", hash = "sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d", size = 9951852, upload-time = "2025-02-01T15:17:41.026Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/b8/3fe70c75fe32afc4bb507f75563d39bc5642255d1d94f1f23604725780bf/babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2", size = 10182537, upload-time = "2025-02-01T15:17:37.39Z" }, +] + 
+[[package]] +name = "backports-asyncio-runner" +version = "1.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8e/ff/70dca7d7cb1cbc0edb2c6cc0c38b65cba36cccc491eca64cabd5fe7f8670/backports_asyncio_runner-1.2.0.tar.gz", hash = "sha256:a5aa7b2b7d8f8bfcaa2b57313f70792df84e32a2a746f585213373f900b42162", size = 69893, upload-time = "2025-07-02T02:27:15.685Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/59/76ab57e3fe74484f48a53f8e337171b4a2349e506eabe136d7e01d059086/backports_asyncio_runner-1.2.0-py3-none-any.whl", hash = "sha256:0da0a936a8aeb554eccb426dc55af3ba63bcdc69fa1a600b5bb305413a4477b5", size = 12313, upload-time = "2025-07-02T02:27:14.263Z" }, +] + +[[package]] +name = "bandit" +version = "1.9.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "pyyaml", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "rich", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "stevedore", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/89/76/a7f3e639b78601118aaa4a394db2c66ae2597fbd8c39644c32874ed11e0c/bandit-1.9.3.tar.gz", hash = "sha256:ade4b9b7786f89ef6fc7344a52b34558caec5da74cb90373aed01de88472f774", size = 4242154, upload-time = "2026-01-19T04:05:22.802Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e0/0b/8bdc52111c83e2dc2f97403dc87c0830b8989d9ae45732b34b686326fb2c/bandit-1.9.3-py3-none-any.whl", hash = "sha256:4745917c88d2246def79748bde5e08b9d5e9b92f877863d43fab70cd8814ce6a", size = 134451, upload-time = "2026-01-19T04:05:20.938Z" }, +] + +[[package]] +name = "certifi" +version = "2026.1.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/e0/2d/a891ca51311197f6ad14a7ef42e2399f36cf2f9bd44752b3dc4eab60fdc5/certifi-2026.1.4.tar.gz", hash = "sha256:ac726dd470482006e014ad384921ed6438c457018f4b3d204aea4281258b2120", size = 154268, upload-time = "2026-01-04T02:42:41.825Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e6/ad/3cc14f097111b4de0040c83a525973216457bbeeb63739ef1ed275c1c021/certifi-2026.1.4-py3-none-any.whl", hash = "sha256:9943707519e4add1115f44c2bc244f782c0249876bf51b6599fee1ffbedd685c", size = 152900, upload-time = "2026-01-04T02:42:40.15Z" }, +] + +[[package]] +name = "cfgv" +version = "3.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/4e/b5/721b8799b04bf9afe054a3899c6cf4e880fcf8563cc71c15610242490a0c/cfgv-3.5.0.tar.gz", hash = "sha256:d5b1034354820651caa73ede66a6294d6e95c1b00acc5e9b098e917404669132", size = 7334, upload-time = "2025-11-19T20:55:51.612Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/db/3c/33bac158f8ab7f89b2e59426d5fe2e4f63f7ed25df84c036890172b412b5/cfgv-3.5.0-py2.py3-none-any.whl", hash = "sha256:a8dc6b26ad22ff227d2634a65cb388215ce6cc96bbcc5cfde7641ae87e8dacc0", size = 7445, upload-time = "2025-11-19T20:55:50.744Z" }, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/13/69/33ddede1939fdd074bce5434295f38fae7136463422fe4fd3e0e89b98062/charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a", size = 129418, upload-time = "2025-10-14T04:42:32.879Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1f/b8/6d51fc1d52cbd52cd4ccedd5b5b2f0f6a11bbf6765c782298b0f3e808541/charset_normalizer-3.4.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e824f1492727fa856dd6eda4f7cee25f8518a12f3c4a56a74e8095695089cf6d", size = 209709, upload-time = 
"2025-10-14T04:40:11.385Z" }, + { url = "https://files.pythonhosted.org/packages/5c/af/1f9d7f7faafe2ddfb6f72a2e07a548a629c61ad510fe60f9630309908fef/charset_normalizer-3.4.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4bd5d4137d500351a30687c2d3971758aac9a19208fc110ccb9d7188fbe709e8", size = 148814, upload-time = "2025-10-14T04:40:13.135Z" }, + { url = "https://files.pythonhosted.org/packages/79/3d/f2e3ac2bbc056ca0c204298ea4e3d9db9b4afe437812638759db2c976b5f/charset_normalizer-3.4.4-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:027f6de494925c0ab2a55eab46ae5129951638a49a34d87f4c3eda90f696b4ad", size = 144467, upload-time = "2025-10-14T04:40:14.728Z" }, + { url = "https://files.pythonhosted.org/packages/ec/85/1bf997003815e60d57de7bd972c57dc6950446a3e4ccac43bc3070721856/charset_normalizer-3.4.4-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f820802628d2694cb7e56db99213f930856014862f3fd943d290ea8438d07ca8", size = 162280, upload-time = "2025-10-14T04:40:16.14Z" }, + { url = "https://files.pythonhosted.org/packages/3e/8e/6aa1952f56b192f54921c436b87f2aaf7c7a7c3d0d1a765547d64fd83c13/charset_normalizer-3.4.4-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:798d75d81754988d2565bff1b97ba5a44411867c0cf32b77a7e8f8d84796b10d", size = 159454, upload-time = "2025-10-14T04:40:17.567Z" }, + { url = "https://files.pythonhosted.org/packages/36/3b/60cbd1f8e93aa25d1c669c649b7a655b0b5fb4c571858910ea9332678558/charset_normalizer-3.4.4-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d1bb833febdff5c8927f922386db610b49db6e0d4f4ee29601d71e7c2694313", size = 153609, upload-time = "2025-10-14T04:40:19.08Z" }, + { url = 
"https://files.pythonhosted.org/packages/64/91/6a13396948b8fd3c4b4fd5bc74d045f5637d78c9675585e8e9fbe5636554/charset_normalizer-3.4.4-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:9cd98cdc06614a2f768d2b7286d66805f94c48cde050acdbbb7db2600ab3197e", size = 151849, upload-time = "2025-10-14T04:40:20.607Z" }, + { url = "https://files.pythonhosted.org/packages/b7/7a/59482e28b9981d105691e968c544cc0df3b7d6133152fb3dcdc8f135da7a/charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:077fbb858e903c73f6c9db43374fd213b0b6a778106bc7032446a8e8b5b38b93", size = 151586, upload-time = "2025-10-14T04:40:21.719Z" }, + { url = "https://files.pythonhosted.org/packages/92/59/f64ef6a1c4bdd2baf892b04cd78792ed8684fbc48d4c2afe467d96b4df57/charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:244bfb999c71b35de57821b8ea746b24e863398194a4014e4c76adc2bbdfeff0", size = 145290, upload-time = "2025-10-14T04:40:23.069Z" }, + { url = "https://files.pythonhosted.org/packages/6b/63/3bf9f279ddfa641ffa1962b0db6a57a9c294361cc2f5fcac997049a00e9c/charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:64b55f9dce520635f018f907ff1b0df1fdc31f2795a922fb49dd14fbcdf48c84", size = 163663, upload-time = "2025-10-14T04:40:24.17Z" }, + { url = "https://files.pythonhosted.org/packages/ed/09/c9e38fc8fa9e0849b172b581fd9803bdf6e694041127933934184e19f8c3/charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:faa3a41b2b66b6e50f84ae4a68c64fcd0c44355741c6374813a800cd6695db9e", size = 151964, upload-time = "2025-10-14T04:40:25.368Z" }, + { url = "https://files.pythonhosted.org/packages/d2/d1/d28b747e512d0da79d8b6a1ac18b7ab2ecfd81b2944c4c710e166d8dd09c/charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:6515f3182dbe4ea06ced2d9e8666d97b46ef4c75e326b79bb624110f122551db", size = 161064, upload-time = "2025-10-14T04:40:26.806Z" }, + { url = 
"https://files.pythonhosted.org/packages/bb/9a/31d62b611d901c3b9e5500c36aab0ff5eb442043fb3a1c254200d3d397d9/charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cc00f04ed596e9dc0da42ed17ac5e596c6ccba999ba6bd92b0e0aef2f170f2d6", size = 155015, upload-time = "2025-10-14T04:40:28.284Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f3/107e008fa2bff0c8b9319584174418e5e5285fef32f79d8ee6a430d0039c/charset_normalizer-3.4.4-cp310-cp310-win32.whl", hash = "sha256:f34be2938726fc13801220747472850852fe6b1ea75869a048d6f896838c896f", size = 99792, upload-time = "2025-10-14T04:40:29.613Z" }, + { url = "https://files.pythonhosted.org/packages/eb/66/e396e8a408843337d7315bab30dbf106c38966f1819f123257f5520f8a96/charset_normalizer-3.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:a61900df84c667873b292c3de315a786dd8dac506704dea57bc957bd31e22c7d", size = 107198, upload-time = "2025-10-14T04:40:30.644Z" }, + { url = "https://files.pythonhosted.org/packages/b5/58/01b4f815bf0312704c267f2ccb6e5d42bcc7752340cd487bc9f8c3710597/charset_normalizer-3.4.4-cp310-cp310-win_arm64.whl", hash = "sha256:cead0978fc57397645f12578bfd2d5ea9138ea0fac82b2f63f7f7c6877986a69", size = 100262, upload-time = "2025-10-14T04:40:32.108Z" }, + { url = "https://files.pythonhosted.org/packages/ed/27/c6491ff4954e58a10f69ad90aca8a1b6fe9c5d3c6f380907af3c37435b59/charset_normalizer-3.4.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6e1fcf0720908f200cd21aa4e6750a48ff6ce4afe7ff5a79a90d5ed8a08296f8", size = 206988, upload-time = "2025-10-14T04:40:33.79Z" }, + { url = "https://files.pythonhosted.org/packages/94/59/2e87300fe67ab820b5428580a53cad894272dbb97f38a7a814a2a1ac1011/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f819d5fe9234f9f82d75bdfa9aef3a3d72c4d24a6e57aeaebba32a704553aa0", size = 147324, upload-time = "2025-10-14T04:40:34.961Z" }, + { url = 
"https://files.pythonhosted.org/packages/07/fb/0cf61dc84b2b088391830f6274cb57c82e4da8bbc2efeac8c025edb88772/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a59cb51917aa591b1c4e6a43c132f0cdc3c76dbad6155df4e28ee626cc77a0a3", size = 142742, upload-time = "2025-10-14T04:40:36.105Z" }, + { url = "https://files.pythonhosted.org/packages/62/8b/171935adf2312cd745d290ed93cf16cf0dfe320863ab7cbeeae1dcd6535f/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8ef3c867360f88ac904fd3f5e1f902f13307af9052646963ee08ff4f131adafc", size = 160863, upload-time = "2025-10-14T04:40:37.188Z" }, + { url = "https://files.pythonhosted.org/packages/09/73/ad875b192bda14f2173bfc1bc9a55e009808484a4b256748d931b6948442/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d9e45d7faa48ee908174d8fe84854479ef838fc6a705c9315372eacbc2f02897", size = 157837, upload-time = "2025-10-14T04:40:38.435Z" }, + { url = "https://files.pythonhosted.org/packages/6d/fc/de9cce525b2c5b94b47c70a4b4fb19f871b24995c728e957ee68ab1671ea/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:840c25fb618a231545cbab0564a799f101b63b9901f2569faecd6b222ac72381", size = 151550, upload-time = "2025-10-14T04:40:40.053Z" }, + { url = "https://files.pythonhosted.org/packages/55/c2/43edd615fdfba8c6f2dfbd459b25a6b3b551f24ea21981e23fb768503ce1/charset_normalizer-3.4.4-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ca5862d5b3928c4940729dacc329aa9102900382fea192fc5e52eb69d6093815", size = 149162, upload-time = "2025-10-14T04:40:41.163Z" }, + { url = "https://files.pythonhosted.org/packages/03/86/bde4ad8b4d0e9429a4e82c1e8f5c659993a9a863ad62c7df05cf7b678d75/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:d9c7f57c3d666a53421049053eaacdd14bbd0a528e2186fcb2e672effd053bb0", size = 150019, upload-time = "2025-10-14T04:40:42.276Z" }, + { url = "https://files.pythonhosted.org/packages/1f/86/a151eb2af293a7e7bac3a739b81072585ce36ccfb4493039f49f1d3cae8c/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:277e970e750505ed74c832b4bf75dac7476262ee2a013f5574dd49075879e161", size = 143310, upload-time = "2025-10-14T04:40:43.439Z" }, + { url = "https://files.pythonhosted.org/packages/b5/fe/43dae6144a7e07b87478fdfc4dbe9efd5defb0e7ec29f5f58a55aeef7bf7/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:31fd66405eaf47bb62e8cd575dc621c56c668f27d46a61d975a249930dd5e2a4", size = 162022, upload-time = "2025-10-14T04:40:44.547Z" }, + { url = "https://files.pythonhosted.org/packages/80/e6/7aab83774f5d2bca81f42ac58d04caf44f0cc2b65fc6db2b3b2e8a05f3b3/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:0d3d8f15c07f86e9ff82319b3d9ef6f4bf907608f53fe9d92b28ea9ae3d1fd89", size = 149383, upload-time = "2025-10-14T04:40:46.018Z" }, + { url = "https://files.pythonhosted.org/packages/4f/e8/b289173b4edae05c0dde07f69f8db476a0b511eac556dfe0d6bda3c43384/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:9f7fcd74d410a36883701fafa2482a6af2ff5ba96b9a620e9e0721e28ead5569", size = 159098, upload-time = "2025-10-14T04:40:47.081Z" }, + { url = "https://files.pythonhosted.org/packages/d8/df/fe699727754cae3f8478493c7f45f777b17c3ef0600e28abfec8619eb49c/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ebf3e58c7ec8a8bed6d66a75d7fb37b55e5015b03ceae72a8e7c74495551e224", size = 152991, upload-time = "2025-10-14T04:40:48.246Z" }, + { url = "https://files.pythonhosted.org/packages/1a/86/584869fe4ddb6ffa3bd9f491b87a01568797fb9bd8933f557dba9771beaf/charset_normalizer-3.4.4-cp311-cp311-win32.whl", hash = "sha256:eecbc200c7fd5ddb9a7f16c7decb07b566c29fa2161a16cf67b8d068bd21690a", 
size = 99456, upload-time = "2025-10-14T04:40:49.376Z" }, + { url = "https://files.pythonhosted.org/packages/65/f6/62fdd5feb60530f50f7e38b4f6a1d5203f4d16ff4f9f0952962c044e919a/charset_normalizer-3.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:5ae497466c7901d54b639cf42d5b8c1b6a4fead55215500d2f486d34db48d016", size = 106978, upload-time = "2025-10-14T04:40:50.844Z" }, + { url = "https://files.pythonhosted.org/packages/7a/9d/0710916e6c82948b3be62d9d398cb4fcf4e97b56d6a6aeccd66c4b2f2bd5/charset_normalizer-3.4.4-cp311-cp311-win_arm64.whl", hash = "sha256:65e2befcd84bc6f37095f5961e68a6f077bf44946771354a28ad434c2cce0ae1", size = 99969, upload-time = "2025-10-14T04:40:52.272Z" }, + { url = "https://files.pythonhosted.org/packages/f3/85/1637cd4af66fa687396e757dec650f28025f2a2f5a5531a3208dc0ec43f2/charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394", size = 208425, upload-time = "2025-10-14T04:40:53.353Z" }, + { url = "https://files.pythonhosted.org/packages/9d/6a/04130023fef2a0d9c62d0bae2649b69f7b7d8d24ea5536feef50551029df/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25", size = 148162, upload-time = "2025-10-14T04:40:54.558Z" }, + { url = "https://files.pythonhosted.org/packages/78/29/62328d79aa60da22c9e0b9a66539feae06ca0f5a4171ac4f7dc285b83688/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef", size = 144558, upload-time = "2025-10-14T04:40:55.677Z" }, + { url = "https://files.pythonhosted.org/packages/86/bb/b32194a4bf15b88403537c2e120b817c61cd4ecffa9b6876e941c3ee38fe/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d", size = 161497, upload-time = "2025-10-14T04:40:57.217Z" }, + { url = "https://files.pythonhosted.org/packages/19/89/a54c82b253d5b9b111dc74aca196ba5ccfcca8242d0fb64146d4d3183ff1/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8", size = 159240, upload-time = "2025-10-14T04:40:58.358Z" }, + { url = "https://files.pythonhosted.org/packages/c0/10/d20b513afe03acc89ec33948320a5544d31f21b05368436d580dec4e234d/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86", size = 153471, upload-time = "2025-10-14T04:40:59.468Z" }, + { url = "https://files.pythonhosted.org/packages/61/fa/fbf177b55bdd727010f9c0a3c49eefa1d10f960e5f09d1d887bf93c2e698/charset_normalizer-3.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a", size = 150864, upload-time = "2025-10-14T04:41:00.623Z" }, + { url = "https://files.pythonhosted.org/packages/05/12/9fbc6a4d39c0198adeebbde20b619790e9236557ca59fc40e0e3cebe6f40/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f", size = 150647, upload-time = "2025-10-14T04:41:01.754Z" }, + { url = "https://files.pythonhosted.org/packages/ad/1f/6a9a593d52e3e8c5d2b167daf8c6b968808efb57ef4c210acb907c365bc4/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc", size = 145110, upload-time = "2025-10-14T04:41:03.231Z" }, + { url = 
"https://files.pythonhosted.org/packages/30/42/9a52c609e72471b0fc54386dc63c3781a387bb4fe61c20231a4ebcd58bdd/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf", size = 162839, upload-time = "2025-10-14T04:41:04.715Z" }, + { url = "https://files.pythonhosted.org/packages/c4/5b/c0682bbf9f11597073052628ddd38344a3d673fda35a36773f7d19344b23/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15", size = 150667, upload-time = "2025-10-14T04:41:05.827Z" }, + { url = "https://files.pythonhosted.org/packages/e4/24/a41afeab6f990cf2daf6cb8c67419b63b48cf518e4f56022230840c9bfb2/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9", size = 160535, upload-time = "2025-10-14T04:41:06.938Z" }, + { url = "https://files.pythonhosted.org/packages/2a/e5/6a4ce77ed243c4a50a1fecca6aaaab419628c818a49434be428fe24c9957/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0", size = 154816, upload-time = "2025-10-14T04:41:08.101Z" }, + { url = "https://files.pythonhosted.org/packages/a8/ef/89297262b8092b312d29cdb2517cb1237e51db8ecef2e9af5edbe7b683b1/charset_normalizer-3.4.4-cp312-cp312-win32.whl", hash = "sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26", size = 99694, upload-time = "2025-10-14T04:41:09.23Z" }, + { url = "https://files.pythonhosted.org/packages/3d/2d/1e5ed9dd3b3803994c155cd9aacb60c82c331bad84daf75bcb9c91b3295e/charset_normalizer-3.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525", size = 107131, upload-time = "2025-10-14T04:41:10.467Z" }, + { url = 
"https://files.pythonhosted.org/packages/d0/d9/0ed4c7098a861482a7b6a95603edce4c0d9db2311af23da1fb2b75ec26fc/charset_normalizer-3.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3", size = 100390, upload-time = "2025-10-14T04:41:11.915Z" }, + { url = "https://files.pythonhosted.org/packages/97/45/4b3a1239bbacd321068ea6e7ac28875b03ab8bc0aa0966452db17cd36714/charset_normalizer-3.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e1f185f86a6f3403aa2420e815904c67b2f9ebc443f045edd0de921108345794", size = 208091, upload-time = "2025-10-14T04:41:13.346Z" }, + { url = "https://files.pythonhosted.org/packages/7d/62/73a6d7450829655a35bb88a88fca7d736f9882a27eacdca2c6d505b57e2e/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b39f987ae8ccdf0d2642338faf2abb1862340facc796048b604ef14919e55ed", size = 147936, upload-time = "2025-10-14T04:41:14.461Z" }, + { url = "https://files.pythonhosted.org/packages/89/c5/adb8c8b3d6625bef6d88b251bbb0d95f8205831b987631ab0c8bb5d937c2/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3162d5d8ce1bb98dd51af660f2121c55d0fa541b46dff7bb9b9f86ea1d87de72", size = 144180, upload-time = "2025-10-14T04:41:15.588Z" }, + { url = "https://files.pythonhosted.org/packages/91/ed/9706e4070682d1cc219050b6048bfd293ccf67b3d4f5a4f39207453d4b99/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:81d5eb2a312700f4ecaa977a8235b634ce853200e828fbadf3a9c50bab278328", size = 161346, upload-time = "2025-10-14T04:41:16.738Z" }, + { url = "https://files.pythonhosted.org/packages/d5/0d/031f0d95e4972901a2f6f09ef055751805ff541511dc1252ba3ca1f80cf5/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:5bd2293095d766545ec1a8f612559f6b40abc0eb18bb2f5d1171872d34036ede", size = 158874, upload-time = "2025-10-14T04:41:17.923Z" }, + { url = "https://files.pythonhosted.org/packages/f5/83/6ab5883f57c9c801ce5e5677242328aa45592be8a00644310a008d04f922/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8a8b89589086a25749f471e6a900d3f662d1d3b6e2e59dcecf787b1cc3a1894", size = 153076, upload-time = "2025-10-14T04:41:19.106Z" }, + { url = "https://files.pythonhosted.org/packages/75/1e/5ff781ddf5260e387d6419959ee89ef13878229732732ee73cdae01800f2/charset_normalizer-3.4.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc7637e2f80d8530ee4a78e878bce464f70087ce73cf7c1caf142416923b98f1", size = 150601, upload-time = "2025-10-14T04:41:20.245Z" }, + { url = "https://files.pythonhosted.org/packages/d7/57/71be810965493d3510a6ca79b90c19e48696fb1ff964da319334b12677f0/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f8bf04158c6b607d747e93949aa60618b61312fe647a6369f88ce2ff16043490", size = 150376, upload-time = "2025-10-14T04:41:21.398Z" }, + { url = "https://files.pythonhosted.org/packages/e5/d5/c3d057a78c181d007014feb7e9f2e65905a6c4ef182c0ddf0de2924edd65/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:554af85e960429cf30784dd47447d5125aaa3b99a6f0683589dbd27e2f45da44", size = 144825, upload-time = "2025-10-14T04:41:22.583Z" }, + { url = "https://files.pythonhosted.org/packages/e6/8c/d0406294828d4976f275ffbe66f00266c4b3136b7506941d87c00cab5272/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:74018750915ee7ad843a774364e13a3db91682f26142baddf775342c3f5b1133", size = 162583, upload-time = "2025-10-14T04:41:23.754Z" }, + { url = 
"https://files.pythonhosted.org/packages/d7/24/e2aa1f18c8f15c4c0e932d9287b8609dd30ad56dbe41d926bd846e22fb8d/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c0463276121fdee9c49b98908b3a89c39be45d86d1dbaa22957e38f6321d4ce3", size = 150366, upload-time = "2025-10-14T04:41:25.27Z" }, + { url = "https://files.pythonhosted.org/packages/e4/5b/1e6160c7739aad1e2df054300cc618b06bf784a7a164b0f238360721ab86/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:362d61fd13843997c1c446760ef36f240cf81d3ebf74ac62652aebaf7838561e", size = 160300, upload-time = "2025-10-14T04:41:26.725Z" }, + { url = "https://files.pythonhosted.org/packages/7a/10/f882167cd207fbdd743e55534d5d9620e095089d176d55cb22d5322f2afd/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a26f18905b8dd5d685d6d07b0cdf98a79f3c7a918906af7cc143ea2e164c8bc", size = 154465, upload-time = "2025-10-14T04:41:28.322Z" }, + { url = "https://files.pythonhosted.org/packages/89/66/c7a9e1b7429be72123441bfdbaf2bc13faab3f90b933f664db506dea5915/charset_normalizer-3.4.4-cp313-cp313-win32.whl", hash = "sha256:9b35f4c90079ff2e2edc5b26c0c77925e5d2d255c42c74fdb70fb49b172726ac", size = 99404, upload-time = "2025-10-14T04:41:29.95Z" }, + { url = "https://files.pythonhosted.org/packages/c4/26/b9924fa27db384bdcd97ab83b4f0a8058d96ad9626ead570674d5e737d90/charset_normalizer-3.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:b435cba5f4f750aa6c0a0d92c541fb79f69a387c91e61f1795227e4ed9cece14", size = 107092, upload-time = "2025-10-14T04:41:31.188Z" }, + { url = "https://files.pythonhosted.org/packages/af/8f/3ed4bfa0c0c72a7ca17f0380cd9e4dd842b09f664e780c13cff1dcf2ef1b/charset_normalizer-3.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:542d2cee80be6f80247095cc36c418f7bddd14f4a6de45af91dfad36d817bba2", size = 100408, upload-time = "2025-10-14T04:41:32.624Z" }, + { url = 
"https://files.pythonhosted.org/packages/2a/35/7051599bd493e62411d6ede36fd5af83a38f37c4767b92884df7301db25d/charset_normalizer-3.4.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:da3326d9e65ef63a817ecbcc0df6e94463713b754fe293eaa03da99befb9a5bd", size = 207746, upload-time = "2025-10-14T04:41:33.773Z" }, + { url = "https://files.pythonhosted.org/packages/10/9a/97c8d48ef10d6cd4fcead2415523221624bf58bcf68a802721a6bc807c8f/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8af65f14dc14a79b924524b1e7fffe304517b2bff5a58bf64f30b98bbc5079eb", size = 147889, upload-time = "2025-10-14T04:41:34.897Z" }, + { url = "https://files.pythonhosted.org/packages/10/bf/979224a919a1b606c82bd2c5fa49b5c6d5727aa47b4312bb27b1734f53cd/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74664978bb272435107de04e36db5a9735e78232b85b77d45cfb38f758efd33e", size = 143641, upload-time = "2025-10-14T04:41:36.116Z" }, + { url = "https://files.pythonhosted.org/packages/ba/33/0ad65587441fc730dc7bd90e9716b30b4702dc7b617e6ba4997dc8651495/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:752944c7ffbfdd10c074dc58ec2d5a8a4cd9493b314d367c14d24c17684ddd14", size = 160779, upload-time = "2025-10-14T04:41:37.229Z" }, + { url = "https://files.pythonhosted.org/packages/67/ed/331d6b249259ee71ddea93f6f2f0a56cfebd46938bde6fcc6f7b9a3d0e09/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1f13550535ad8cff21b8d757a3257963e951d96e20ec82ab44bc64aeb62a191", size = 159035, upload-time = "2025-10-14T04:41:38.368Z" }, + { url = 
"https://files.pythonhosted.org/packages/67/ff/f6b948ca32e4f2a4576aa129d8bed61f2e0543bf9f5f2b7fc3758ed005c9/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ecaae4149d99b1c9e7b88bb03e3221956f68fd6d50be2ef061b2381b61d20838", size = 152542, upload-time = "2025-10-14T04:41:39.862Z" }, + { url = "https://files.pythonhosted.org/packages/16/85/276033dcbcc369eb176594de22728541a925b2632f9716428c851b149e83/charset_normalizer-3.4.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cb6254dc36b47a990e59e1068afacdcd02958bdcce30bb50cc1700a8b9d624a6", size = 149524, upload-time = "2025-10-14T04:41:41.319Z" }, + { url = "https://files.pythonhosted.org/packages/9e/f2/6a2a1f722b6aba37050e626530a46a68f74e63683947a8acff92569f979a/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c8ae8a0f02f57a6e61203a31428fa1d677cbe50c93622b4149d5c0f319c1d19e", size = 150395, upload-time = "2025-10-14T04:41:42.539Z" }, + { url = "https://files.pythonhosted.org/packages/60/bb/2186cb2f2bbaea6338cad15ce23a67f9b0672929744381e28b0592676824/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:47cc91b2f4dd2833fddaedd2893006b0106129d4b94fdb6af1f4ce5a9965577c", size = 143680, upload-time = "2025-10-14T04:41:43.661Z" }, + { url = "https://files.pythonhosted.org/packages/7d/a5/bf6f13b772fbb2a90360eb620d52ed8f796f3c5caee8398c3b2eb7b1c60d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:82004af6c302b5d3ab2cfc4cc5f29db16123b1a8417f2e25f9066f91d4411090", size = 162045, upload-time = "2025-10-14T04:41:44.821Z" }, + { url = "https://files.pythonhosted.org/packages/df/c5/d1be898bf0dc3ef9030c3825e5d3b83f2c528d207d246cbabe245966808d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b7d8f6c26245217bd2ad053761201e9f9680f8ce52f0fcd8d0755aeae5b2152", size = 149687, upload-time = "2025-10-14T04:41:46.442Z" }, 
+ { url = "https://files.pythonhosted.org/packages/a5/42/90c1f7b9341eef50c8a1cb3f098ac43b0508413f33affd762855f67a410e/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:799a7a5e4fb2d5898c60b640fd4981d6a25f1c11790935a44ce38c54e985f828", size = 160014, upload-time = "2025-10-14T04:41:47.631Z" }, + { url = "https://files.pythonhosted.org/packages/76/be/4d3ee471e8145d12795ab655ece37baed0929462a86e72372fd25859047c/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:99ae2cffebb06e6c22bdc25801d7b30f503cc87dbd283479e7b606f70aff57ec", size = 154044, upload-time = "2025-10-14T04:41:48.81Z" }, + { url = "https://files.pythonhosted.org/packages/b0/6f/8f7af07237c34a1defe7defc565a9bc1807762f672c0fde711a4b22bf9c0/charset_normalizer-3.4.4-cp314-cp314-win32.whl", hash = "sha256:f9d332f8c2a2fcbffe1378594431458ddbef721c1769d78e2cbc06280d8155f9", size = 99940, upload-time = "2025-10-14T04:41:49.946Z" }, + { url = "https://files.pythonhosted.org/packages/4b/51/8ade005e5ca5b0d80fb4aff72a3775b325bdc3d27408c8113811a7cbe640/charset_normalizer-3.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:8a6562c3700cce886c5be75ade4a5db4214fda19fede41d9792d100288d8f94c", size = 107104, upload-time = "2025-10-14T04:41:51.051Z" }, + { url = "https://files.pythonhosted.org/packages/da/5f/6b8f83a55bb8278772c5ae54a577f3099025f9ade59d0136ac24a0df4bde/charset_normalizer-3.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:de00632ca48df9daf77a2c65a484531649261ec9f25489917f09e455cb09ddb2", size = 100743, upload-time = "2025-10-14T04:41:52.122Z" }, + { url = "https://files.pythonhosted.org/packages/0a/4c/925909008ed5a988ccbb72dcc897407e5d6d3bd72410d69e051fc0c14647/charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f", size = 53402, upload-time = "2025-10-14T04:42:31.76Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "coverage" +version = "7.13.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/23/f9/e92df5e07f3fc8d4c7f9a0f146ef75446bf870351cd37b788cf5897f8079/coverage-7.13.1.tar.gz", hash = "sha256:b7593fe7eb5feaa3fbb461ac79aac9f9fc0387a5ca8080b0c6fe2ca27b091afd", size = 825862, upload-time = "2025-12-28T15:42:56.969Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2d/9a/3742e58fd04b233df95c012ee9f3dfe04708a5e1d32613bd2d47d4e1be0d/coverage-7.13.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e1fa280b3ad78eea5be86f94f461c04943d942697e0dac889fa18fff8f5f9147", size = 218633, upload-time = "2025-12-28T15:40:10.165Z" }, + { url = "https://files.pythonhosted.org/packages/7e/45/7e6bdc94d89cd7c8017ce735cf50478ddfe765d4fbf0c24d71d30ea33d7a/coverage-7.13.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c3d8c679607220979434f494b139dfb00131ebf70bb406553d69c1ff01a5c33d", size = 219147, upload-time = "2025-12-28T15:40:12.069Z" }, + { url = "https://files.pythonhosted.org/packages/f7/38/0d6a258625fd7f10773fe94097dc16937a5f0e3e0cdf3adef67d3ac6baef/coverage-7.13.1-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:339dc63b3eba969067b00f41f15ad161bf2946613156fb131266d8debc8e44d0", size = 245894, upload-time = "2025-12-28T15:40:13.556Z" }, + { url = 
"https://files.pythonhosted.org/packages/27/58/409d15ea487986994cbd4d06376e9860e9b157cfbfd402b1236770ab8dd2/coverage-7.13.1-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:db622b999ffe49cb891f2fff3b340cdc2f9797d01a0a202a0973ba2562501d90", size = 247721, upload-time = "2025-12-28T15:40:15.37Z" }, + { url = "https://files.pythonhosted.org/packages/da/bf/6e8056a83fd7a96c93341f1ffe10df636dd89f26d5e7b9ca511ce3bcf0df/coverage-7.13.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1443ba9acbb593fa7c1c29e011d7c9761545fe35e7652e85ce7f51a16f7e08d", size = 249585, upload-time = "2025-12-28T15:40:17.226Z" }, + { url = "https://files.pythonhosted.org/packages/f4/15/e1daff723f9f5959acb63cbe35b11203a9df77ee4b95b45fffd38b318390/coverage-7.13.1-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c832ec92c4499ac463186af72f9ed4d8daec15499b16f0a879b0d1c8e5cf4a3b", size = 246597, upload-time = "2025-12-28T15:40:19.028Z" }, + { url = "https://files.pythonhosted.org/packages/74/a6/1efd31c5433743a6ddbc9d37ac30c196bb07c7eab3d74fbb99b924c93174/coverage-7.13.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:562ec27dfa3f311e0db1ba243ec6e5f6ab96b1edfcfc6cf86f28038bc4961ce6", size = 247626, upload-time = "2025-12-28T15:40:20.846Z" }, + { url = "https://files.pythonhosted.org/packages/6d/9f/1609267dd3e749f57fdd66ca6752567d1c13b58a20a809dc409b263d0b5f/coverage-7.13.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:4de84e71173d4dada2897e5a0e1b7877e5eefbfe0d6a44edee6ce31d9b8ec09e", size = 245629, upload-time = "2025-12-28T15:40:22.397Z" }, + { url = "https://files.pythonhosted.org/packages/e2/f6/6815a220d5ec2466383d7cc36131b9fa6ecbe95c50ec52a631ba733f306a/coverage-7.13.1-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:a5a68357f686f8c4d527a2dc04f52e669c2fc1cbde38f6f7eb6a0e58cbd17cae", size = 245901, upload-time = "2025-12-28T15:40:23.836Z" }, + { url = 
"https://files.pythonhosted.org/packages/ac/58/40576554cd12e0872faf6d2c0eb3bc85f71d78427946ddd19ad65201e2c0/coverage-7.13.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:77cc258aeb29a3417062758975521eae60af6f79e930d6993555eeac6a8eac29", size = 246505, upload-time = "2025-12-28T15:40:25.421Z" }, + { url = "https://files.pythonhosted.org/packages/3b/77/9233a90253fba576b0eee81707b5781d0e21d97478e5377b226c5b096c0f/coverage-7.13.1-cp310-cp310-win32.whl", hash = "sha256:bb4f8c3c9a9f34423dba193f241f617b08ffc63e27f67159f60ae6baf2dcfe0f", size = 221257, upload-time = "2025-12-28T15:40:27.217Z" }, + { url = "https://files.pythonhosted.org/packages/e0/43/e842ff30c1a0a623ec80db89befb84a3a7aad7bfe44a6ea77d5a3e61fedd/coverage-7.13.1-cp310-cp310-win_amd64.whl", hash = "sha256:c8e2706ceb622bc63bac98ebb10ef5da80ed70fbd8a7999a5076de3afaef0fb1", size = 222191, upload-time = "2025-12-28T15:40:28.916Z" }, + { url = "https://files.pythonhosted.org/packages/b4/9b/77baf488516e9ced25fc215a6f75d803493fc3f6a1a1227ac35697910c2a/coverage-7.13.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a55d509a1dc5a5b708b5dad3b5334e07a16ad4c2185e27b40e4dba796ab7f88", size = 218755, upload-time = "2025-12-28T15:40:30.812Z" }, + { url = "https://files.pythonhosted.org/packages/d7/cd/7ab01154e6eb79ee2fab76bf4d89e94c6648116557307ee4ebbb85e5c1bf/coverage-7.13.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4d010d080c4888371033baab27e47c9df7d6fb28d0b7b7adf85a4a49be9298b3", size = 219257, upload-time = "2025-12-28T15:40:32.333Z" }, + { url = "https://files.pythonhosted.org/packages/01/d5/b11ef7863ffbbdb509da0023fad1e9eda1c0eaea61a6d2ea5b17d4ac706e/coverage-7.13.1-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d938b4a840fb1523b9dfbbb454f652967f18e197569c32266d4d13f37244c3d9", size = 249657, upload-time = "2025-12-28T15:40:34.1Z" }, + { url = 
"https://files.pythonhosted.org/packages/f7/7c/347280982982383621d29b8c544cf497ae07ac41e44b1ca4903024131f55/coverage-7.13.1-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bf100a3288f9bb7f919b87eb84f87101e197535b9bd0e2c2b5b3179633324fee", size = 251581, upload-time = "2025-12-28T15:40:36.131Z" }, + { url = "https://files.pythonhosted.org/packages/82/f6/ebcfed11036ade4c0d75fa4453a6282bdd225bc073862766eec184a4c643/coverage-7.13.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ef6688db9bf91ba111ae734ba6ef1a063304a881749726e0d3575f5c10a9facf", size = 253691, upload-time = "2025-12-28T15:40:37.626Z" }, + { url = "https://files.pythonhosted.org/packages/02/92/af8f5582787f5d1a8b130b2dcba785fa5e9a7a8e121a0bb2220a6fdbdb8a/coverage-7.13.1-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0b609fc9cdbd1f02e51f67f51e5aee60a841ef58a68d00d5ee2c0faf357481a3", size = 249799, upload-time = "2025-12-28T15:40:39.47Z" }, + { url = "https://files.pythonhosted.org/packages/24/aa/0e39a2a3b16eebf7f193863323edbff38b6daba711abaaf807d4290cf61a/coverage-7.13.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c43257717611ff5e9a1d79dce8e47566235ebda63328718d9b65dd640bc832ef", size = 251389, upload-time = "2025-12-28T15:40:40.954Z" }, + { url = "https://files.pythonhosted.org/packages/73/46/7f0c13111154dc5b978900c0ccee2e2ca239b910890e674a77f1363d483e/coverage-7.13.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e09fbecc007f7b6afdfb3b07ce5bd9f8494b6856dd4f577d26c66c391b829851", size = 249450, upload-time = "2025-12-28T15:40:42.489Z" }, + { url = "https://files.pythonhosted.org/packages/ac/ca/e80da6769e8b669ec3695598c58eef7ad98b0e26e66333996aee6316db23/coverage-7.13.1-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:a03a4f3a19a189919c7055098790285cc5c5b0b3976f8d227aea39dbf9f8bfdb", size = 249170, upload-time = "2025-12-28T15:40:44.279Z" }, + { url = 
"https://files.pythonhosted.org/packages/af/18/9e29baabdec1a8644157f572541079b4658199cfd372a578f84228e860de/coverage-7.13.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3820778ea1387c2b6a818caec01c63adc5b3750211af6447e8dcfb9b6f08dbba", size = 250081, upload-time = "2025-12-28T15:40:45.748Z" }, + { url = "https://files.pythonhosted.org/packages/00/f8/c3021625a71c3b2f516464d322e41636aea381018319050a8114105872ee/coverage-7.13.1-cp311-cp311-win32.whl", hash = "sha256:ff10896fa55167371960c5908150b434b71c876dfab97b69478f22c8b445ea19", size = 221281, upload-time = "2025-12-28T15:40:47.232Z" }, + { url = "https://files.pythonhosted.org/packages/27/56/c216625f453df6e0559ed666d246fcbaaa93f3aa99eaa5080cea1229aa3d/coverage-7.13.1-cp311-cp311-win_amd64.whl", hash = "sha256:a998cc0aeeea4c6d5622a3754da5a493055d2d95186bad877b0a34ea6e6dbe0a", size = 222215, upload-time = "2025-12-28T15:40:49.19Z" }, + { url = "https://files.pythonhosted.org/packages/5c/9a/be342e76f6e531cae6406dc46af0d350586f24d9b67fdfa6daee02df71af/coverage-7.13.1-cp311-cp311-win_arm64.whl", hash = "sha256:fea07c1a39a22614acb762e3fbbb4011f65eedafcb2948feeef641ac78b4ee5c", size = 220886, upload-time = "2025-12-28T15:40:51.067Z" }, + { url = "https://files.pythonhosted.org/packages/ce/8a/87af46cccdfa78f53db747b09f5f9a21d5fc38d796834adac09b30a8ce74/coverage-7.13.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6f34591000f06e62085b1865c9bc5f7858df748834662a51edadfd2c3bfe0dd3", size = 218927, upload-time = "2025-12-28T15:40:52.814Z" }, + { url = "https://files.pythonhosted.org/packages/82/a8/6e22fdc67242a4a5a153f9438d05944553121c8f4ba70cb072af4c41362e/coverage-7.13.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b67e47c5595b9224599016e333f5ec25392597a89d5744658f837d204e16c63e", size = 219288, upload-time = "2025-12-28T15:40:54.262Z" }, + { url = 
"https://files.pythonhosted.org/packages/d0/0a/853a76e03b0f7c4375e2ca025df45c918beb367f3e20a0a8e91967f6e96c/coverage-7.13.1-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3e7b8bd70c48ffb28461ebe092c2345536fb18bbbf19d287c8913699735f505c", size = 250786, upload-time = "2025-12-28T15:40:56.059Z" }, + { url = "https://files.pythonhosted.org/packages/ea/b4/694159c15c52b9f7ec7adf49d50e5f8ee71d3e9ef38adb4445d13dd56c20/coverage-7.13.1-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:c223d078112e90dc0e5c4e35b98b9584164bea9fbbd221c0b21c5241f6d51b62", size = 253543, upload-time = "2025-12-28T15:40:57.585Z" }, + { url = "https://files.pythonhosted.org/packages/96/b2/7f1f0437a5c855f87e17cf5d0dc35920b6440ff2b58b1ba9788c059c26c8/coverage-7.13.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:794f7c05af0763b1bbd1b9e6eff0e52ad068be3b12cd96c87de037b01390c968", size = 254635, upload-time = "2025-12-28T15:40:59.443Z" }, + { url = "https://files.pythonhosted.org/packages/e9/d1/73c3fdb8d7d3bddd9473c9c6a2e0682f09fc3dfbcb9c3f36412a7368bcab/coverage-7.13.1-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0642eae483cc8c2902e4af7298bf886d605e80f26382124cddc3967c2a3df09e", size = 251202, upload-time = "2025-12-28T15:41:01.328Z" }, + { url = "https://files.pythonhosted.org/packages/66/3c/f0edf75dcc152f145d5598329e864bbbe04ab78660fe3e8e395f9fff010f/coverage-7.13.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9f5e772ed5fef25b3de9f2008fe67b92d46831bd2bc5bdc5dd6bfd06b83b316f", size = 252566, upload-time = "2025-12-28T15:41:03.319Z" }, + { url = "https://files.pythonhosted.org/packages/17/b3/e64206d3c5f7dcbceafd14941345a754d3dbc78a823a6ed526e23b9cdaab/coverage-7.13.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:45980ea19277dc0a579e432aef6a504fe098ef3a9032ead15e446eb0f1191aee", size = 250711, upload-time = 
"2025-12-28T15:41:06.411Z" }, + { url = "https://files.pythonhosted.org/packages/dc/ad/28a3eb970a8ef5b479ee7f0c484a19c34e277479a5b70269dc652b730733/coverage-7.13.1-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:e4f18eca6028ffa62adbd185a8f1e1dd242f2e68164dba5c2b74a5204850b4cf", size = 250278, upload-time = "2025-12-28T15:41:08.285Z" }, + { url = "https://files.pythonhosted.org/packages/54/e3/c8f0f1a93133e3e1291ca76cbb63565bd4b5c5df63b141f539d747fff348/coverage-7.13.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f8dca5590fec7a89ed6826fce625595279e586ead52e9e958d3237821fbc750c", size = 252154, upload-time = "2025-12-28T15:41:09.969Z" }, + { url = "https://files.pythonhosted.org/packages/d0/bf/9939c5d6859c380e405b19e736321f1c7d402728792f4c752ad1adcce005/coverage-7.13.1-cp312-cp312-win32.whl", hash = "sha256:ff86d4e85188bba72cfb876df3e11fa243439882c55957184af44a35bd5880b7", size = 221487, upload-time = "2025-12-28T15:41:11.468Z" }, + { url = "https://files.pythonhosted.org/packages/fa/dc/7282856a407c621c2aad74021680a01b23010bb8ebf427cf5eacda2e876f/coverage-7.13.1-cp312-cp312-win_amd64.whl", hash = "sha256:16cc1da46c04fb0fb128b4dc430b78fa2aba8a6c0c9f8eb391fd5103409a6ac6", size = 222299, upload-time = "2025-12-28T15:41:13.386Z" }, + { url = "https://files.pythonhosted.org/packages/10/79/176a11203412c350b3e9578620013af35bcdb79b651eb976f4a4b32044fa/coverage-7.13.1-cp312-cp312-win_arm64.whl", hash = "sha256:8d9bc218650022a768f3775dd7fdac1886437325d8d295d923ebcfef4892ad5c", size = 220941, upload-time = "2025-12-28T15:41:14.975Z" }, + { url = "https://files.pythonhosted.org/packages/a3/a4/e98e689347a1ff1a7f67932ab535cef82eb5e78f32a9e4132e114bbb3a0a/coverage-7.13.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:cb237bfd0ef4d5eb6a19e29f9e528ac67ac3be932ea6b44fb6cc09b9f3ecff78", size = 218951, upload-time = "2025-12-28T15:41:16.653Z" }, + { url = 
"https://files.pythonhosted.org/packages/32/33/7cbfe2bdc6e2f03d6b240d23dc45fdaf3fd270aaf2d640be77b7f16989ab/coverage-7.13.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1dcb645d7e34dcbcc96cd7c132b1fc55c39263ca62eb961c064eb3928997363b", size = 219325, upload-time = "2025-12-28T15:41:18.609Z" }, + { url = "https://files.pythonhosted.org/packages/59/f6/efdabdb4929487baeb7cb2a9f7dac457d9356f6ad1b255be283d58b16316/coverage-7.13.1-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3d42df8201e00384736f0df9be2ced39324c3907607d17d50d50116c989d84cd", size = 250309, upload-time = "2025-12-28T15:41:20.629Z" }, + { url = "https://files.pythonhosted.org/packages/12/da/91a52516e9d5aea87d32d1523f9cdcf7a35a3b298e6be05d6509ba3cfab2/coverage-7.13.1-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:fa3edde1aa8807de1d05934982416cb3ec46d1d4d91e280bcce7cca01c507992", size = 252907, upload-time = "2025-12-28T15:41:22.257Z" }, + { url = "https://files.pythonhosted.org/packages/75/38/f1ea837e3dc1231e086db1638947e00d264e7e8c41aa8ecacf6e1e0c05f4/coverage-7.13.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9edd0e01a343766add6817bc448408858ba6b489039eaaa2018474e4001651a4", size = 254148, upload-time = "2025-12-28T15:41:23.87Z" }, + { url = "https://files.pythonhosted.org/packages/7f/43/f4f16b881aaa34954ba446318dea6b9ed5405dd725dd8daac2358eda869a/coverage-7.13.1-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:985b7836931d033570b94c94713c6dba5f9d3ff26045f72c3e5dbc5fe3361e5a", size = 250515, upload-time = "2025-12-28T15:41:25.437Z" }, + { url = "https://files.pythonhosted.org/packages/84/34/8cba7f00078bd468ea914134e0144263194ce849ec3baad187ffb6203d1c/coverage-7.13.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ffed1e4980889765c84a5d1a566159e363b71d6b6fbaf0bebc9d3c30bc016766", size = 252292, upload-time = 
"2025-12-28T15:41:28.459Z" }, + { url = "https://files.pythonhosted.org/packages/8c/a4/cffac66c7652d84ee4ac52d3ccb94c015687d3b513f9db04bfcac2ac800d/coverage-7.13.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:8842af7f175078456b8b17f1b73a0d16a65dcbdc653ecefeb00a56b3c8c298c4", size = 250242, upload-time = "2025-12-28T15:41:30.02Z" }, + { url = "https://files.pythonhosted.org/packages/f4/78/9a64d462263dde416f3c0067efade7b52b52796f489b1037a95b0dc389c9/coverage-7.13.1-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:ccd7a6fca48ca9c131d9b0a2972a581e28b13416fc313fb98b6d24a03ce9a398", size = 250068, upload-time = "2025-12-28T15:41:32.007Z" }, + { url = "https://files.pythonhosted.org/packages/69/c8/a8994f5fece06db7c4a97c8fc1973684e178599b42e66280dded0524ef00/coverage-7.13.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0403f647055de2609be776965108447deb8e384fe4a553c119e3ff6bfbab4784", size = 251846, upload-time = "2025-12-28T15:41:33.946Z" }, + { url = "https://files.pythonhosted.org/packages/cc/f7/91fa73c4b80305c86598a2d4e54ba22df6bf7d0d97500944af7ef155d9f7/coverage-7.13.1-cp313-cp313-win32.whl", hash = "sha256:549d195116a1ba1e1ae2f5ca143f9777800f6636eab917d4f02b5310d6d73461", size = 221512, upload-time = "2025-12-28T15:41:35.519Z" }, + { url = "https://files.pythonhosted.org/packages/45/0b/0768b4231d5a044da8f75e097a8714ae1041246bb765d6b5563bab456735/coverage-7.13.1-cp313-cp313-win_amd64.whl", hash = "sha256:5899d28b5276f536fcf840b18b61a9fce23cc3aec1d114c44c07fe94ebeaa500", size = 222321, upload-time = "2025-12-28T15:41:37.371Z" }, + { url = "https://files.pythonhosted.org/packages/9b/b8/bdcb7253b7e85157282450262008f1366aa04663f3e3e4c30436f596c3e2/coverage-7.13.1-cp313-cp313-win_arm64.whl", hash = "sha256:868a2fae76dfb06e87291bcbd4dcbcc778a8500510b618d50496e520bd94d9b9", size = 220949, upload-time = "2025-12-28T15:41:39.553Z" }, + { url = 
"https://files.pythonhosted.org/packages/70/52/f2be52cc445ff75ea8397948c96c1b4ee14f7f9086ea62fc929c5ae7b717/coverage-7.13.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:67170979de0dacac3f3097d02b0ad188d8edcea44ccc44aaa0550af49150c7dc", size = 219643, upload-time = "2025-12-28T15:41:41.567Z" }, + { url = "https://files.pythonhosted.org/packages/47/79/c85e378eaa239e2edec0c5523f71542c7793fe3340954eafb0bc3904d32d/coverage-7.13.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f80e2bb21bfab56ed7405c2d79d34b5dc0bc96c2c1d2a067b643a09fb756c43a", size = 219997, upload-time = "2025-12-28T15:41:43.418Z" }, + { url = "https://files.pythonhosted.org/packages/fe/9b/b1ade8bfb653c0bbce2d6d6e90cc6c254cbb99b7248531cc76253cb4da6d/coverage-7.13.1-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f83351e0f7dcdb14d7326c3d8d8c4e915fa685cbfdc6281f9470d97a04e9dfe4", size = 261296, upload-time = "2025-12-28T15:41:45.207Z" }, + { url = "https://files.pythonhosted.org/packages/1f/af/ebf91e3e1a2473d523e87e87fd8581e0aa08741b96265730e2d79ce78d8d/coverage-7.13.1-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bb3f6562e89bad0110afbe64e485aac2462efdce6232cdec7862a095dc3412f6", size = 263363, upload-time = "2025-12-28T15:41:47.163Z" }, + { url = "https://files.pythonhosted.org/packages/c4/8b/fb2423526d446596624ac7fde12ea4262e66f86f5120114c3cfd0bb2befa/coverage-7.13.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:77545b5dcda13b70f872c3b5974ac64c21d05e65b1590b441c8560115dc3a0d1", size = 265783, upload-time = "2025-12-28T15:41:49.03Z" }, + { url = "https://files.pythonhosted.org/packages/9b/26/ef2adb1e22674913b89f0fe7490ecadcef4a71fa96f5ced90c60ec358789/coverage-7.13.1-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a4d240d260a1aed814790bbe1f10a5ff31ce6c21bc78f0da4a1e8268d6c80dbd", size = 260508, upload-time = 
"2025-12-28T15:41:51.035Z" }, + { url = "https://files.pythonhosted.org/packages/ce/7d/f0f59b3404caf662e7b5346247883887687c074ce67ba453ea08c612b1d5/coverage-7.13.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:d2287ac9360dec3837bfdad969963a5d073a09a85d898bd86bea82aa8876ef3c", size = 263357, upload-time = "2025-12-28T15:41:52.631Z" }, + { url = "https://files.pythonhosted.org/packages/1a/b1/29896492b0b1a047604d35d6fa804f12818fa30cdad660763a5f3159e158/coverage-7.13.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:0d2c11f3ea4db66b5cbded23b20185c35066892c67d80ec4be4bab257b9ad1e0", size = 260978, upload-time = "2025-12-28T15:41:54.589Z" }, + { url = "https://files.pythonhosted.org/packages/48/f2/971de1238a62e6f0a4128d37adadc8bb882ee96afbe03ff1570291754629/coverage-7.13.1-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:3fc6a169517ca0d7ca6846c3c5392ef2b9e38896f61d615cb75b9e7134d4ee1e", size = 259877, upload-time = "2025-12-28T15:41:56.263Z" }, + { url = "https://files.pythonhosted.org/packages/6a/fc/0474efcbb590ff8628830e9aaec5f1831594874360e3251f1fdec31d07a3/coverage-7.13.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d10a2ed46386e850bb3de503a54f9fe8192e5917fcbb143bfef653a9355e9a53", size = 262069, upload-time = "2025-12-28T15:41:58.093Z" }, + { url = "https://files.pythonhosted.org/packages/88/4f/3c159b7953db37a7b44c0eab8a95c37d1aa4257c47b4602c04022d5cb975/coverage-7.13.1-cp313-cp313t-win32.whl", hash = "sha256:75a6f4aa904301dab8022397a22c0039edc1f51e90b83dbd4464b8a38dc87842", size = 222184, upload-time = "2025-12-28T15:41:59.763Z" }, + { url = "https://files.pythonhosted.org/packages/58/a5/6b57d28f81417f9335774f20679d9d13b9a8fb90cd6160957aa3b54a2379/coverage-7.13.1-cp313-cp313t-win_amd64.whl", hash = "sha256:309ef5706e95e62578cda256b97f5e097916a2c26247c287bbe74794e7150df2", size = 223250, upload-time = "2025-12-28T15:42:01.52Z" }, + { url = 
"https://files.pythonhosted.org/packages/81/7c/160796f3b035acfbb58be80e02e484548595aa67e16a6345e7910ace0a38/coverage-7.13.1-cp313-cp313t-win_arm64.whl", hash = "sha256:92f980729e79b5d16d221038dbf2e8f9a9136afa072f9d5d6ed4cb984b126a09", size = 221521, upload-time = "2025-12-28T15:42:03.275Z" }, + { url = "https://files.pythonhosted.org/packages/aa/8e/ba0e597560c6563fc0adb902fda6526df5d4aa73bb10adf0574d03bd2206/coverage-7.13.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:97ab3647280d458a1f9adb85244e81587505a43c0c7cff851f5116cd2814b894", size = 218996, upload-time = "2025-12-28T15:42:04.978Z" }, + { url = "https://files.pythonhosted.org/packages/6b/8e/764c6e116f4221dc7aa26c4061181ff92edb9c799adae6433d18eeba7a14/coverage-7.13.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:8f572d989142e0908e6acf57ad1b9b86989ff057c006d13b76c146ec6a20216a", size = 219326, upload-time = "2025-12-28T15:42:06.691Z" }, + { url = "https://files.pythonhosted.org/packages/4f/a6/6130dc6d8da28cdcbb0f2bf8865aeca9b157622f7c0031e48c6cf9a0e591/coverage-7.13.1-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d72140ccf8a147e94274024ff6fd8fb7811354cf7ef88b1f0a988ebaa5bc774f", size = 250374, upload-time = "2025-12-28T15:42:08.786Z" }, + { url = "https://files.pythonhosted.org/packages/82/2b/783ded568f7cd6b677762f780ad338bf4b4750205860c17c25f7c708995e/coverage-7.13.1-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:d3c9f051b028810f5a87c88e5d6e9af3c0ff32ef62763bf15d29f740453ca909", size = 252882, upload-time = "2025-12-28T15:42:10.515Z" }, + { url = "https://files.pythonhosted.org/packages/cd/b2/9808766d082e6a4d59eb0cc881a57fc1600eb2c5882813eefff8254f71b5/coverage-7.13.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f398ba4df52d30b1763f62eed9de5620dcde96e6f491f4c62686736b155aa6e4", size = 254218, upload-time = "2025-12-28T15:42:12.208Z" }, + { url = 
"https://files.pythonhosted.org/packages/44/ea/52a985bb447c871cb4d2e376e401116520991b597c85afdde1ea9ef54f2c/coverage-7.13.1-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:132718176cc723026d201e347f800cd1a9e4b62ccd3f82476950834dad501c75", size = 250391, upload-time = "2025-12-28T15:42:14.21Z" }, + { url = "https://files.pythonhosted.org/packages/7f/1d/125b36cc12310718873cfc8209ecfbc1008f14f4f5fa0662aa608e579353/coverage-7.13.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:9e549d642426e3579b3f4b92d0431543b012dcb6e825c91619d4e93b7363c3f9", size = 252239, upload-time = "2025-12-28T15:42:16.292Z" }, + { url = "https://files.pythonhosted.org/packages/6a/16/10c1c164950cade470107f9f14bbac8485f8fb8515f515fca53d337e4a7f/coverage-7.13.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:90480b2134999301eea795b3a9dbf606c6fbab1b489150c501da84a959442465", size = 250196, upload-time = "2025-12-28T15:42:18.54Z" }, + { url = "https://files.pythonhosted.org/packages/2a/c6/cd860fac08780c6fd659732f6ced1b40b79c35977c1356344e44d72ba6c4/coverage-7.13.1-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:e825dbb7f84dfa24663dd75835e7257f8882629fc11f03ecf77d84a75134b864", size = 250008, upload-time = "2025-12-28T15:42:20.365Z" }, + { url = "https://files.pythonhosted.org/packages/f0/3a/a8c58d3d38f82a5711e1e0a67268362af48e1a03df27c03072ac30feefcf/coverage-7.13.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:623dcc6d7a7ba450bbdbeedbaa0c42b329bdae16491af2282f12a7e809be7eb9", size = 251671, upload-time = "2025-12-28T15:42:22.114Z" }, + { url = "https://files.pythonhosted.org/packages/f0/bc/fd4c1da651d037a1e3d53e8cb3f8182f4b53271ffa9a95a2e211bacc0349/coverage-7.13.1-cp314-cp314-win32.whl", hash = "sha256:6e73ebb44dca5f708dc871fe0b90cf4cff1a13f9956f747cc87b535a840386f5", size = 221777, upload-time = "2025-12-28T15:42:23.919Z" }, + { url = 
"https://files.pythonhosted.org/packages/4b/50/71acabdc8948464c17e90b5ffd92358579bd0910732c2a1c9537d7536aa6/coverage-7.13.1-cp314-cp314-win_amd64.whl", hash = "sha256:be753b225d159feb397bd0bf91ae86f689bad0da09d3b301478cd39b878ab31a", size = 222592, upload-time = "2025-12-28T15:42:25.619Z" }, + { url = "https://files.pythonhosted.org/packages/f7/c8/a6fb943081bb0cc926499c7907731a6dc9efc2cbdc76d738c0ab752f1a32/coverage-7.13.1-cp314-cp314-win_arm64.whl", hash = "sha256:228b90f613b25ba0019361e4ab81520b343b622fc657daf7e501c4ed6a2366c0", size = 221169, upload-time = "2025-12-28T15:42:27.629Z" }, + { url = "https://files.pythonhosted.org/packages/16/61/d5b7a0a0e0e40d62e59bc8c7aa1afbd86280d82728ba97f0673b746b78e2/coverage-7.13.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:60cfb538fe9ef86e5b2ab0ca8fc8d62524777f6c611dcaf76dc16fbe9b8e698a", size = 219730, upload-time = "2025-12-28T15:42:29.306Z" }, + { url = "https://files.pythonhosted.org/packages/a3/2c/8881326445fd071bb49514d1ce97d18a46a980712b51fee84f9ab42845b4/coverage-7.13.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:57dfc8048c72ba48a8c45e188d811e5efd7e49b387effc8fb17e97936dde5bf6", size = 220001, upload-time = "2025-12-28T15:42:31.319Z" }, + { url = "https://files.pythonhosted.org/packages/b5/d7/50de63af51dfa3a7f91cc37ad8fcc1e244b734232fbc8b9ab0f3c834a5cd/coverage-7.13.1-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3f2f725aa3e909b3c5fdb8192490bdd8e1495e85906af74fe6e34a2a77ba0673", size = 261370, upload-time = "2025-12-28T15:42:32.992Z" }, + { url = "https://files.pythonhosted.org/packages/e1/2c/d31722f0ec918fd7453b2758312729f645978d212b410cd0f7c2aed88a94/coverage-7.13.1-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:9ee68b21909686eeb21dfcba2c3b81fee70dcf38b140dcd5aa70680995fa3aa5", size = 263485, upload-time = "2025-12-28T15:42:34.759Z" }, + { url = 
"https://files.pythonhosted.org/packages/fa/7a/2c114fa5c5fc08ba0777e4aec4c97e0b4a1afcb69c75f1f54cff78b073ab/coverage-7.13.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:724b1b270cb13ea2e6503476e34541a0b1f62280bc997eab443f87790202033d", size = 265890, upload-time = "2025-12-28T15:42:36.517Z" }, + { url = "https://files.pythonhosted.org/packages/65/d9/f0794aa1c74ceabc780fe17f6c338456bbc4e96bd950f2e969f48ac6fb20/coverage-7.13.1-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:916abf1ac5cf7eb16bc540a5bf75c71c43a676f5c52fcb9fe75a2bd75fb944e8", size = 260445, upload-time = "2025-12-28T15:42:38.646Z" }, + { url = "https://files.pythonhosted.org/packages/49/23/184b22a00d9bb97488863ced9454068c79e413cb23f472da6cbddc6cfc52/coverage-7.13.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:776483fd35b58d8afe3acbd9988d5de592ab6da2d2a865edfdbc9fdb43e7c486", size = 263357, upload-time = "2025-12-28T15:42:40.788Z" }, + { url = "https://files.pythonhosted.org/packages/7d/bd/58af54c0c9199ea4190284f389005779d7daf7bf3ce40dcd2d2b2f96da69/coverage-7.13.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:b6f3b96617e9852703f5b633ea01315ca45c77e879584f283c44127f0f1ec564", size = 260959, upload-time = "2025-12-28T15:42:42.808Z" }, + { url = "https://files.pythonhosted.org/packages/4b/2a/6839294e8f78a4891bf1df79d69c536880ba2f970d0ff09e7513d6e352e9/coverage-7.13.1-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:bd63e7b74661fed317212fab774e2a648bc4bb09b35f25474f8e3325d2945cd7", size = 259792, upload-time = "2025-12-28T15:42:44.818Z" }, + { url = "https://files.pythonhosted.org/packages/ba/c3/528674d4623283310ad676c5af7414b9850ab6d55c2300e8aa4b945ec554/coverage-7.13.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:933082f161bbb3e9f90d00990dc956120f608cdbcaeea15c4d897f56ef4fe416", size = 262123, upload-time = "2025-12-28T15:42:47.108Z" }, + { url = 
"https://files.pythonhosted.org/packages/06/c5/8c0515692fb4c73ac379d8dc09b18eaf0214ecb76ea6e62467ba7a1556ff/coverage-7.13.1-cp314-cp314t-win32.whl", hash = "sha256:18be793c4c87de2965e1c0f060f03d9e5aff66cfeae8e1dbe6e5b88056ec153f", size = 222562, upload-time = "2025-12-28T15:42:49.144Z" }, + { url = "https://files.pythonhosted.org/packages/05/0e/c0a0c4678cb30dac735811db529b321d7e1c9120b79bd728d4f4d6b010e9/coverage-7.13.1-cp314-cp314t-win_amd64.whl", hash = "sha256:0e42e0ec0cd3e0d851cb3c91f770c9301f48647cb2877cb78f74bdaa07639a79", size = 223670, upload-time = "2025-12-28T15:42:51.218Z" }, + { url = "https://files.pythonhosted.org/packages/f5/5f/b177aa0011f354abf03a8f30a85032686d290fdeed4222b27d36b4372a50/coverage-7.13.1-cp314-cp314t-win_arm64.whl", hash = "sha256:eaecf47ef10c72ece9a2a92118257da87e460e113b83cc0d2905cbbe931792b4", size = 221707, upload-time = "2025-12-28T15:42:53.034Z" }, + { url = "https://files.pythonhosted.org/packages/cc/48/d9f421cb8da5afaa1a64570d9989e00fb7955e6acddc5a12979f7666ef60/coverage-7.13.1-py3-none-any.whl", hash = "sha256:2016745cb3ba554469d02819d78958b571792bb68e31302610e898f80dd3a573", size = 210722, upload-time = "2025-12-28T15:42:54.901Z" }, +] + +[package.optional-dependencies] +toml = [ + { name = "tomli", marker = "(python_full_version <= '3.11' and sys_platform == 'darwin') or (python_full_version <= '3.11' and sys_platform == 'linux') or (python_full_version <= '3.11' and sys_platform == 'win32')" }, +] + +[[package]] +name = "distlib" +version = "0.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/96/8e/709914eb2b5749865801041647dc7f4e6d00b549cfe88b65ca192995f07c/distlib-0.4.0.tar.gz", hash = "sha256:feec40075be03a04501a973d81f633735b4b69f98b05450592310c0f401a4e0d", size = 614605, upload-time = "2025-07-17T16:52:00.465Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/33/6b/e0547afaf41bf2c42e52430072fa5658766e3d65bd4b03a563d1b6336f57/distlib-0.4.0-py2.py3-none-any.whl", hash = "sha256:9659f7d87e46584a30b5780e43ac7a2143098441670ff0a49d5f9034c54a6c16", size = 469047, upload-time = "2025-07-17T16:51:58.613Z" }, +] + +[[package]] +name = "docutils" +version = "0.21.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ae/ed/aefcc8cd0ba62a0560c3c18c33925362d46c6075480bfa4df87b28e169a9/docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f", size = 2204444, upload-time = "2024-04-23T18:57:18.24Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8f/d7/9322c609343d929e75e7e5e6255e614fcc67572cfd083959cdef3b7aad79/docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2", size = 587408, upload-time = "2024-04-23T18:57:14.835Z" }, +] + +[[package]] +name = "exceptiongroup" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "(python_full_version < '3.13' and sys_platform == 'darwin') or (python_full_version < '3.13' and sys_platform == 'win32') or (python_full_version < '3.12' and sys_platform == 'linux')" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/50/79/66800aadf48771f6b62f7eb014e352e5d06856655206165d775e675a02c9/exceptiongroup-1.3.1.tar.gz", hash = "sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219", size = 30371, upload-time = "2025-11-21T23:01:54.787Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8a/0e/97c33bf5009bdbac74fd2beace167cab3f978feb69cc36f1ef79360d6c4e/exceptiongroup-1.3.1-py3-none-any.whl", hash = "sha256:a7a39a3bd276781e98394987d3a5701d0c4edffb633bb7a5144577f82c773598", size = 16740, upload-time = "2025-11-21T23:01:53.443Z" }, +] + +[[package]] +name = "execnet" 
+version = "2.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/bf/89/780e11f9588d9e7128a3f87788354c7946a9cbb1401ad38a48c4db9a4f07/execnet-2.1.2.tar.gz", hash = "sha256:63d83bfdd9a23e35b9c6a3261412324f964c2ec8dcd8d3c6916ee9373e0befcd", size = 166622, upload-time = "2025-11-12T09:56:37.75Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ab/84/02fc1827e8cdded4aa65baef11296a9bbe595c474f0d6d758af082d849fd/execnet-2.1.2-py3-none-any.whl", hash = "sha256:67fba928dd5a544b783f6056f449e5e3931a5c378b128bc18501f7ea79e296ec", size = 40708, upload-time = "2025-11-12T09:56:36.333Z" }, +] + +[[package]] +name = "filelock" +version = "3.20.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1d/65/ce7f1b70157833bf3cb851b556a37d4547ceafc158aa9b34b36782f23696/filelock-3.20.3.tar.gz", hash = "sha256:18c57ee915c7ec61cff0ecf7f0f869936c7c30191bb0cf406f1341778d0834e1", size = 19485, upload-time = "2026-01-09T17:55:05.421Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b5/36/7fb70f04bf00bc646cd5bb45aa9eddb15e19437a28b8fb2b4a5249fac770/filelock-3.20.3-py3-none-any.whl", hash = "sha256:4b0dda527ee31078689fc205ec4f1c1bf7d56cf88b6dc9426c4f230e46c2dce1", size = 16701, upload-time = "2026-01-09T17:55:04.334Z" }, +] + +[[package]] +name = "flit" +version = "3.12.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "docutils", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "flit-core", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pip", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "requests", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "tomli-w", marker = "sys_platform == 
'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/50/9c/0608c91a5b6c013c63548515ae31cff6399cd9ce891bd9daee8c103da09b/flit-3.12.0.tar.gz", hash = "sha256:1c80f34dd96992e7758b40423d2809f48f640ca285d0b7821825e50745ec3740", size = 155038, upload-time = "2025-03-25T08:03:22.505Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f5/82/ce1d3bb380b227e26e517655d1de7b32a72aad61fa21ff9bd91a2e2db6ee/flit-3.12.0-py3-none-any.whl", hash = "sha256:2b4e7171dc22881fa6adc2dbf083e5ecc72520be3cd7587d2a803da94d6ef431", size = 50657, upload-time = "2025-03-25T08:03:19.031Z" }, +] + +[[package]] +name = "flit-core" +version = "3.12.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/69/59/b6fc2188dfc7ea4f936cd12b49d707f66a1cb7a1d2c16172963534db741b/flit_core-3.12.0.tar.gz", hash = "sha256:18f63100d6f94385c6ed57a72073443e1a71a4acb4339491615d0f16d6ff01b2", size = 53690, upload-time = "2025-03-25T08:03:23.969Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f2/65/b6ba90634c984a4fcc02c7e3afe523fef500c4980fec67cc27536ee50acf/flit_core-3.12.0-py3-none-any.whl", hash = "sha256:e7a0304069ea895172e3c7bb703292e992c5d1555dd1233ab7b5621b5b69e62c", size = 45594, upload-time = "2025-03-25T08:03:20.772Z" }, +] + +[[package]] +name = "identify" +version = "2.6.16" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5b/8d/e8b97e6bd3fb6fb271346f7981362f1e04d6a7463abd0de79e1fda17c067/identify-2.6.16.tar.gz", hash = "sha256:846857203b5511bbe94d5a352a48ef2359532bc8f6727b5544077a0dcfb24980", size = 99360, upload-time = "2026-01-12T18:58:58.201Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b8/58/40fbbcefeda82364720eba5cf2270f98496bdfa19ea75b4cccae79c698e6/identify-2.6.16-py2.py3-none-any.whl", hash = 
"sha256:391ee4d77741d994189522896270b787aed8670389bfd60f326d677d64a6dfb0", size = 99202, upload-time = "2026-01-12T18:58:56.627Z" }, +] + +[[package]] +name = "idna" +version = "3.11" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, +] + +[[package]] +name = "imagesize" +version = "1.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a7/84/62473fb57d61e31fef6e36d64a179c8781605429fd927b5dd608c997be31/imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a", size = 1280026, upload-time = "2022-07-01T12:21:05.687Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ff/62/85c4c919272577931d407be5ba5d71c20f0b616d31a0befe0ae45bb79abd/imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b", size = 8769, upload-time = "2022-07-01T12:21:02.467Z" }, +] + +[[package]] +name = "iniconfig" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" }, +] + +[[package]] +name = "jinja2" +version = "3.1.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115, upload-time = "2025-03-05T20:05:02.478Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" }, +] + +[[package]] +name = "librt" +version = "0.7.8" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e7/24/5f3646ff414285e0f7708fa4e946b9bf538345a41d1c375c439467721a5e/librt-0.7.8.tar.gz", hash = "sha256:1a4ede613941d9c3470b0368be851df6bb78ab218635512d0370b27a277a0862", size = 148323, upload-time = "2026-01-14T12:56:16.876Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/44/13/57b06758a13550c5f09563893b004f98e9537ee6ec67b7df85c3571c8832/librt-0.7.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b45306a1fc5f53c9330fbee134d8b3227fe5da2ab09813b892790400aa49352d", size = 56521, upload-time = "2026-01-14T12:54:40.066Z" }, + { url = "https://files.pythonhosted.org/packages/c2/24/bbea34d1452a10612fb45ac8356f95351ba40c2517e429602160a49d1fd0/librt-0.7.8-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:864c4b7083eeee250ed55135d2127b260d7eb4b5e953a9e5df09c852e327961b", size = 58456, upload-time = "2026-01-14T12:54:41.471Z" }, + { url = "https://files.pythonhosted.org/packages/04/72/a168808f92253ec3a810beb1eceebc465701197dbc7e865a1c9ceb3c22c7/librt-0.7.8-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:6938cc2de153bc927ed8d71c7d2f2ae01b4e96359126c602721340eb7ce1a92d", size = 164392, upload-time = "2026-01-14T12:54:42.843Z" }, + { url = "https://files.pythonhosted.org/packages/14/5c/4c0d406f1b02735c2e7af8ff1ff03a6577b1369b91aa934a9fa2cc42c7ce/librt-0.7.8-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:66daa6ac5de4288a5bbfbe55b4caa7bf0cd26b3269c7a476ffe8ce45f837f87d", size = 172959, upload-time = "2026-01-14T12:54:44.602Z" }, + { url = "https://files.pythonhosted.org/packages/82/5f/3e85351c523f73ad8d938989e9a58c7f59fb9c17f761b9981b43f0025ce7/librt-0.7.8-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4864045f49dc9c974dadb942ac56a74cd0479a2aafa51ce272c490a82322ea3c", size = 186717, upload-time = "2026-01-14T12:54:45.986Z" }, + { url = "https://files.pythonhosted.org/packages/08/f8/18bfe092e402d00fe00d33aa1e01dda1bd583ca100b393b4373847eade6d/librt-0.7.8-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a36515b1328dc5b3ffce79fe204985ca8572525452eacabee2166f44bb387b2c", size = 184585, upload-time = "2026-01-14T12:54:47.139Z" }, + { url = "https://files.pythonhosted.org/packages/4e/fc/f43972ff56fd790a9fa55028a52ccea1875100edbb856b705bd393b601e3/librt-0.7.8-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b7e7f140c5169798f90b80d6e607ed2ba5059784968a004107c88ad61fb3641d", size = 180497, upload-time = "2026-01-14T12:54:48.946Z" }, + { url = "https://files.pythonhosted.org/packages/e1/3a/25e36030315a410d3ad0b7d0f19f5f188e88d1613d7d3fd8150523ea1093/librt-0.7.8-cp310-cp310-musllinux_1_2_x86_64.whl", hash = 
"sha256:ff71447cb778a4f772ddc4ce360e6ba9c95527ed84a52096bd1bbf9fee2ec7c0", size = 200052, upload-time = "2026-01-14T12:54:50.382Z" }, + { url = "https://files.pythonhosted.org/packages/fc/b8/f3a5a1931ae2a6ad92bf6893b9ef44325b88641d58723529e2c2935e8abe/librt-0.7.8-cp310-cp310-win32.whl", hash = "sha256:047164e5f68b7a8ebdf9fae91a3c2161d3192418aadd61ddd3a86a56cbe3dc85", size = 43477, upload-time = "2026-01-14T12:54:51.815Z" }, + { url = "https://files.pythonhosted.org/packages/fe/91/c4202779366bc19f871b4ad25db10fcfa1e313c7893feb942f32668e8597/librt-0.7.8-cp310-cp310-win_amd64.whl", hash = "sha256:d6f254d096d84156a46a84861183c183d30734e52383602443292644d895047c", size = 49806, upload-time = "2026-01-14T12:54:53.149Z" }, + { url = "https://files.pythonhosted.org/packages/1b/a3/87ea9c1049f2c781177496ebee29430e4631f439b8553a4969c88747d5d8/librt-0.7.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ff3e9c11aa260c31493d4b3197d1e28dd07768594a4f92bec4506849d736248f", size = 56507, upload-time = "2026-01-14T12:54:54.156Z" }, + { url = "https://files.pythonhosted.org/packages/5e/4a/23bcef149f37f771ad30203d561fcfd45b02bc54947b91f7a9ac34815747/librt-0.7.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ddb52499d0b3ed4aa88746aaf6f36a08314677d5c346234c3987ddc506404eac", size = 58455, upload-time = "2026-01-14T12:54:55.978Z" }, + { url = "https://files.pythonhosted.org/packages/22/6e/46eb9b85c1b9761e0f42b6e6311e1cc544843ac897457062b9d5d0b21df4/librt-0.7.8-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:e9c0afebbe6ce177ae8edba0c7c4d626f2a0fc12c33bb993d163817c41a7a05c", size = 164956, upload-time = "2026-01-14T12:54:57.311Z" }, + { url = "https://files.pythonhosted.org/packages/7a/3f/aa7c7f6829fb83989feb7ba9aa11c662b34b4bd4bd5b262f2876ba3db58d/librt-0.7.8-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:631599598e2c76ded400c0a8722dec09217c89ff64dc54b060f598ed68e7d2a8", size = 174364, 
upload-time = "2026-01-14T12:54:59.089Z" }, + { url = "https://files.pythonhosted.org/packages/3f/2d/d57d154b40b11f2cb851c4df0d4c4456bacd9b1ccc4ecb593ddec56c1a8b/librt-0.7.8-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9c1ba843ae20db09b9d5c80475376168feb2640ce91cd9906414f23cc267a1ff", size = 188034, upload-time = "2026-01-14T12:55:00.141Z" }, + { url = "https://files.pythonhosted.org/packages/59/f9/36c4dad00925c16cd69d744b87f7001792691857d3b79187e7a673e812fb/librt-0.7.8-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b5b007bb22ea4b255d3ee39dfd06d12534de2fcc3438567d9f48cdaf67ae1ae3", size = 186295, upload-time = "2026-01-14T12:55:01.303Z" }, + { url = "https://files.pythonhosted.org/packages/23/9b/8a9889d3df5efb67695a67785028ccd58e661c3018237b73ad081691d0cb/librt-0.7.8-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:dbd79caaf77a3f590cbe32dc2447f718772d6eea59656a7dcb9311161b10fa75", size = 181470, upload-time = "2026-01-14T12:55:02.492Z" }, + { url = "https://files.pythonhosted.org/packages/43/64/54d6ef11afca01fef8af78c230726a9394759f2addfbf7afc5e3cc032a45/librt-0.7.8-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:87808a8d1e0bd62a01cafc41f0fd6818b5a5d0ca0d8a55326a81643cdda8f873", size = 201713, upload-time = "2026-01-14T12:55:03.919Z" }, + { url = "https://files.pythonhosted.org/packages/2d/29/73e7ed2991330b28919387656f54109139b49e19cd72902f466bd44415fd/librt-0.7.8-cp311-cp311-win32.whl", hash = "sha256:31724b93baa91512bd0a376e7cf0b59d8b631ee17923b1218a65456fa9bda2e7", size = 43803, upload-time = "2026-01-14T12:55:04.996Z" }, + { url = "https://files.pythonhosted.org/packages/3f/de/66766ff48ed02b4d78deea30392ae200bcbd99ae61ba2418b49fd50a4831/librt-0.7.8-cp311-cp311-win_amd64.whl", hash = "sha256:978e8b5f13e52cf23a9e80f3286d7546baa70bc4ef35b51d97a709d0b28e537c", size = 50080, upload-time = "2026-01-14T12:55:06.489Z" }, + { url = 
"https://files.pythonhosted.org/packages/6f/e3/33450438ff3a8c581d4ed7f798a70b07c3206d298cf0b87d3806e72e3ed8/librt-0.7.8-cp311-cp311-win_arm64.whl", hash = "sha256:20e3946863d872f7cabf7f77c6c9d370b8b3d74333d3a32471c50d3a86c0a232", size = 43383, upload-time = "2026-01-14T12:55:07.49Z" }, + { url = "https://files.pythonhosted.org/packages/56/04/79d8fcb43cae376c7adbab7b2b9f65e48432c9eced62ac96703bcc16e09b/librt-0.7.8-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9b6943885b2d49c48d0cff23b16be830ba46b0152d98f62de49e735c6e655a63", size = 57472, upload-time = "2026-01-14T12:55:08.528Z" }, + { url = "https://files.pythonhosted.org/packages/b4/ba/60b96e93043d3d659da91752689023a73981336446ae82078cddf706249e/librt-0.7.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:46ef1f4b9b6cc364b11eea0ecc0897314447a66029ee1e55859acb3dd8757c93", size = 58986, upload-time = "2026-01-14T12:55:09.466Z" }, + { url = "https://files.pythonhosted.org/packages/7c/26/5215e4cdcc26e7be7eee21955a7e13cbf1f6d7d7311461a6014544596fac/librt-0.7.8-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:907ad09cfab21e3c86e8f1f87858f7049d1097f77196959c033612f532b4e592", size = 168422, upload-time = "2026-01-14T12:55:10.499Z" }, + { url = "https://files.pythonhosted.org/packages/0f/84/e8d1bc86fa0159bfc24f3d798d92cafd3897e84c7fea7fe61b3220915d76/librt-0.7.8-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2991b6c3775383752b3ca0204842743256f3ad3deeb1d0adc227d56b78a9a850", size = 177478, upload-time = "2026-01-14T12:55:11.577Z" }, + { url = "https://files.pythonhosted.org/packages/57/11/d0268c4b94717a18aa91df1100e767b010f87b7ae444dafaa5a2d80f33a6/librt-0.7.8-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:03679b9856932b8c8f674e87aa3c55ea11c9274301f76ae8dc4d281bda55cf62", size = 192439, upload-time = "2026-01-14T12:55:12.7Z" }, + { url = 
"https://files.pythonhosted.org/packages/8d/56/1e8e833b95fe684f80f8894ae4d8b7d36acc9203e60478fcae599120a975/librt-0.7.8-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3968762fec1b2ad34ce57458b6de25dbb4142713e9ca6279a0d352fa4e9f452b", size = 191483, upload-time = "2026-01-14T12:55:13.838Z" }, + { url = "https://files.pythonhosted.org/packages/17/48/f11cf28a2cb6c31f282009e2208312aa84a5ee2732859f7856ee306176d5/librt-0.7.8-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:bb7a7807523a31f03061288cc4ffc065d684c39db7644c676b47d89553c0d714", size = 185376, upload-time = "2026-01-14T12:55:15.017Z" }, + { url = "https://files.pythonhosted.org/packages/b8/6a/d7c116c6da561b9155b184354a60a3d5cdbf08fc7f3678d09c95679d13d9/librt-0.7.8-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad64a14b1e56e702e19b24aae108f18ad1bf7777f3af5fcd39f87d0c5a814449", size = 206234, upload-time = "2026-01-14T12:55:16.571Z" }, + { url = "https://files.pythonhosted.org/packages/61/de/1975200bb0285fc921c5981d9978ce6ce11ae6d797df815add94a5a848a3/librt-0.7.8-cp312-cp312-win32.whl", hash = "sha256:0241a6ed65e6666236ea78203a73d800dbed896cf12ae25d026d75dc1fcd1dac", size = 44057, upload-time = "2026-01-14T12:55:18.077Z" }, + { url = "https://files.pythonhosted.org/packages/8e/cd/724f2d0b3461426730d4877754b65d39f06a41ac9d0a92d5c6840f72b9ae/librt-0.7.8-cp312-cp312-win_amd64.whl", hash = "sha256:6db5faf064b5bab9675c32a873436b31e01d66ca6984c6f7f92621656033a708", size = 50293, upload-time = "2026-01-14T12:55:19.179Z" }, + { url = "https://files.pythonhosted.org/packages/bd/cf/7e899acd9ee5727ad8160fdcc9994954e79fab371c66535c60e13b968ffc/librt-0.7.8-cp312-cp312-win_arm64.whl", hash = "sha256:57175aa93f804d2c08d2edb7213e09276bd49097611aefc37e3fa38d1fb99ad0", size = 43574, upload-time = "2026-01-14T12:55:20.185Z" }, + { url = "https://files.pythonhosted.org/packages/a1/fe/b1f9de2829cf7fc7649c1dcd202cfd873837c5cc2fc9e526b0e7f716c3d2/librt-0.7.8-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:4c3995abbbb60b3c129490fa985dfe6cac11d88fc3c36eeb4fb1449efbbb04fc", size = 57500, upload-time = "2026-01-14T12:55:21.219Z" }, + { url = "https://files.pythonhosted.org/packages/eb/d4/4a60fbe2e53b825f5d9a77325071d61cd8af8506255067bf0c8527530745/librt-0.7.8-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:44e0c2cbc9bebd074cf2cdbe472ca185e824be4e74b1c63a8e934cea674bebf2", size = 59019, upload-time = "2026-01-14T12:55:22.256Z" }, + { url = "https://files.pythonhosted.org/packages/6a/37/61ff80341ba5159afa524445f2d984c30e2821f31f7c73cf166dcafa5564/librt-0.7.8-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:4d2f1e492cae964b3463a03dc77a7fe8742f7855d7258c7643f0ee32b6651dd3", size = 169015, upload-time = "2026-01-14T12:55:23.24Z" }, + { url = "https://files.pythonhosted.org/packages/1c/86/13d4f2d6a93f181ebf2fc953868826653ede494559da8268023fe567fca3/librt-0.7.8-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:451e7ffcef8f785831fdb791bd69211f47e95dc4c6ddff68e589058806f044c6", size = 178161, upload-time = "2026-01-14T12:55:24.826Z" }, + { url = "https://files.pythonhosted.org/packages/88/26/e24ef01305954fc4d771f1f09f3dd682f9eb610e1bec188ffb719374d26e/librt-0.7.8-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3469e1af9f1380e093ae06bedcbdd11e407ac0b303a56bbe9afb1d6824d4982d", size = 193015, upload-time = "2026-01-14T12:55:26.04Z" }, + { url = "https://files.pythonhosted.org/packages/88/a0/92b6bd060e720d7a31ed474d046a69bd55334ec05e9c446d228c4b806ae3/librt-0.7.8-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f11b300027ce19a34f6d24ebb0a25fd0e24a9d53353225a5c1e6cadbf2916b2e", size = 192038, upload-time = "2026-01-14T12:55:27.208Z" }, + { url = "https://files.pythonhosted.org/packages/06/bb/6f4c650253704279c3a214dad188101d1b5ea23be0606628bc6739456624/librt-0.7.8-cp313-cp313-musllinux_1_2_i686.whl", hash = 
"sha256:4adc73614f0d3c97874f02f2c7fd2a27854e7e24ad532ea6b965459c5b757eca", size = 186006, upload-time = "2026-01-14T12:55:28.594Z" }, + { url = "https://files.pythonhosted.org/packages/dc/00/1c409618248d43240cadf45f3efb866837fa77e9a12a71481912135eb481/librt-0.7.8-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:60c299e555f87e4c01b2eca085dfccda1dde87f5a604bb45c2906b8305819a93", size = 206888, upload-time = "2026-01-14T12:55:30.214Z" }, + { url = "https://files.pythonhosted.org/packages/d9/83/b2cfe8e76ff5c1c77f8a53da3d5de62d04b5ebf7cf913e37f8bca43b5d07/librt-0.7.8-cp313-cp313-win32.whl", hash = "sha256:b09c52ed43a461994716082ee7d87618096851319bf695d57ec123f2ab708951", size = 44126, upload-time = "2026-01-14T12:55:31.44Z" }, + { url = "https://files.pythonhosted.org/packages/a9/0b/c59d45de56a51bd2d3a401fc63449c0ac163e4ef7f523ea8b0c0dee86ec5/librt-0.7.8-cp313-cp313-win_amd64.whl", hash = "sha256:f8f4a901a3fa28969d6e4519deceab56c55a09d691ea7b12ca830e2fa3461e34", size = 50262, upload-time = "2026-01-14T12:55:33.01Z" }, + { url = "https://files.pythonhosted.org/packages/fc/b9/973455cec0a1ec592395250c474164c4a58ebf3e0651ee920fef1a2623f1/librt-0.7.8-cp313-cp313-win_arm64.whl", hash = "sha256:43d4e71b50763fcdcf64725ac680d8cfa1706c928b844794a7aa0fa9ac8e5f09", size = 43600, upload-time = "2026-01-14T12:55:34.054Z" }, + { url = "https://files.pythonhosted.org/packages/1a/73/fa8814c6ce2d49c3827829cadaa1589b0bf4391660bd4510899393a23ebc/librt-0.7.8-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:be927c3c94c74b05128089a955fba86501c3b544d1d300282cc1b4bd370cb418", size = 57049, upload-time = "2026-01-14T12:55:35.056Z" }, + { url = "https://files.pythonhosted.org/packages/53/fe/f6c70956da23ea235fd2e3cc16f4f0b4ebdfd72252b02d1164dd58b4e6c3/librt-0.7.8-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:7b0803e9008c62a7ef79058233db7ff6f37a9933b8f2573c05b07ddafa226611", size = 58689, upload-time = "2026-01-14T12:55:36.078Z" }, + { url = 
"https://files.pythonhosted.org/packages/1f/4d/7a2481444ac5fba63050d9abe823e6bc16896f575bfc9c1e5068d516cdce/librt-0.7.8-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:79feb4d00b2a4e0e05c9c56df707934f41fcb5fe53fd9efb7549068d0495b758", size = 166808, upload-time = "2026-01-14T12:55:37.595Z" }, + { url = "https://files.pythonhosted.org/packages/ac/3c/10901d9e18639f8953f57c8986796cfbf4c1c514844a41c9197cf87cb707/librt-0.7.8-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b9122094e3f24aa759c38f46bd8863433820654927370250f460ae75488b66ea", size = 175614, upload-time = "2026-01-14T12:55:38.756Z" }, + { url = "https://files.pythonhosted.org/packages/db/01/5cbdde0951a5090a80e5ba44e6357d375048123c572a23eecfb9326993a7/librt-0.7.8-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7e03bea66af33c95ce3addf87a9bf1fcad8d33e757bc479957ddbc0e4f7207ac", size = 189955, upload-time = "2026-01-14T12:55:39.939Z" }, + { url = "https://files.pythonhosted.org/packages/6a/b4/e80528d2f4b7eaf1d437fcbd6fc6ba4cbeb3e2a0cb9ed5a79f47c7318706/librt-0.7.8-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:f1ade7f31675db00b514b98f9ab9a7698c7282dad4be7492589109471852d398", size = 189370, upload-time = "2026-01-14T12:55:41.057Z" }, + { url = "https://files.pythonhosted.org/packages/c1/ab/938368f8ce31a9787ecd4becb1e795954782e4312095daf8fd22420227c8/librt-0.7.8-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:a14229ac62adcf1b90a15992f1ab9c69ae8b99ffb23cb64a90878a6e8a2f5b81", size = 183224, upload-time = "2026-01-14T12:55:42.328Z" }, + { url = "https://files.pythonhosted.org/packages/3c/10/559c310e7a6e4014ac44867d359ef8238465fb499e7eb31b6bfe3e3f86f5/librt-0.7.8-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5bcaaf624fd24e6a0cb14beac37677f90793a96864c67c064a91458611446e83", size = 203541, upload-time = "2026-01-14T12:55:43.501Z" }, + { url = 
"https://files.pythonhosted.org/packages/f8/db/a0db7acdb6290c215f343835c6efda5b491bb05c3ddc675af558f50fdba3/librt-0.7.8-cp314-cp314-win32.whl", hash = "sha256:7aa7d5457b6c542ecaed79cec4ad98534373c9757383973e638ccced0f11f46d", size = 40657, upload-time = "2026-01-14T12:55:44.668Z" }, + { url = "https://files.pythonhosted.org/packages/72/e0/4f9bdc2a98a798511e81edcd6b54fe82767a715e05d1921115ac70717f6f/librt-0.7.8-cp314-cp314-win_amd64.whl", hash = "sha256:3d1322800771bee4a91f3b4bd4e49abc7d35e65166821086e5afd1e6c0d9be44", size = 46835, upload-time = "2026-01-14T12:55:45.655Z" }, + { url = "https://files.pythonhosted.org/packages/f9/3d/59c6402e3dec2719655a41ad027a7371f8e2334aa794ed11533ad5f34969/librt-0.7.8-cp314-cp314-win_arm64.whl", hash = "sha256:5363427bc6a8c3b1719f8f3845ea53553d301382928a86e8fab7984426949bce", size = 39885, upload-time = "2026-01-14T12:55:47.138Z" }, + { url = "https://files.pythonhosted.org/packages/4e/9c/2481d80950b83085fb14ba3c595db56330d21bbc7d88a19f20165f3538db/librt-0.7.8-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:ca916919793a77e4a98d4a1701e345d337ce53be4a16620f063191f7322ac80f", size = 59161, upload-time = "2026-01-14T12:55:48.45Z" }, + { url = "https://files.pythonhosted.org/packages/96/79/108df2cfc4e672336765d54e3ff887294c1cc36ea4335c73588875775527/librt-0.7.8-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:54feb7b4f2f6706bb82325e836a01be805770443e2400f706e824e91f6441dde", size = 61008, upload-time = "2026-01-14T12:55:49.527Z" }, + { url = "https://files.pythonhosted.org/packages/46/f2/30179898f9994a5637459d6e169b6abdc982012c0a4b2d4c26f50c06f911/librt-0.7.8-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:39a4c76fee41007070f872b648cc2f711f9abf9a13d0c7162478043377b52c8e", size = 187199, upload-time = "2026-01-14T12:55:50.587Z" }, + { url = 
"https://files.pythonhosted.org/packages/b4/da/f7563db55cebdc884f518ba3791ad033becc25ff68eb70902b1747dc0d70/librt-0.7.8-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ac9c8a458245c7de80bc1b9765b177055efff5803f08e548dd4bb9ab9a8d789b", size = 198317, upload-time = "2026-01-14T12:55:51.991Z" }, + { url = "https://files.pythonhosted.org/packages/b3/6c/4289acf076ad371471fa86718c30ae353e690d3de6167f7db36f429272f1/librt-0.7.8-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:95b67aa7eff150f075fda09d11f6bfb26edffd300f6ab1666759547581e8f666", size = 210334, upload-time = "2026-01-14T12:55:53.682Z" }, + { url = "https://files.pythonhosted.org/packages/4a/7f/377521ac25b78ac0a5ff44127a0360ee6d5ddd3ce7327949876a30533daa/librt-0.7.8-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:535929b6eff670c593c34ff435d5440c3096f20fa72d63444608a5aef64dd581", size = 211031, upload-time = "2026-01-14T12:55:54.827Z" }, + { url = "https://files.pythonhosted.org/packages/c5/b1/e1e96c3e20b23d00cf90f4aad48f0deb4cdfec2f0ed8380d0d85acf98bbf/librt-0.7.8-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:63937bd0f4d1cb56653dc7ae900d6c52c41f0015e25aaf9902481ee79943b33a", size = 204581, upload-time = "2026-01-14T12:55:56.811Z" }, + { url = "https://files.pythonhosted.org/packages/43/71/0f5d010e92ed9747e14bef35e91b6580533510f1e36a8a09eb79ee70b2f0/librt-0.7.8-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:cf243da9e42d914036fd362ac3fa77d80a41cadcd11ad789b1b5eec4daaf67ca", size = 224731, upload-time = "2026-01-14T12:55:58.175Z" }, + { url = "https://files.pythonhosted.org/packages/22/f0/07fb6ab5c39a4ca9af3e37554f9d42f25c464829254d72e4ebbd81da351c/librt-0.7.8-cp314-cp314t-win32.whl", hash = "sha256:171ca3a0a06c643bd0a2f62a8944e1902c94aa8e5da4db1ea9a8daf872685365", size = 41173, upload-time = "2026-01-14T12:55:59.315Z" }, + { url = 
"https://files.pythonhosted.org/packages/24/d4/7e4be20993dc6a782639625bd2f97f3c66125c7aa80c82426956811cfccf/librt-0.7.8-cp314-cp314t-win_amd64.whl", hash = "sha256:445b7304145e24c60288a2f172b5ce2ca35c0f81605f5299f3fa567e189d2e32", size = 47668, upload-time = "2026-01-14T12:56:00.261Z" }, + { url = "https://files.pythonhosted.org/packages/fc/85/69f92b2a7b3c0f88ffe107c86b952b397004b5b8ea5a81da3d9c04c04422/librt-0.7.8-cp314-cp314t-win_arm64.whl", hash = "sha256:8766ece9de08527deabcd7cb1b4f1a967a385d26e33e536d6d8913db6ef74f06", size = 40550, upload-time = "2026-01-14T12:56:01.542Z" }, +] + +[[package]] +name = "markdown-it-py" +version = "4.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mdurl", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070, upload-time = "2025-08-11T12:57:52.854Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" }, +] + +[[package]] +name = "markupsafe" +version = "3.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698", size = 80313, upload-time = "2025-09-27T18:37:40.426Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/e8/4b/3541d44f3937ba468b75da9eebcae497dcf67adb65caa16760b0a6807ebb/markupsafe-3.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2f981d352f04553a7171b8e44369f2af4055f888dfb147d55e42d29e29e74559", size = 11631, upload-time = "2025-09-27T18:36:05.558Z" }, + { url = "https://files.pythonhosted.org/packages/98/1b/fbd8eed11021cabd9226c37342fa6ca4e8a98d8188a8d9b66740494960e4/markupsafe-3.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e1c1493fb6e50ab01d20a22826e57520f1284df32f2d8601fdd90b6304601419", size = 12057, upload-time = "2025-09-27T18:36:07.165Z" }, + { url = "https://files.pythonhosted.org/packages/40/01/e560d658dc0bb8ab762670ece35281dec7b6c1b33f5fbc09ebb57a185519/markupsafe-3.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1ba88449deb3de88bd40044603fafffb7bc2b055d626a330323a9ed736661695", size = 22050, upload-time = "2025-09-27T18:36:08.005Z" }, + { url = "https://files.pythonhosted.org/packages/af/cd/ce6e848bbf2c32314c9b237839119c5a564a59725b53157c856e90937b7a/markupsafe-3.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f42d0984e947b8adf7dd6dde396e720934d12c506ce84eea8476409563607591", size = 20681, upload-time = "2025-09-27T18:36:08.881Z" }, + { url = "https://files.pythonhosted.org/packages/c9/2a/b5c12c809f1c3045c4d580b035a743d12fcde53cf685dbc44660826308da/markupsafe-3.0.3-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c0c0b3ade1c0b13b936d7970b1d37a57acde9199dc2aecc4c336773e1d86049c", size = 20705, upload-time = "2025-09-27T18:36:10.131Z" }, + { url = "https://files.pythonhosted.org/packages/cf/e3/9427a68c82728d0a88c50f890d0fc072a1484de2f3ac1ad0bfc1a7214fd5/markupsafe-3.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0303439a41979d9e74d18ff5e2dd8c43ed6c6001fd40e5bf2e43f7bd9bbc523f", size = 21524, upload-time = "2025-09-27T18:36:11.324Z" }, + { url = 
"https://files.pythonhosted.org/packages/bc/36/23578f29e9e582a4d0278e009b38081dbe363c5e7165113fad546918a232/markupsafe-3.0.3-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:d2ee202e79d8ed691ceebae8e0486bd9a2cd4794cec4824e1c99b6f5009502f6", size = 20282, upload-time = "2025-09-27T18:36:12.573Z" }, + { url = "https://files.pythonhosted.org/packages/56/21/dca11354e756ebd03e036bd8ad58d6d7168c80ce1fe5e75218e4945cbab7/markupsafe-3.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:177b5253b2834fe3678cb4a5f0059808258584c559193998be2601324fdeafb1", size = 20745, upload-time = "2025-09-27T18:36:13.504Z" }, + { url = "https://files.pythonhosted.org/packages/87/99/faba9369a7ad6e4d10b6a5fbf71fa2a188fe4a593b15f0963b73859a1bbd/markupsafe-3.0.3-cp310-cp310-win32.whl", hash = "sha256:2a15a08b17dd94c53a1da0438822d70ebcd13f8c3a95abe3a9ef9f11a94830aa", size = 14571, upload-time = "2025-09-27T18:36:14.779Z" }, + { url = "https://files.pythonhosted.org/packages/d6/25/55dc3ab959917602c96985cb1253efaa4ff42f71194bddeb61eb7278b8be/markupsafe-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:c4ffb7ebf07cfe8931028e3e4c85f0357459a3f9f9490886198848f4fa002ec8", size = 15056, upload-time = "2025-09-27T18:36:16.125Z" }, + { url = "https://files.pythonhosted.org/packages/d0/9e/0a02226640c255d1da0b8d12e24ac2aa6734da68bff14c05dd53b94a0fc3/markupsafe-3.0.3-cp310-cp310-win_arm64.whl", hash = "sha256:e2103a929dfa2fcaf9bb4e7c091983a49c9ac3b19c9061b6d5427dd7d14d81a1", size = 13932, upload-time = "2025-09-27T18:36:17.311Z" }, + { url = "https://files.pythonhosted.org/packages/08/db/fefacb2136439fc8dd20e797950e749aa1f4997ed584c62cfb8ef7c2be0e/markupsafe-3.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cc7ea17a6824959616c525620e387f6dd30fec8cb44f649e31712db02123dad", size = 11631, upload-time = "2025-09-27T18:36:18.185Z" }, + { url = 
"https://files.pythonhosted.org/packages/e1/2e/5898933336b61975ce9dc04decbc0a7f2fee78c30353c5efba7f2d6ff27a/markupsafe-3.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bd4cd07944443f5a265608cc6aab442e4f74dff8088b0dfc8238647b8f6ae9a", size = 12058, upload-time = "2025-09-27T18:36:19.444Z" }, + { url = "https://files.pythonhosted.org/packages/1d/09/adf2df3699d87d1d8184038df46a9c80d78c0148492323f4693df54e17bb/markupsafe-3.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b5420a1d9450023228968e7e6a9ce57f65d148ab56d2313fcd589eee96a7a50", size = 24287, upload-time = "2025-09-27T18:36:20.768Z" }, + { url = "https://files.pythonhosted.org/packages/30/ac/0273f6fcb5f42e314c6d8cd99effae6a5354604d461b8d392b5ec9530a54/markupsafe-3.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0bf2a864d67e76e5c9a34dc26ec616a66b9888e25e7b9460e1c76d3293bd9dbf", size = 22940, upload-time = "2025-09-27T18:36:22.249Z" }, + { url = "https://files.pythonhosted.org/packages/19/ae/31c1be199ef767124c042c6c3e904da327a2f7f0cd63a0337e1eca2967a8/markupsafe-3.0.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc51efed119bc9cfdf792cdeaa4d67e8f6fcccab66ed4bfdd6bde3e59bfcbb2f", size = 21887, upload-time = "2025-09-27T18:36:23.535Z" }, + { url = "https://files.pythonhosted.org/packages/b2/76/7edcab99d5349a4532a459e1fe64f0b0467a3365056ae550d3bcf3f79e1e/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:068f375c472b3e7acbe2d5318dea141359e6900156b5b2ba06a30b169086b91a", size = 23692, upload-time = "2025-09-27T18:36:24.823Z" }, + { url = "https://files.pythonhosted.org/packages/a4/28/6e74cdd26d7514849143d69f0bf2399f929c37dc2b31e6829fd2045b2765/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7be7b61bb172e1ed687f1754f8e7484f1c8019780f6f6b0786e76bb01c2ae115", size = 21471, upload-time = "2025-09-27T18:36:25.95Z" }, + { url = 
"https://files.pythonhosted.org/packages/62/7e/a145f36a5c2945673e590850a6f8014318d5577ed7e5920a4b3448e0865d/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f9e130248f4462aaa8e2552d547f36ddadbeaa573879158d721bbd33dfe4743a", size = 22923, upload-time = "2025-09-27T18:36:27.109Z" }, + { url = "https://files.pythonhosted.org/packages/0f/62/d9c46a7f5c9adbeeeda52f5b8d802e1094e9717705a645efc71b0913a0a8/markupsafe-3.0.3-cp311-cp311-win32.whl", hash = "sha256:0db14f5dafddbb6d9208827849fad01f1a2609380add406671a26386cdf15a19", size = 14572, upload-time = "2025-09-27T18:36:28.045Z" }, + { url = "https://files.pythonhosted.org/packages/83/8a/4414c03d3f891739326e1783338e48fb49781cc915b2e0ee052aa490d586/markupsafe-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:de8a88e63464af587c950061a5e6a67d3632e36df62b986892331d4620a35c01", size = 15077, upload-time = "2025-09-27T18:36:29.025Z" }, + { url = "https://files.pythonhosted.org/packages/35/73/893072b42e6862f319b5207adc9ae06070f095b358655f077f69a35601f0/markupsafe-3.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:3b562dd9e9ea93f13d53989d23a7e775fdfd1066c33494ff43f5418bc8c58a5c", size = 13876, upload-time = "2025-09-27T18:36:29.954Z" }, + { url = "https://files.pythonhosted.org/packages/5a/72/147da192e38635ada20e0a2e1a51cf8823d2119ce8883f7053879c2199b5/markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e", size = 11615, upload-time = "2025-09-27T18:36:30.854Z" }, + { url = "https://files.pythonhosted.org/packages/9a/81/7e4e08678a1f98521201c3079f77db69fb552acd56067661f8c2f534a718/markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce", size = 12020, upload-time = "2025-09-27T18:36:31.971Z" }, + { url = 
"https://files.pythonhosted.org/packages/1e/2c/799f4742efc39633a1b54a92eec4082e4f815314869865d876824c257c1e/markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d", size = 24332, upload-time = "2025-09-27T18:36:32.813Z" }, + { url = "https://files.pythonhosted.org/packages/3c/2e/8d0c2ab90a8c1d9a24f0399058ab8519a3279d1bd4289511d74e909f060e/markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d", size = 22947, upload-time = "2025-09-27T18:36:33.86Z" }, + { url = "https://files.pythonhosted.org/packages/2c/54/887f3092a85238093a0b2154bd629c89444f395618842e8b0c41783898ea/markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a", size = 21962, upload-time = "2025-09-27T18:36:35.099Z" }, + { url = "https://files.pythonhosted.org/packages/c9/2f/336b8c7b6f4a4d95e91119dc8521402461b74a485558d8f238a68312f11c/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b", size = 23760, upload-time = "2025-09-27T18:36:36.001Z" }, + { url = "https://files.pythonhosted.org/packages/32/43/67935f2b7e4982ffb50a4d169b724d74b62a3964bc1a9a527f5ac4f1ee2b/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f", size = 21529, upload-time = "2025-09-27T18:36:36.906Z" }, + { url = "https://files.pythonhosted.org/packages/89/e0/4486f11e51bbba8b0c041098859e869e304d1c261e59244baa3d295d47b7/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b", size = 23015, upload-time = "2025-09-27T18:36:37.868Z" }, + { url 
= "https://files.pythonhosted.org/packages/2f/e1/78ee7a023dac597a5825441ebd17170785a9dab23de95d2c7508ade94e0e/markupsafe-3.0.3-cp312-cp312-win32.whl", hash = "sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d", size = 14540, upload-time = "2025-09-27T18:36:38.761Z" }, + { url = "https://files.pythonhosted.org/packages/aa/5b/bec5aa9bbbb2c946ca2733ef9c4ca91c91b6a24580193e891b5f7dbe8e1e/markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c", size = 15105, upload-time = "2025-09-27T18:36:39.701Z" }, + { url = "https://files.pythonhosted.org/packages/e5/f1/216fc1bbfd74011693a4fd837e7026152e89c4bcf3e77b6692fba9923123/markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f", size = 13906, upload-time = "2025-09-27T18:36:40.689Z" }, + { url = "https://files.pythonhosted.org/packages/38/2f/907b9c7bbba283e68f20259574b13d005c121a0fa4c175f9bed27c4597ff/markupsafe-3.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e1cf1972137e83c5d4c136c43ced9ac51d0e124706ee1c8aa8532c1287fa8795", size = 11622, upload-time = "2025-09-27T18:36:41.777Z" }, + { url = "https://files.pythonhosted.org/packages/9c/d9/5f7756922cdd676869eca1c4e3c0cd0df60ed30199ffd775e319089cb3ed/markupsafe-3.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:116bb52f642a37c115f517494ea5feb03889e04df47eeff5b130b1808ce7c219", size = 12029, upload-time = "2025-09-27T18:36:43.257Z" }, + { url = "https://files.pythonhosted.org/packages/00/07/575a68c754943058c78f30db02ee03a64b3c638586fba6a6dd56830b30a3/markupsafe-3.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:133a43e73a802c5562be9bbcd03d090aa5a1fe899db609c29e8c8d815c5f6de6", size = 24374, upload-time = "2025-09-27T18:36:44.508Z" }, + { url = 
"https://files.pythonhosted.org/packages/a9/21/9b05698b46f218fc0e118e1f8168395c65c8a2c750ae2bab54fc4bd4e0e8/markupsafe-3.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfcd093f13f0f0b7fdd0f198b90053bf7b2f02a3927a30e63f3ccc9df56b676", size = 22980, upload-time = "2025-09-27T18:36:45.385Z" }, + { url = "https://files.pythonhosted.org/packages/7f/71/544260864f893f18b6827315b988c146b559391e6e7e8f7252839b1b846a/markupsafe-3.0.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:509fa21c6deb7a7a273d629cf5ec029bc209d1a51178615ddf718f5918992ab9", size = 21990, upload-time = "2025-09-27T18:36:46.916Z" }, + { url = "https://files.pythonhosted.org/packages/c2/28/b50fc2f74d1ad761af2f5dcce7492648b983d00a65b8c0e0cb457c82ebbe/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4afe79fb3de0b7097d81da19090f4df4f8d3a2b3adaa8764138aac2e44f3af1", size = 23784, upload-time = "2025-09-27T18:36:47.884Z" }, + { url = "https://files.pythonhosted.org/packages/ed/76/104b2aa106a208da8b17a2fb72e033a5a9d7073c68f7e508b94916ed47a9/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:795e7751525cae078558e679d646ae45574b47ed6e7771863fcc079a6171a0fc", size = 21588, upload-time = "2025-09-27T18:36:48.82Z" }, + { url = "https://files.pythonhosted.org/packages/b5/99/16a5eb2d140087ebd97180d95249b00a03aa87e29cc224056274f2e45fd6/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8485f406a96febb5140bfeca44a73e3ce5116b2501ac54fe953e488fb1d03b12", size = 23041, upload-time = "2025-09-27T18:36:49.797Z" }, + { url = "https://files.pythonhosted.org/packages/19/bc/e7140ed90c5d61d77cea142eed9f9c303f4c4806f60a1044c13e3f1471d0/markupsafe-3.0.3-cp313-cp313-win32.whl", hash = "sha256:bdd37121970bfd8be76c5fb069c7751683bdf373db1ed6c010162b2a130248ed", size = 14543, upload-time = "2025-09-27T18:36:51.584Z" }, + { url = 
"https://files.pythonhosted.org/packages/05/73/c4abe620b841b6b791f2edc248f556900667a5a1cf023a6646967ae98335/markupsafe-3.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:9a1abfdc021a164803f4d485104931fb8f8c1efd55bc6b748d2f5774e78b62c5", size = 15113, upload-time = "2025-09-27T18:36:52.537Z" }, + { url = "https://files.pythonhosted.org/packages/f0/3a/fa34a0f7cfef23cf9500d68cb7c32dd64ffd58a12b09225fb03dd37d5b80/markupsafe-3.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:7e68f88e5b8799aa49c85cd116c932a1ac15caaa3f5db09087854d218359e485", size = 13911, upload-time = "2025-09-27T18:36:53.513Z" }, + { url = "https://files.pythonhosted.org/packages/e4/d7/e05cd7efe43a88a17a37b3ae96e79a19e846f3f456fe79c57ca61356ef01/markupsafe-3.0.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:218551f6df4868a8d527e3062d0fb968682fe92054e89978594c28e642c43a73", size = 11658, upload-time = "2025-09-27T18:36:54.819Z" }, + { url = "https://files.pythonhosted.org/packages/99/9e/e412117548182ce2148bdeacdda3bb494260c0b0184360fe0d56389b523b/markupsafe-3.0.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3524b778fe5cfb3452a09d31e7b5adefeea8c5be1d43c4f810ba09f2ceb29d37", size = 12066, upload-time = "2025-09-27T18:36:55.714Z" }, + { url = "https://files.pythonhosted.org/packages/bc/e6/fa0ffcda717ef64a5108eaa7b4f5ed28d56122c9a6d70ab8b72f9f715c80/markupsafe-3.0.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e885a3d1efa2eadc93c894a21770e4bc67899e3543680313b09f139e149ab19", size = 25639, upload-time = "2025-09-27T18:36:56.908Z" }, + { url = "https://files.pythonhosted.org/packages/96/ec/2102e881fe9d25fc16cb4b25d5f5cde50970967ffa5dddafdb771237062d/markupsafe-3.0.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8709b08f4a89aa7586de0aadc8da56180242ee0ada3999749b183aa23df95025", size = 23569, upload-time = "2025-09-27T18:36:57.913Z" }, + { url = 
"https://files.pythonhosted.org/packages/4b/30/6f2fce1f1f205fc9323255b216ca8a235b15860c34b6798f810f05828e32/markupsafe-3.0.3-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b8512a91625c9b3da6f127803b166b629725e68af71f8184ae7e7d54686a56d6", size = 23284, upload-time = "2025-09-27T18:36:58.833Z" }, + { url = "https://files.pythonhosted.org/packages/58/47/4a0ccea4ab9f5dcb6f79c0236d954acb382202721e704223a8aafa38b5c8/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9b79b7a16f7fedff2495d684f2b59b0457c3b493778c9eed31111be64d58279f", size = 24801, upload-time = "2025-09-27T18:36:59.739Z" }, + { url = "https://files.pythonhosted.org/packages/6a/70/3780e9b72180b6fecb83a4814d84c3bf4b4ae4bf0b19c27196104149734c/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:12c63dfb4a98206f045aa9563db46507995f7ef6d83b2f68eda65c307c6829eb", size = 22769, upload-time = "2025-09-27T18:37:00.719Z" }, + { url = "https://files.pythonhosted.org/packages/98/c5/c03c7f4125180fc215220c035beac6b9cb684bc7a067c84fc69414d315f5/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8f71bc33915be5186016f675cd83a1e08523649b0e33efdb898db577ef5bb009", size = 23642, upload-time = "2025-09-27T18:37:01.673Z" }, + { url = "https://files.pythonhosted.org/packages/80/d6/2d1b89f6ca4bff1036499b1e29a1d02d282259f3681540e16563f27ebc23/markupsafe-3.0.3-cp313-cp313t-win32.whl", hash = "sha256:69c0b73548bc525c8cb9a251cddf1931d1db4d2258e9599c28c07ef3580ef354", size = 14612, upload-time = "2025-09-27T18:37:02.639Z" }, + { url = "https://files.pythonhosted.org/packages/2b/98/e48a4bfba0a0ffcf9925fe2d69240bfaa19c6f7507b8cd09c70684a53c1e/markupsafe-3.0.3-cp313-cp313t-win_amd64.whl", hash = "sha256:1b4b79e8ebf6b55351f0d91fe80f893b4743f104bff22e90697db1590e47a218", size = 15200, upload-time = "2025-09-27T18:37:03.582Z" }, + { url = 
"https://files.pythonhosted.org/packages/0e/72/e3cc540f351f316e9ed0f092757459afbc595824ca724cbc5a5d4263713f/markupsafe-3.0.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287", size = 13973, upload-time = "2025-09-27T18:37:04.929Z" }, + { url = "https://files.pythonhosted.org/packages/33/8a/8e42d4838cd89b7dde187011e97fe6c3af66d8c044997d2183fbd6d31352/markupsafe-3.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:eaa9599de571d72e2daf60164784109f19978b327a3910d3e9de8c97b5b70cfe", size = 11619, upload-time = "2025-09-27T18:37:06.342Z" }, + { url = "https://files.pythonhosted.org/packages/b5/64/7660f8a4a8e53c924d0fa05dc3a55c9cee10bbd82b11c5afb27d44b096ce/markupsafe-3.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c47a551199eb8eb2121d4f0f15ae0f923d31350ab9280078d1e5f12b249e0026", size = 12029, upload-time = "2025-09-27T18:37:07.213Z" }, + { url = "https://files.pythonhosted.org/packages/da/ef/e648bfd021127bef5fa12e1720ffed0c6cbb8310c8d9bea7266337ff06de/markupsafe-3.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f34c41761022dd093b4b6896d4810782ffbabe30f2d443ff5f083e0cbbb8c737", size = 24408, upload-time = "2025-09-27T18:37:09.572Z" }, + { url = "https://files.pythonhosted.org/packages/41/3c/a36c2450754618e62008bf7435ccb0f88053e07592e6028a34776213d877/markupsafe-3.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:457a69a9577064c05a97c41f4e65148652db078a3a509039e64d3467b9e7ef97", size = 23005, upload-time = "2025-09-27T18:37:10.58Z" }, + { url = "https://files.pythonhosted.org/packages/bc/20/b7fdf89a8456b099837cd1dc21974632a02a999ec9bf7ca3e490aacd98e7/markupsafe-3.0.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e8afc3f2ccfa24215f8cb28dcf43f0113ac3c37c2f0f0806d8c70e4228c5cf4d", size = 22048, upload-time = "2025-09-27T18:37:11.547Z" }, + { url = 
"https://files.pythonhosted.org/packages/9a/a7/591f592afdc734f47db08a75793a55d7fbcc6902a723ae4cfbab61010cc5/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ec15a59cf5af7be74194f7ab02d0f59a62bdcf1a537677ce67a2537c9b87fcda", size = 23821, upload-time = "2025-09-27T18:37:12.48Z" }, + { url = "https://files.pythonhosted.org/packages/7d/33/45b24e4f44195b26521bc6f1a82197118f74df348556594bd2262bda1038/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:0eb9ff8191e8498cca014656ae6b8d61f39da5f95b488805da4bb029cccbfbaf", size = 21606, upload-time = "2025-09-27T18:37:13.485Z" }, + { url = "https://files.pythonhosted.org/packages/ff/0e/53dfaca23a69fbfbbf17a4b64072090e70717344c52eaaaa9c5ddff1e5f0/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2713baf880df847f2bece4230d4d094280f4e67b1e813eec43b4c0e144a34ffe", size = 23043, upload-time = "2025-09-27T18:37:14.408Z" }, + { url = "https://files.pythonhosted.org/packages/46/11/f333a06fc16236d5238bfe74daccbca41459dcd8d1fa952e8fbd5dccfb70/markupsafe-3.0.3-cp314-cp314-win32.whl", hash = "sha256:729586769a26dbceff69f7a7dbbf59ab6572b99d94576a5592625d5b411576b9", size = 14747, upload-time = "2025-09-27T18:37:15.36Z" }, + { url = "https://files.pythonhosted.org/packages/28/52/182836104b33b444e400b14f797212f720cbc9ed6ba34c800639d154e821/markupsafe-3.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:bdc919ead48f234740ad807933cdf545180bfbe9342c2bb451556db2ed958581", size = 15341, upload-time = "2025-09-27T18:37:16.496Z" }, + { url = "https://files.pythonhosted.org/packages/6f/18/acf23e91bd94fd7b3031558b1f013adfa21a8e407a3fdb32745538730382/markupsafe-3.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:5a7d5dc5140555cf21a6fefbdbf8723f06fcd2f63ef108f2854de715e4422cb4", size = 14073, upload-time = "2025-09-27T18:37:17.476Z" }, + { url = 
"https://files.pythonhosted.org/packages/3c/f0/57689aa4076e1b43b15fdfa646b04653969d50cf30c32a102762be2485da/markupsafe-3.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:1353ef0c1b138e1907ae78e2f6c63ff67501122006b0f9abad68fda5f4ffc6ab", size = 11661, upload-time = "2025-09-27T18:37:18.453Z" }, + { url = "https://files.pythonhosted.org/packages/89/c3/2e67a7ca217c6912985ec766c6393b636fb0c2344443ff9d91404dc4c79f/markupsafe-3.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1085e7fbddd3be5f89cc898938f42c0b3c711fdcb37d75221de2666af647c175", size = 12069, upload-time = "2025-09-27T18:37:19.332Z" }, + { url = "https://files.pythonhosted.org/packages/f0/00/be561dce4e6ca66b15276e184ce4b8aec61fe83662cce2f7d72bd3249d28/markupsafe-3.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1b52b4fb9df4eb9ae465f8d0c228a00624de2334f216f178a995ccdcf82c4634", size = 25670, upload-time = "2025-09-27T18:37:20.245Z" }, + { url = "https://files.pythonhosted.org/packages/50/09/c419f6f5a92e5fadde27efd190eca90f05e1261b10dbd8cbcb39cd8ea1dc/markupsafe-3.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fed51ac40f757d41b7c48425901843666a6677e3e8eb0abcff09e4ba6e664f50", size = 23598, upload-time = "2025-09-27T18:37:21.177Z" }, + { url = "https://files.pythonhosted.org/packages/22/44/a0681611106e0b2921b3033fc19bc53323e0b50bc70cffdd19f7d679bb66/markupsafe-3.0.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f190daf01f13c72eac4efd5c430a8de82489d9cff23c364c3ea822545032993e", size = 23261, upload-time = "2025-09-27T18:37:22.167Z" }, + { url = "https://files.pythonhosted.org/packages/5f/57/1b0b3f100259dc9fffe780cfb60d4be71375510e435efec3d116b6436d43/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e56b7d45a839a697b5eb268c82a71bd8c7f6c94d6fd50c3d577fa39a9f1409f5", size = 24835, upload-time = "2025-09-27T18:37:23.296Z" }, + { 
url = "https://files.pythonhosted.org/packages/26/6a/4bf6d0c97c4920f1597cc14dd720705eca0bf7c787aebc6bb4d1bead5388/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:f3e98bb3798ead92273dc0e5fd0f31ade220f59a266ffd8a4f6065e0a3ce0523", size = 22733, upload-time = "2025-09-27T18:37:24.237Z" }, + { url = "https://files.pythonhosted.org/packages/14/c7/ca723101509b518797fedc2fdf79ba57f886b4aca8a7d31857ba3ee8281f/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5678211cb9333a6468fb8d8be0305520aa073f50d17f089b5b4b477ea6e67fdc", size = 23672, upload-time = "2025-09-27T18:37:25.271Z" }, + { url = "https://files.pythonhosted.org/packages/fb/df/5bd7a48c256faecd1d36edc13133e51397e41b73bb77e1a69deab746ebac/markupsafe-3.0.3-cp314-cp314t-win32.whl", hash = "sha256:915c04ba3851909ce68ccc2b8e2cd691618c4dc4c4232fb7982bca3f41fd8c3d", size = 14819, upload-time = "2025-09-27T18:37:26.285Z" }, + { url = "https://files.pythonhosted.org/packages/1a/8a/0402ba61a2f16038b48b39bccca271134be00c5c9f0f623208399333c448/markupsafe-3.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4faffd047e07c38848ce017e8725090413cd80cbc23d86e55c587bf979e579c9", size = 15426, upload-time = "2025-09-27T18:37:27.316Z" }, + { url = "https://files.pythonhosted.org/packages/70/bc/6f1c2f612465f5fa89b95bead1f44dcb607670fd42891d8fdcd5d039f4f4/markupsafe-3.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:32001d6a8fc98c8cb5c947787c5d08b0a50663d139f1305bac5885d98d9b40fa", size = 14146, upload-time = "2025-09-27T18:37:28.327Z" }, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = "2022-08-14T12:40:10.846Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" }, +] + +[[package]] +name = "mypy" +version = "1.19.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "librt", marker = "(platform_python_implementation != 'PyPy' and sys_platform == 'darwin') or (platform_python_implementation != 'PyPy' and sys_platform == 'linux') or (platform_python_implementation != 'PyPy' and sys_platform == 'win32')" }, + { name = "mypy-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pathspec", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "tomli", marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform == 'win32')" }, + { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f5/db/4efed9504bc01309ab9c2da7e352cc223569f05478012b5d9ece38fd44d2/mypy-1.19.1.tar.gz", hash = "sha256:19d88bb05303fe63f71dd2c6270daca27cb9401c4ca8255fe50d1d920e0eb9ba", size = 3582404, upload-time = "2025-12-15T05:03:48.42Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2f/63/e499890d8e39b1ff2df4c0c6ce5d371b6844ee22b8250687a99fd2f657a8/mypy-1.19.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5f05aa3d375b385734388e844bc01733bd33c644ab48e9684faa54e5389775ec", size = 13101333, upload-time = "2025-12-15T05:03:03.28Z" }, + { url = 
"https://files.pythonhosted.org/packages/72/4b/095626fc136fba96effc4fd4a82b41d688ab92124f8c4f7564bffe5cf1b0/mypy-1.19.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:022ea7279374af1a5d78dfcab853fe6a536eebfda4b59deab53cd21f6cd9f00b", size = 12164102, upload-time = "2025-12-15T05:02:33.611Z" }, + { url = "https://files.pythonhosted.org/packages/0c/5b/952928dd081bf88a83a5ccd49aaecfcd18fd0d2710c7ff07b8fb6f7032b9/mypy-1.19.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee4c11e460685c3e0c64a4c5de82ae143622410950d6be863303a1c4ba0e36d6", size = 12765799, upload-time = "2025-12-15T05:03:28.44Z" }, + { url = "https://files.pythonhosted.org/packages/2a/0d/93c2e4a287f74ef11a66fb6d49c7a9f05e47b0a4399040e6719b57f500d2/mypy-1.19.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de759aafbae8763283b2ee5869c7255391fbc4de3ff171f8f030b5ec48381b74", size = 13522149, upload-time = "2025-12-15T05:02:36.011Z" }, + { url = "https://files.pythonhosted.org/packages/7b/0e/33a294b56aaad2b338d203e3a1d8b453637ac36cb278b45005e0901cf148/mypy-1.19.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ab43590f9cd5108f41aacf9fca31841142c786827a74ab7cc8a2eacb634e09a1", size = 13810105, upload-time = "2025-12-15T05:02:40.327Z" }, + { url = "https://files.pythonhosted.org/packages/0e/fd/3e82603a0cb66b67c5e7abababce6bf1a929ddf67bf445e652684af5c5a0/mypy-1.19.1-cp310-cp310-win_amd64.whl", hash = "sha256:2899753e2f61e571b3971747e302d5f420c3fd09650e1951e99f823bc3089dac", size = 10057200, upload-time = "2025-12-15T05:02:51.012Z" }, + { url = "https://files.pythonhosted.org/packages/ef/47/6b3ebabd5474d9cdc170d1342fbf9dddc1b0ec13ec90bf9004ee6f391c31/mypy-1.19.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d8dfc6ab58ca7dda47d9237349157500468e404b17213d44fc1cb77bce532288", size = 13028539, upload-time = "2025-12-15T05:03:44.129Z" }, + { url = 
"https://files.pythonhosted.org/packages/5c/a6/ac7c7a88a3c9c54334f53a941b765e6ec6c4ebd65d3fe8cdcfbe0d0fd7db/mypy-1.19.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e3f276d8493c3c97930e354b2595a44a21348b320d859fb4a2b9f66da9ed27ab", size = 12083163, upload-time = "2025-12-15T05:03:37.679Z" }, + { url = "https://files.pythonhosted.org/packages/67/af/3afa9cf880aa4a2c803798ac24f1d11ef72a0c8079689fac5cfd815e2830/mypy-1.19.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2abb24cf3f17864770d18d673c85235ba52456b36a06b6afc1e07c1fdcd3d0e6", size = 12687629, upload-time = "2025-12-15T05:02:31.526Z" }, + { url = "https://files.pythonhosted.org/packages/2d/46/20f8a7114a56484ab268b0ab372461cb3a8f7deed31ea96b83a4e4cfcfca/mypy-1.19.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a009ffa5a621762d0c926a078c2d639104becab69e79538a494bcccb62cc0331", size = 13436933, upload-time = "2025-12-15T05:03:15.606Z" }, + { url = "https://files.pythonhosted.org/packages/5b/f8/33b291ea85050a21f15da910002460f1f445f8007adb29230f0adea279cb/mypy-1.19.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f7cee03c9a2e2ee26ec07479f38ea9c884e301d42c6d43a19d20fb014e3ba925", size = 13661754, upload-time = "2025-12-15T05:02:26.731Z" }, + { url = "https://files.pythonhosted.org/packages/fd/a3/47cbd4e85bec4335a9cd80cf67dbc02be21b5d4c9c23ad6b95d6c5196bac/mypy-1.19.1-cp311-cp311-win_amd64.whl", hash = "sha256:4b84a7a18f41e167f7995200a1d07a4a6810e89d29859df936f1c3923d263042", size = 10055772, upload-time = "2025-12-15T05:03:26.179Z" }, + { url = "https://files.pythonhosted.org/packages/06/8a/19bfae96f6615aa8a0604915512e0289b1fad33d5909bf7244f02935d33a/mypy-1.19.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a8174a03289288c1f6c46d55cef02379b478bfbc8e358e02047487cad44c6ca1", size = 13206053, upload-time = "2025-12-15T05:03:46.622Z" }, + { url = 
"https://files.pythonhosted.org/packages/a5/34/3e63879ab041602154ba2a9f99817bb0c85c4df19a23a1443c8986e4d565/mypy-1.19.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ffcebe56eb09ff0c0885e750036a095e23793ba6c2e894e7e63f6d89ad51f22e", size = 12219134, upload-time = "2025-12-15T05:03:24.367Z" }, + { url = "https://files.pythonhosted.org/packages/89/cc/2db6f0e95366b630364e09845672dbee0cbf0bbe753a204b29a944967cd9/mypy-1.19.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b64d987153888790bcdb03a6473d321820597ab8dd9243b27a92153c4fa50fd2", size = 12731616, upload-time = "2025-12-15T05:02:44.725Z" }, + { url = "https://files.pythonhosted.org/packages/00/be/dd56c1fd4807bc1eba1cf18b2a850d0de7bacb55e158755eb79f77c41f8e/mypy-1.19.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c35d298c2c4bba75feb2195655dfea8124d855dfd7343bf8b8c055421eaf0cf8", size = 13620847, upload-time = "2025-12-15T05:03:39.633Z" }, + { url = "https://files.pythonhosted.org/packages/6d/42/332951aae42b79329f743bf1da088cd75d8d4d9acc18fbcbd84f26c1af4e/mypy-1.19.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:34c81968774648ab5ac09c29a375fdede03ba253f8f8287847bd480782f73a6a", size = 13834976, upload-time = "2025-12-15T05:03:08.786Z" }, + { url = "https://files.pythonhosted.org/packages/6f/63/e7493e5f90e1e085c562bb06e2eb32cae27c5057b9653348d38b47daaecc/mypy-1.19.1-cp312-cp312-win_amd64.whl", hash = "sha256:b10e7c2cd7870ba4ad9b2d8a6102eb5ffc1f16ca35e3de6bfa390c1113029d13", size = 10118104, upload-time = "2025-12-15T05:03:10.834Z" }, + { url = "https://files.pythonhosted.org/packages/de/9f/a6abae693f7a0c697dbb435aac52e958dc8da44e92e08ba88d2e42326176/mypy-1.19.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e3157c7594ff2ef1634ee058aafc56a82db665c9438fd41b390f3bde1ab12250", size = 13201927, upload-time = "2025-12-15T05:02:29.138Z" }, + { url = 
"https://files.pythonhosted.org/packages/9a/a4/45c35ccf6e1c65afc23a069f50e2c66f46bd3798cbe0d680c12d12935caa/mypy-1.19.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bdb12f69bcc02700c2b47e070238f42cb87f18c0bc1fc4cdb4fb2bc5fd7a3b8b", size = 12206730, upload-time = "2025-12-15T05:03:01.325Z" }, + { url = "https://files.pythonhosted.org/packages/05/bb/cdcf89678e26b187650512620eec8368fded4cfd99cfcb431e4cdfd19dec/mypy-1.19.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f859fb09d9583a985be9a493d5cfc5515b56b08f7447759a0c5deaf68d80506e", size = 12724581, upload-time = "2025-12-15T05:03:20.087Z" }, + { url = "https://files.pythonhosted.org/packages/d1/32/dd260d52babf67bad8e6770f8e1102021877ce0edea106e72df5626bb0ec/mypy-1.19.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c9a6538e0415310aad77cb94004ca6482330fece18036b5f360b62c45814c4ef", size = 13616252, upload-time = "2025-12-15T05:02:49.036Z" }, + { url = "https://files.pythonhosted.org/packages/71/d0/5e60a9d2e3bd48432ae2b454b7ef2b62a960ab51292b1eda2a95edd78198/mypy-1.19.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:da4869fc5e7f62a88f3fe0b5c919d1d9f7ea3cef92d3689de2823fd27e40aa75", size = 13840848, upload-time = "2025-12-15T05:02:55.95Z" }, + { url = "https://files.pythonhosted.org/packages/98/76/d32051fa65ecf6cc8c6610956473abdc9b4c43301107476ac03559507843/mypy-1.19.1-cp313-cp313-win_amd64.whl", hash = "sha256:016f2246209095e8eda7538944daa1d60e1e8134d98983b9fc1e92c1fc0cb8dd", size = 10135510, upload-time = "2025-12-15T05:02:58.438Z" }, + { url = "https://files.pythonhosted.org/packages/de/eb/b83e75f4c820c4247a58580ef86fcd35165028f191e7e1ba57128c52782d/mypy-1.19.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:06e6170bd5836770e8104c8fdd58e5e725cfeb309f0a6c681a811f557e97eac1", size = 13199744, upload-time = "2025-12-15T05:03:30.823Z" }, + { url = 
"https://files.pythonhosted.org/packages/94/28/52785ab7bfa165f87fcbb61547a93f98bb20e7f82f90f165a1f69bce7b3d/mypy-1.19.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:804bd67b8054a85447c8954215a906d6eff9cabeabe493fb6334b24f4bfff718", size = 12215815, upload-time = "2025-12-15T05:02:42.323Z" }, + { url = "https://files.pythonhosted.org/packages/0a/c6/bdd60774a0dbfb05122e3e925f2e9e846c009e479dcec4821dad881f5b52/mypy-1.19.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:21761006a7f497cb0d4de3d8ef4ca70532256688b0523eee02baf9eec895e27b", size = 12740047, upload-time = "2025-12-15T05:03:33.168Z" }, + { url = "https://files.pythonhosted.org/packages/32/2a/66ba933fe6c76bd40d1fe916a83f04fed253152f451a877520b3c4a5e41e/mypy-1.19.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:28902ee51f12e0f19e1e16fbe2f8f06b6637f482c459dd393efddd0ec7f82045", size = 13601998, upload-time = "2025-12-15T05:03:13.056Z" }, + { url = "https://files.pythonhosted.org/packages/e3/da/5055c63e377c5c2418760411fd6a63ee2b96cf95397259038756c042574f/mypy-1.19.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:481daf36a4c443332e2ae9c137dfee878fcea781a2e3f895d54bd3002a900957", size = 13807476, upload-time = "2025-12-15T05:03:17.977Z" }, + { url = "https://files.pythonhosted.org/packages/cd/09/4ebd873390a063176f06b0dbf1f7783dd87bd120eae7727fa4ae4179b685/mypy-1.19.1-cp314-cp314-win_amd64.whl", hash = "sha256:8bb5c6f6d043655e055be9b542aa5f3bdd30e4f3589163e85f93f3640060509f", size = 10281872, upload-time = "2025-12-15T05:03:05.549Z" }, + { url = "https://files.pythonhosted.org/packages/8d/f4/4ce9a05ce5ded1de3ec1c1d96cf9f9504a04e54ce0ed55cfa38619a32b8d/mypy-1.19.1-py3-none-any.whl", hash = "sha256:f1235f5ea01b7db5468d53ece6aaddf1ad0b88d9e7462b86ef96fe04995d7247", size = 2471239, upload-time = "2025-12-15T05:03:07.248Z" }, +] + +[[package]] +name = "mypy-extensions" +version = "1.1.0" +source = { 
registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/6e/371856a3fb9d31ca8dac321cda606860fa4548858c0cc45d9d1d4ca2628b/mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558", size = 6343, upload-time = "2025-04-22T14:54:24.164Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" }, +] + +[[package]] +name = "nodeenv" +version = "1.10.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/24/bf/d1bda4f6168e0b2e9e5958945e01910052158313224ada5ce1fb2e1113b8/nodeenv-1.10.0.tar.gz", hash = "sha256:996c191ad80897d076bdfba80a41994c2b47c68e224c542b48feba42ba00f8bb", size = 55611, upload-time = "2025-12-20T14:08:54.006Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/88/b2/d0896bdcdc8d28a7fc5717c305f1a861c26e18c05047949fb371034d98bd/nodeenv-1.10.0-py2.py3-none-any.whl", hash = "sha256:5bb13e3eed2923615535339b3c620e76779af4cb4c6a90deccc9e36b274d3827", size = 23438, upload-time = "2025-12-20T14:08:52.782Z" }, +] + +[[package]] +name = "packaging" +version = "25.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 
66469, upload-time = "2025-04-19T11:48:57.875Z" }, +] + +[[package]] +name = "pastel" +version = "0.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/76/f1/4594f5e0fcddb6953e5b8fe00da8c317b8b41b547e2b3ae2da7512943c62/pastel-0.2.1.tar.gz", hash = "sha256:e6581ac04e973cac858828c6202c1e1e81fee1dc7de7683f3e1ffe0bfd8a573d", size = 7555, upload-time = "2020-09-16T19:21:12.43Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/aa/18/a8444036c6dd65ba3624c63b734d3ba95ba63ace513078e1580590075d21/pastel-0.2.1-py2.py3-none-any.whl", hash = "sha256:4349225fcdf6c2bb34d483e523475de5bb04a5c10ef711263452cb37d7dd4364", size = 5955, upload-time = "2020-09-16T19:21:11.409Z" }, +] + +[[package]] +name = "pathspec" +version = "1.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/4c/b2/bb8e495d5262bfec41ab5cb18f522f1012933347fb5d9e62452d446baca2/pathspec-1.0.3.tar.gz", hash = "sha256:bac5cf97ae2c2876e2d25ebb15078eb04d76e4b98921ee31c6f85ade8b59444d", size = 130841, upload-time = "2026-01-09T15:46:46.009Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/32/2b/121e912bd60eebd623f873fd090de0e84f322972ab25a7f9044c056804ed/pathspec-1.0.3-py3-none-any.whl", hash = "sha256:e80767021c1cc524aa3fb14bedda9c34406591343cc42797b386ce7b9354fb6c", size = 55021, upload-time = "2026-01-09T15:46:44.652Z" }, +] + +[[package]] +name = "pip" +version = "25.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fe/6e/74a3f0179a4a73a53d66ce57fdb4de0080a8baa1de0063de206d6167acc2/pip-25.3.tar.gz", hash = "sha256:8d0538dbbd7babbd207f261ed969c65de439f6bc9e5dbd3b3b9a77f25d95f343", size = 1803014, upload-time = "2025-10-25T00:55:41.394Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/44/3c/d717024885424591d5376220b5e836c2d5293ce2011523c9de23ff7bf068/pip-25.3-py3-none-any.whl", hash = 
"sha256:9655943313a94722b7774661c21049070f6bbb0a1516bf02f7c8d5d9201514cd", size = 1778622, upload-time = "2025-10-25T00:55:39.247Z" }, +] + +[[package]] +name = "platformdirs" +version = "4.5.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/cf/86/0248f086a84f01b37aaec0fa567b397df1a119f73c16f6c7a9aac73ea309/platformdirs-4.5.1.tar.gz", hash = "sha256:61d5cdcc6065745cdd94f0f878977f8de9437be93de97c1c12f853c9c0cdcbda", size = 21715, upload-time = "2025-12-05T13:52:58.638Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/28/3bfe2fa5a7b9c46fe7e13c97bda14c895fb10fa2ebf1d0abb90e0cea7ee1/platformdirs-4.5.1-py3-none-any.whl", hash = "sha256:d03afa3963c806a9bed9d5125c8f4cb2fdaf74a55ab60e5d59b3fde758104d31", size = 18731, upload-time = "2025-12-05T13:52:56.823Z" }, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, +] + +[[package]] +name = "poethepoet" +version = "0.40.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pastel", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pyyaml", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "tomli", marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or 
(python_full_version < '3.11' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform == 'win32')" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/38/9d/054c8435b03324ed9abd5d5ab8c45065b1f42c23952cd23f13a5921d8465/poethepoet-0.40.0.tar.gz", hash = "sha256:91835f00d03d6c4f0e146f80fa510e298ad865e7edd27fe4cb9c94fdc090791b", size = 81114, upload-time = "2026-01-05T19:09:13.116Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fb/bc/73327d12b176abea7a3c6c7d760e1a953992f7b59d72c0354e39d7a353b5/poethepoet-0.40.0-py3-none-any.whl", hash = "sha256:afd276ae31d5c53573c0c14898118d4848ccee3709b6b0be6a1c6cbe522bbc8a", size = 106672, upload-time = "2026-01-05T19:09:11.536Z" }, +] + +[[package]] +name = "pre-commit" +version = "4.5.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cfgv", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "identify", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "nodeenv", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pyyaml", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "virtualenv", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/40/f1/6d86a29246dfd2e9b6237f0b5823717f60cad94d47ddc26afa916d21f525/pre_commit-4.5.1.tar.gz", hash = "sha256:eb545fcff725875197837263e977ea257a402056661f09dae08e4b149b030a61", size = 198232, upload-time = "2025-12-16T21:14:33.552Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5d/19/fd3ef348460c80af7bb4669ea7926651d1f95c23ff2df18b9d24bab4f3fa/pre_commit-4.5.1-py2.py3-none-any.whl", hash = "sha256:3b3afd891e97337708c1674210f8eba659b52a38ea5f822ff142d10786221f77", size = 226437, 
upload-time = "2025-12-16T21:14:32.409Z" }, +] + +[[package]] +name = "pydantic" +version = "2.12.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-types", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pydantic-core", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "typing-inspection", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/69/44/36f1a6e523abc58ae5f928898e4aca2e0ea509b5aa6f6f392a5d882be928/pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49", size = 821591, upload-time = "2025-11-26T15:11:46.471Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d", size = 463580, upload-time = "2025-11-26T15:11:44.605Z" }, +] + +[[package]] +name = "pydantic-core" +version = "2.41.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = "2025-11-04T13:43:49.098Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/c6/90/32c9941e728d564b411d574d8ee0cf09b12ec978cb22b294995bae5549a5/pydantic_core-2.41.5-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:77b63866ca88d804225eaa4af3e664c5faf3568cea95360d21f4725ab6e07146", size = 2107298, upload-time = "2025-11-04T13:39:04.116Z" }, + { url = "https://files.pythonhosted.org/packages/fb/a8/61c96a77fe28993d9a6fb0f4127e05430a267b235a124545d79fea46dd65/pydantic_core-2.41.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dfa8a0c812ac681395907e71e1274819dec685fec28273a28905df579ef137e2", size = 1901475, upload-time = "2025-11-04T13:39:06.055Z" }, + { url = "https://files.pythonhosted.org/packages/5d/b6/338abf60225acc18cdc08b4faef592d0310923d19a87fba1faf05af5346e/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5921a4d3ca3aee735d9fd163808f5e8dd6c6972101e4adbda9a4667908849b97", size = 1918815, upload-time = "2025-11-04T13:39:10.41Z" }, + { url = "https://files.pythonhosted.org/packages/d1/1c/2ed0433e682983d8e8cba9c8d8ef274d4791ec6a6f24c58935b90e780e0a/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e25c479382d26a2a41b7ebea1043564a937db462816ea07afa8a44c0866d52f9", size = 2065567, upload-time = "2025-11-04T13:39:12.244Z" }, + { url = "https://files.pythonhosted.org/packages/b3/24/cf84974ee7d6eae06b9e63289b7b8f6549d416b5c199ca2d7ce13bbcf619/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f547144f2966e1e16ae626d8ce72b4cfa0caedc7fa28052001c94fb2fcaa1c52", size = 2230442, upload-time = "2025-11-04T13:39:13.962Z" }, + { url = "https://files.pythonhosted.org/packages/fd/21/4e287865504b3edc0136c89c9c09431be326168b1eb7841911cbc877a995/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f52298fbd394f9ed112d56f3d11aabd0d5bd27beb3084cc3d8ad069483b8941", size = 2350956, upload-time = "2025-11-04T13:39:15.889Z" }, + { 
url = "https://files.pythonhosted.org/packages/a8/76/7727ef2ffa4b62fcab916686a68a0426b9b790139720e1934e8ba797e238/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:100baa204bb412b74fe285fb0f3a385256dad1d1879f0a5cb1499ed2e83d132a", size = 2068253, upload-time = "2025-11-04T13:39:17.403Z" }, + { url = "https://files.pythonhosted.org/packages/d5/8c/a4abfc79604bcb4c748e18975c44f94f756f08fb04218d5cb87eb0d3a63e/pydantic_core-2.41.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:05a2c8852530ad2812cb7914dc61a1125dc4e06252ee98e5638a12da6cc6fb6c", size = 2177050, upload-time = "2025-11-04T13:39:19.351Z" }, + { url = "https://files.pythonhosted.org/packages/67/b1/de2e9a9a79b480f9cb0b6e8b6ba4c50b18d4e89852426364c66aa82bb7b3/pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:29452c56df2ed968d18d7e21f4ab0ac55e71dc59524872f6fc57dcf4a3249ed2", size = 2147178, upload-time = "2025-11-04T13:39:21Z" }, + { url = "https://files.pythonhosted.org/packages/16/c1/dfb33f837a47b20417500efaa0378adc6635b3c79e8369ff7a03c494b4ac/pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:d5160812ea7a8a2ffbe233d8da666880cad0cbaf5d4de74ae15c313213d62556", size = 2341833, upload-time = "2025-11-04T13:39:22.606Z" }, + { url = "https://files.pythonhosted.org/packages/47/36/00f398642a0f4b815a9a558c4f1dca1b4020a7d49562807d7bc9ff279a6c/pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:df3959765b553b9440adfd3c795617c352154e497a4eaf3752555cfb5da8fc49", size = 2321156, upload-time = "2025-11-04T13:39:25.843Z" }, + { url = "https://files.pythonhosted.org/packages/7e/70/cad3acd89fde2010807354d978725ae111ddf6d0ea46d1ea1775b5c1bd0c/pydantic_core-2.41.5-cp310-cp310-win32.whl", hash = "sha256:1f8d33a7f4d5a7889e60dc39856d76d09333d8a6ed0f5f1190635cbec70ec4ba", size = 1989378, upload-time = "2025-11-04T13:39:27.92Z" }, + { url = 
"https://files.pythonhosted.org/packages/76/92/d338652464c6c367e5608e4488201702cd1cbb0f33f7b6a85a60fe5f3720/pydantic_core-2.41.5-cp310-cp310-win_amd64.whl", hash = "sha256:62de39db01b8d593e45871af2af9e497295db8d73b085f6bfd0b18c83c70a8f9", size = 2013622, upload-time = "2025-11-04T13:39:29.848Z" }, + { url = "https://files.pythonhosted.org/packages/e8/72/74a989dd9f2084b3d9530b0915fdda64ac48831c30dbf7c72a41a5232db8/pydantic_core-2.41.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:a3a52f6156e73e7ccb0f8cced536adccb7042be67cb45f9562e12b319c119da6", size = 2105873, upload-time = "2025-11-04T13:39:31.373Z" }, + { url = "https://files.pythonhosted.org/packages/12/44/37e403fd9455708b3b942949e1d7febc02167662bf1a7da5b78ee1ea2842/pydantic_core-2.41.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7f3bf998340c6d4b0c9a2f02d6a400e51f123b59565d74dc60d252ce888c260b", size = 1899826, upload-time = "2025-11-04T13:39:32.897Z" }, + { url = "https://files.pythonhosted.org/packages/33/7f/1d5cab3ccf44c1935a359d51a8a2a9e1a654b744b5e7f80d41b88d501eec/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:378bec5c66998815d224c9ca994f1e14c0c21cb95d2f52b6021cc0b2a58f2a5a", size = 1917869, upload-time = "2025-11-04T13:39:34.469Z" }, + { url = "https://files.pythonhosted.org/packages/6e/6a/30d94a9674a7fe4f4744052ed6c5e083424510be1e93da5bc47569d11810/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e7b576130c69225432866fe2f4a469a85a54ade141d96fd396dffcf607b558f8", size = 2063890, upload-time = "2025-11-04T13:39:36.053Z" }, + { url = "https://files.pythonhosted.org/packages/50/be/76e5d46203fcb2750e542f32e6c371ffa9b8ad17364cf94bb0818dbfb50c/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6cb58b9c66f7e4179a2d5e0f849c48eff5c1fca560994d6eb6543abf955a149e", size = 2229740, upload-time = "2025-11-04T13:39:37.753Z" }, + { url = 
"https://files.pythonhosted.org/packages/d3/ee/fed784df0144793489f87db310a6bbf8118d7b630ed07aa180d6067e653a/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88942d3a3dff3afc8288c21e565e476fc278902ae4d6d134f1eeda118cc830b1", size = 2350021, upload-time = "2025-11-04T13:39:40.94Z" }, + { url = "https://files.pythonhosted.org/packages/c8/be/8fed28dd0a180dca19e72c233cbf58efa36df055e5b9d90d64fd1740b828/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f31d95a179f8d64d90f6831d71fa93290893a33148d890ba15de25642c5d075b", size = 2066378, upload-time = "2025-11-04T13:39:42.523Z" }, + { url = "https://files.pythonhosted.org/packages/b0/3b/698cf8ae1d536a010e05121b4958b1257f0b5522085e335360e53a6b1c8b/pydantic_core-2.41.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c1df3d34aced70add6f867a8cf413e299177e0c22660cc767218373d0779487b", size = 2175761, upload-time = "2025-11-04T13:39:44.553Z" }, + { url = "https://files.pythonhosted.org/packages/b8/ba/15d537423939553116dea94ce02f9c31be0fa9d0b806d427e0308ec17145/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4009935984bd36bd2c774e13f9a09563ce8de4abaa7226f5108262fa3e637284", size = 2146303, upload-time = "2025-11-04T13:39:46.238Z" }, + { url = "https://files.pythonhosted.org/packages/58/7f/0de669bf37d206723795f9c90c82966726a2ab06c336deba4735b55af431/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:34a64bc3441dc1213096a20fe27e8e128bd3ff89921706e83c0b1ac971276594", size = 2340355, upload-time = "2025-11-04T13:39:48.002Z" }, + { url = "https://files.pythonhosted.org/packages/e5/de/e7482c435b83d7e3c3ee5ee4451f6e8973cff0eb6007d2872ce6383f6398/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c9e19dd6e28fdcaa5a1de679aec4141f691023916427ef9bae8584f9c2fb3b0e", size = 2319875, upload-time = "2025-11-04T13:39:49.705Z" }, + { url = 
"https://files.pythonhosted.org/packages/fe/e6/8c9e81bb6dd7560e33b9053351c29f30c8194b72f2d6932888581f503482/pydantic_core-2.41.5-cp311-cp311-win32.whl", hash = "sha256:2c010c6ded393148374c0f6f0bf89d206bf3217f201faa0635dcd56bd1520f6b", size = 1987549, upload-time = "2025-11-04T13:39:51.842Z" }, + { url = "https://files.pythonhosted.org/packages/11/66/f14d1d978ea94d1bc21fc98fcf570f9542fe55bfcc40269d4e1a21c19bf7/pydantic_core-2.41.5-cp311-cp311-win_amd64.whl", hash = "sha256:76ee27c6e9c7f16f47db7a94157112a2f3a00e958bc626e2f4ee8bec5c328fbe", size = 2011305, upload-time = "2025-11-04T13:39:53.485Z" }, + { url = "https://files.pythonhosted.org/packages/56/d8/0e271434e8efd03186c5386671328154ee349ff0354d83c74f5caaf096ed/pydantic_core-2.41.5-cp311-cp311-win_arm64.whl", hash = "sha256:4bc36bbc0b7584de96561184ad7f012478987882ebf9f9c389b23f432ea3d90f", size = 1972902, upload-time = "2025-11-04T13:39:56.488Z" }, + { url = "https://files.pythonhosted.org/packages/5f/5d/5f6c63eebb5afee93bcaae4ce9a898f3373ca23df3ccaef086d0233a35a7/pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7", size = 2110990, upload-time = "2025-11-04T13:39:58.079Z" }, + { url = "https://files.pythonhosted.org/packages/aa/32/9c2e8ccb57c01111e0fd091f236c7b371c1bccea0fa85247ac55b1e2b6b6/pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0", size = 1896003, upload-time = "2025-11-04T13:39:59.956Z" }, + { url = "https://files.pythonhosted.org/packages/68/b8/a01b53cb0e59139fbc9e4fda3e9724ede8de279097179be4ff31f1abb65a/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69", size = 1919200, upload-time = "2025-11-04T13:40:02.241Z" }, + { url = 
"https://files.pythonhosted.org/packages/38/de/8c36b5198a29bdaade07b5985e80a233a5ac27137846f3bc2d3b40a47360/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75", size = 2052578, upload-time = "2025-11-04T13:40:04.401Z" }, + { url = "https://files.pythonhosted.org/packages/00/b5/0e8e4b5b081eac6cb3dbb7e60a65907549a1ce035a724368c330112adfdd/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05", size = 2208504, upload-time = "2025-11-04T13:40:06.072Z" }, + { url = "https://files.pythonhosted.org/packages/77/56/87a61aad59c7c5b9dc8caad5a41a5545cba3810c3e828708b3d7404f6cef/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc", size = 2335816, upload-time = "2025-11-04T13:40:07.835Z" }, + { url = "https://files.pythonhosted.org/packages/0d/76/941cc9f73529988688a665a5c0ecff1112b3d95ab48f81db5f7606f522d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c", size = 2075366, upload-time = "2025-11-04T13:40:09.804Z" }, + { url = "https://files.pythonhosted.org/packages/d3/43/ebef01f69baa07a482844faaa0a591bad1ef129253ffd0cdaa9d8a7f72d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5", size = 2171698, upload-time = "2025-11-04T13:40:12.004Z" }, + { url = "https://files.pythonhosted.org/packages/b1/87/41f3202e4193e3bacfc2c065fab7706ebe81af46a83d3e27605029c1f5a6/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c", size = 2132603, upload-time = 
"2025-11-04T13:40:13.868Z" }, + { url = "https://files.pythonhosted.org/packages/49/7d/4c00df99cb12070b6bccdef4a195255e6020a550d572768d92cc54dba91a/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294", size = 2329591, upload-time = "2025-11-04T13:40:15.672Z" }, + { url = "https://files.pythonhosted.org/packages/cc/6a/ebf4b1d65d458f3cda6a7335d141305dfa19bdc61140a884d165a8a1bbc7/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1", size = 2319068, upload-time = "2025-11-04T13:40:17.532Z" }, + { url = "https://files.pythonhosted.org/packages/49/3b/774f2b5cd4192d5ab75870ce4381fd89cf218af999515baf07e7206753f0/pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d", size = 1985908, upload-time = "2025-11-04T13:40:19.309Z" }, + { url = "https://files.pythonhosted.org/packages/86/45/00173a033c801cacf67c190fef088789394feaf88a98a7035b0e40d53dc9/pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815", size = 2020145, upload-time = "2025-11-04T13:40:21.548Z" }, + { url = "https://files.pythonhosted.org/packages/f9/22/91fbc821fa6d261b376a3f73809f907cec5ca6025642c463d3488aad22fb/pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3", size = 1976179, upload-time = "2025-11-04T13:40:23.393Z" }, + { url = "https://files.pythonhosted.org/packages/87/06/8806241ff1f70d9939f9af039c6c35f2360cf16e93c2ca76f184e76b1564/pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9", size = 2120403, upload-time = "2025-11-04T13:40:25.248Z" }, + { url = 
"https://files.pythonhosted.org/packages/94/02/abfa0e0bda67faa65fef1c84971c7e45928e108fe24333c81f3bfe35d5f5/pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34", size = 1896206, upload-time = "2025-11-04T13:40:27.099Z" }, + { url = "https://files.pythonhosted.org/packages/15/df/a4c740c0943e93e6500f9eb23f4ca7ec9bf71b19e608ae5b579678c8d02f/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0", size = 1919307, upload-time = "2025-11-04T13:40:29.806Z" }, + { url = "https://files.pythonhosted.org/packages/9a/e3/6324802931ae1d123528988e0e86587c2072ac2e5394b4bc2bc34b61ff6e/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33", size = 2063258, upload-time = "2025-11-04T13:40:33.544Z" }, + { url = "https://files.pythonhosted.org/packages/c9/d4/2230d7151d4957dd79c3044ea26346c148c98fbf0ee6ebd41056f2d62ab5/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e", size = 2214917, upload-time = "2025-11-04T13:40:35.479Z" }, + { url = "https://files.pythonhosted.org/packages/e6/9f/eaac5df17a3672fef0081b6c1bb0b82b33ee89aa5cec0d7b05f52fd4a1fa/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2", size = 2332186, upload-time = "2025-11-04T13:40:37.436Z" }, + { url = "https://files.pythonhosted.org/packages/cf/4e/35a80cae583a37cf15604b44240e45c05e04e86f9cfd766623149297e971/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586", size = 2073164, upload-time = 
"2025-11-04T13:40:40.289Z" }, + { url = "https://files.pythonhosted.org/packages/bf/e3/f6e262673c6140dd3305d144d032f7bd5f7497d3871c1428521f19f9efa2/pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d", size = 2179146, upload-time = "2025-11-04T13:40:42.809Z" }, + { url = "https://files.pythonhosted.org/packages/75/c7/20bd7fc05f0c6ea2056a4565c6f36f8968c0924f19b7d97bbfea55780e73/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740", size = 2137788, upload-time = "2025-11-04T13:40:44.752Z" }, + { url = "https://files.pythonhosted.org/packages/3a/8d/34318ef985c45196e004bc46c6eab2eda437e744c124ef0dbe1ff2c9d06b/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e", size = 2340133, upload-time = "2025-11-04T13:40:46.66Z" }, + { url = "https://files.pythonhosted.org/packages/9c/59/013626bf8c78a5a5d9350d12e7697d3d4de951a75565496abd40ccd46bee/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858", size = 2324852, upload-time = "2025-11-04T13:40:48.575Z" }, + { url = "https://files.pythonhosted.org/packages/1a/d9/c248c103856f807ef70c18a4f986693a46a8ffe1602e5d361485da502d20/pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36", size = 1994679, upload-time = "2025-11-04T13:40:50.619Z" }, + { url = "https://files.pythonhosted.org/packages/9e/8b/341991b158ddab181cff136acd2552c9f35bd30380422a639c0671e99a91/pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11", size = 2019766, upload-time = "2025-11-04T13:40:52.631Z" }, + { url = 
"https://files.pythonhosted.org/packages/73/7d/f2f9db34af103bea3e09735bb40b021788a5e834c81eedb541991badf8f5/pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd", size = 1981005, upload-time = "2025-11-04T13:40:54.734Z" }, + { url = "https://files.pythonhosted.org/packages/ea/28/46b7c5c9635ae96ea0fbb779e271a38129df2550f763937659ee6c5dbc65/pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a", size = 2119622, upload-time = "2025-11-04T13:40:56.68Z" }, + { url = "https://files.pythonhosted.org/packages/74/1a/145646e5687e8d9a1e8d09acb278c8535ebe9e972e1f162ed338a622f193/pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14", size = 1891725, upload-time = "2025-11-04T13:40:58.807Z" }, + { url = "https://files.pythonhosted.org/packages/23/04/e89c29e267b8060b40dca97bfc64a19b2a3cf99018167ea1677d96368273/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1", size = 1915040, upload-time = "2025-11-04T13:41:00.853Z" }, + { url = "https://files.pythonhosted.org/packages/84/a3/15a82ac7bd97992a82257f777b3583d3e84bdb06ba6858f745daa2ec8a85/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66", size = 2063691, upload-time = "2025-11-04T13:41:03.504Z" }, + { url = "https://files.pythonhosted.org/packages/74/9b/0046701313c6ef08c0c1cf0e028c67c770a4e1275ca73131563c5f2a310a/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869", size = 2213897, upload-time = "2025-11-04T13:41:05.804Z" }, + { url = 
"https://files.pythonhosted.org/packages/8a/cd/6bac76ecd1b27e75a95ca3a9a559c643b3afcd2dd62086d4b7a32a18b169/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2", size = 2333302, upload-time = "2025-11-04T13:41:07.809Z" }, + { url = "https://files.pythonhosted.org/packages/4c/d2/ef2074dc020dd6e109611a8be4449b98cd25e1b9b8a303c2f0fca2f2bcf7/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375", size = 2064877, upload-time = "2025-11-04T13:41:09.827Z" }, + { url = "https://files.pythonhosted.org/packages/18/66/e9db17a9a763d72f03de903883c057b2592c09509ccfe468187f2a2eef29/pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553", size = 2180680, upload-time = "2025-11-04T13:41:12.379Z" }, + { url = "https://files.pythonhosted.org/packages/d3/9e/3ce66cebb929f3ced22be85d4c2399b8e85b622db77dad36b73c5387f8f8/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90", size = 2138960, upload-time = "2025-11-04T13:41:14.627Z" }, + { url = "https://files.pythonhosted.org/packages/a6/62/205a998f4327d2079326b01abee48e502ea739d174f0a89295c481a2272e/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07", size = 2339102, upload-time = "2025-11-04T13:41:16.868Z" }, + { url = "https://files.pythonhosted.org/packages/3c/0d/f05e79471e889d74d3d88f5bd20d0ed189ad94c2423d81ff8d0000aab4ff/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb", size = 2326039, upload-time = "2025-11-04T13:41:18.934Z" }, + { url = 
"https://files.pythonhosted.org/packages/ec/e1/e08a6208bb100da7e0c4b288eed624a703f4d129bde2da475721a80cab32/pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23", size = 1995126, upload-time = "2025-11-04T13:41:21.418Z" }, + { url = "https://files.pythonhosted.org/packages/48/5d/56ba7b24e9557f99c9237e29f5c09913c81eeb2f3217e40e922353668092/pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf", size = 2015489, upload-time = "2025-11-04T13:41:24.076Z" }, + { url = "https://files.pythonhosted.org/packages/4e/bb/f7a190991ec9e3e0ba22e4993d8755bbc4a32925c0b5b42775c03e8148f9/pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0", size = 1977288, upload-time = "2025-11-04T13:41:26.33Z" }, + { url = "https://files.pythonhosted.org/packages/92/ed/77542d0c51538e32e15afe7899d79efce4b81eee631d99850edc2f5e9349/pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a", size = 2120255, upload-time = "2025-11-04T13:41:28.569Z" }, + { url = "https://files.pythonhosted.org/packages/bb/3d/6913dde84d5be21e284439676168b28d8bbba5600d838b9dca99de0fad71/pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3", size = 1863760, upload-time = "2025-11-04T13:41:31.055Z" }, + { url = "https://files.pythonhosted.org/packages/5a/f0/e5e6b99d4191da102f2b0eb9687aaa7f5bea5d9964071a84effc3e40f997/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c", size = 1878092, upload-time = "2025-11-04T13:41:33.21Z" }, + { url = 
"https://files.pythonhosted.org/packages/71/48/36fb760642d568925953bcc8116455513d6e34c4beaa37544118c36aba6d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612", size = 2053385, upload-time = "2025-11-04T13:41:35.508Z" }, + { url = "https://files.pythonhosted.org/packages/20/25/92dc684dd8eb75a234bc1c764b4210cf2646479d54b47bf46061657292a8/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d", size = 2218832, upload-time = "2025-11-04T13:41:37.732Z" }, + { url = "https://files.pythonhosted.org/packages/e2/09/f53e0b05023d3e30357d82eb35835d0f6340ca344720a4599cd663dca599/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9", size = 2327585, upload-time = "2025-11-04T13:41:40Z" }, + { url = "https://files.pythonhosted.org/packages/aa/4e/2ae1aa85d6af35a39b236b1b1641de73f5a6ac4d5a7509f77b814885760c/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660", size = 2041078, upload-time = "2025-11-04T13:41:42.323Z" }, + { url = "https://files.pythonhosted.org/packages/cd/13/2e215f17f0ef326fc72afe94776edb77525142c693767fc347ed6288728d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9", size = 2173914, upload-time = "2025-11-04T13:41:45.221Z" }, + { url = "https://files.pythonhosted.org/packages/02/7a/f999a6dcbcd0e5660bc348a3991c8915ce6599f4f2c6ac22f01d7a10816c/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3", size = 2129560, upload-time = 
"2025-11-04T13:41:47.474Z" }, + { url = "https://files.pythonhosted.org/packages/3a/b1/6c990ac65e3b4c079a4fb9f5b05f5b013afa0f4ed6780a3dd236d2cbdc64/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf", size = 2329244, upload-time = "2025-11-04T13:41:49.992Z" }, + { url = "https://files.pythonhosted.org/packages/d9/02/3c562f3a51afd4d88fff8dffb1771b30cfdfd79befd9883ee094f5b6c0d8/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470", size = 2331955, upload-time = "2025-11-04T13:41:54.079Z" }, + { url = "https://files.pythonhosted.org/packages/5c/96/5fb7d8c3c17bc8c62fdb031c47d77a1af698f1d7a406b0f79aaa1338f9ad/pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa", size = 1988906, upload-time = "2025-11-04T13:41:56.606Z" }, + { url = "https://files.pythonhosted.org/packages/22/ed/182129d83032702912c2e2d8bbe33c036f342cc735737064668585dac28f/pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c", size = 1981607, upload-time = "2025-11-04T13:41:58.889Z" }, + { url = "https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769, upload-time = "2025-11-04T13:42:01.186Z" }, + { url = "https://files.pythonhosted.org/packages/11/72/90fda5ee3b97e51c494938a4a44c3a35a9c96c19bba12372fb9c634d6f57/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:b96d5f26b05d03cc60f11a7761a5ded1741da411e7fe0909e27a5e6a0cb7b034", size = 2115441, upload-time = "2025-11-04T13:42:39.557Z" }, + { url = 
"https://files.pythonhosted.org/packages/1f/53/8942f884fa33f50794f119012dc6a1a02ac43a56407adaac20463df8e98f/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:634e8609e89ceecea15e2d61bc9ac3718caaaa71963717bf3c8f38bfde64242c", size = 1930291, upload-time = "2025-11-04T13:42:42.169Z" }, + { url = "https://files.pythonhosted.org/packages/79/c8/ecb9ed9cd942bce09fc888ee960b52654fbdbede4ba6c2d6e0d3b1d8b49c/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93e8740d7503eb008aa2df04d3b9735f845d43ae845e6dcd2be0b55a2da43cd2", size = 1948632, upload-time = "2025-11-04T13:42:44.564Z" }, + { url = "https://files.pythonhosted.org/packages/2e/1b/687711069de7efa6af934e74f601e2a4307365e8fdc404703afc453eab26/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f15489ba13d61f670dcc96772e733aad1a6f9c429cc27574c6cdaed82d0146ad", size = 2138905, upload-time = "2025-11-04T13:42:47.156Z" }, + { url = "https://files.pythonhosted.org/packages/09/32/59b0c7e63e277fa7911c2fc70ccfb45ce4b98991e7ef37110663437005af/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd", size = 2110495, upload-time = "2025-11-04T13:42:49.689Z" }, + { url = "https://files.pythonhosted.org/packages/aa/81/05e400037eaf55ad400bcd318c05bb345b57e708887f07ddb2d20e3f0e98/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc", size = 1915388, upload-time = "2025-11-04T13:42:52.215Z" }, + { url = "https://files.pythonhosted.org/packages/6e/0d/e3549b2399f71d56476b77dbf3cf8937cec5cd70536bdc0e374a421d0599/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56", size = 1942879, upload-time = "2025-11-04T13:42:56.483Z" }, + { url = "https://files.pythonhosted.org/packages/f7/07/34573da085946b6a313d7c42f82f16e8920bfd730665de2d11c0c37a74b5/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b", size = 2139017, upload-time = "2025-11-04T13:42:59.471Z" }, + { url = "https://files.pythonhosted.org/packages/e6/b0/1a2aa41e3b5a4ba11420aba2d091b2d17959c8d1519ece3627c371951e73/pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b5819cd790dbf0c5eb9f82c73c16b39a65dd6dd4d1439dcdea7816ec9adddab8", size = 2103351, upload-time = "2025-11-04T13:43:02.058Z" }, + { url = "https://files.pythonhosted.org/packages/a4/ee/31b1f0020baaf6d091c87900ae05c6aeae101fa4e188e1613c80e4f1ea31/pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:5a4e67afbc95fa5c34cf27d9089bca7fcab4e51e57278d710320a70b956d1b9a", size = 1925363, upload-time = "2025-11-04T13:43:05.159Z" }, + { url = "https://files.pythonhosted.org/packages/e1/89/ab8e86208467e467a80deaca4e434adac37b10a9d134cd2f99b28a01e483/pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ece5c59f0ce7d001e017643d8d24da587ea1f74f6993467d85ae8a5ef9d4f42b", size = 2135615, upload-time = "2025-11-04T13:43:08.116Z" }, + { url = "https://files.pythonhosted.org/packages/99/0a/99a53d06dd0348b2008f2f30884b34719c323f16c3be4e6cc1203b74a91d/pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:16f80f7abe3351f8ea6858914ddc8c77e02578544a0ebc15b4c2e1a0e813b0b2", size = 2175369, upload-time = "2025-11-04T13:43:12.49Z" }, + { url = 
"https://files.pythonhosted.org/packages/6d/94/30ca3b73c6d485b9bb0bc66e611cff4a7138ff9736b7e66bcf0852151636/pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:33cb885e759a705b426baada1fe68cbb0a2e68e34c5d0d0289a364cf01709093", size = 2144218, upload-time = "2025-11-04T13:43:15.431Z" }, + { url = "https://files.pythonhosted.org/packages/87/57/31b4f8e12680b739a91f472b5671294236b82586889ef764b5fbc6669238/pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:c8d8b4eb992936023be7dee581270af5c6e0697a8559895f527f5b7105ecd36a", size = 2329951, upload-time = "2025-11-04T13:43:18.062Z" }, + { url = "https://files.pythonhosted.org/packages/7d/73/3c2c8edef77b8f7310e6fb012dbc4b8551386ed575b9eb6fb2506e28a7eb/pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:242a206cd0318f95cd21bdacff3fcc3aab23e79bba5cac3db5a841c9ef9c6963", size = 2318428, upload-time = "2025-11-04T13:43:20.679Z" }, + { url = "https://files.pythonhosted.org/packages/2f/02/8559b1f26ee0d502c74f9cca5c0d2fd97e967e083e006bbbb4e97f3a043a/pydantic_core-2.41.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d3a978c4f57a597908b7e697229d996d77a6d3c94901e9edee593adada95ce1a", size = 2147009, upload-time = "2025-11-04T13:43:23.286Z" }, + { url = "https://files.pythonhosted.org/packages/5f/9b/1b3f0e9f9305839d7e84912f9e8bfbd191ed1b1ef48083609f0dabde978c/pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b2379fa7ed44ddecb5bfe4e48577d752db9fc10be00a6b7446e9663ba143de26", size = 2101980, upload-time = "2025-11-04T13:43:25.97Z" }, + { url = "https://files.pythonhosted.org/packages/a4/ed/d71fefcb4263df0da6a85b5d8a7508360f2f2e9b3bf5814be9c8bccdccc1/pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:266fb4cbf5e3cbd0b53669a6d1b039c45e3ce651fd5442eff4d07c2cc8d66808", size = 1923865, upload-time = "2025-11-04T13:43:28.763Z" }, + { url = 
"https://files.pythonhosted.org/packages/ce/3a/626b38db460d675f873e4444b4bb030453bbe7b4ba55df821d026a0493c4/pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58133647260ea01e4d0500089a8c4f07bd7aa6ce109682b1426394988d8aaacc", size = 2134256, upload-time = "2025-11-04T13:43:31.71Z" }, + { url = "https://files.pythonhosted.org/packages/83/d9/8412d7f06f616bbc053d30cb4e5f76786af3221462ad5eee1f202021eb4e/pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:287dad91cfb551c363dc62899a80e9e14da1f0e2b6ebde82c806612ca2a13ef1", size = 2174762, upload-time = "2025-11-04T13:43:34.744Z" }, + { url = "https://files.pythonhosted.org/packages/55/4c/162d906b8e3ba3a99354e20faa1b49a85206c47de97a639510a0e673f5da/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:03b77d184b9eb40240ae9fd676ca364ce1085f203e1b1256f8ab9984dca80a84", size = 2143141, upload-time = "2025-11-04T13:43:37.701Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f2/f11dd73284122713f5f89fc940f370d035fa8e1e078d446b3313955157fe/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:a668ce24de96165bb239160b3d854943128f4334822900534f2fe947930e5770", size = 2330317, upload-time = "2025-11-04T13:43:40.406Z" }, + { url = "https://files.pythonhosted.org/packages/88/9d/b06ca6acfe4abb296110fb1273a4d848a0bfb2ff65f3ee92127b3244e16b/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f14f8f046c14563f8eb3f45f499cc658ab8d10072961e07225e507adb700e93f", size = 2316992, upload-time = "2025-11-04T13:43:43.602Z" }, + { url = "https://files.pythonhosted.org/packages/36/c7/cfc8e811f061c841d7990b0201912c3556bfeb99cdcb7ed24adc8d6f8704/pydantic_core-2.41.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:56121965f7a4dc965bff783d70b907ddf3d57f6eba29b6d2e5dabfaf07799c51", size = 2145302, upload-time = "2025-11-04T13:43:46.64Z" }, +] + +[[package]] +name 
= "pydantic-settings" +version = "2.12.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pydantic", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "python-dotenv", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "typing-inspection", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/43/4b/ac7e0aae12027748076d72a8764ff1c9d82ca75a7a52622e67ed3f765c54/pydantic_settings-2.12.0.tar.gz", hash = "sha256:005538ef951e3c2a68e1c08b292b5f2e71490def8589d4221b95dab00dafcfd0", size = 194184, upload-time = "2025-11-10T14:25:47.013Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c1/60/5d4751ba3f4a40a6891f24eec885f51afd78d208498268c734e256fb13c4/pydantic_settings-2.12.0-py3-none-any.whl", hash = "sha256:fddb9fd99a5b18da837b29710391e945b1e30c135477f484084ee513adb93809", size = 51880, upload-time = "2025-11-10T14:25:45.546Z" }, +] + +[[package]] +name = "pygments" +version = "2.19.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, +] + +[[package]] +name = "pyright" +version = "1.1.408" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "nodeenv", marker = "sys_platform == 'darwin' or sys_platform == 
'linux' or sys_platform == 'win32'" }, + { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/74/b2/5db700e52554b8f025faa9c3c624c59f1f6c8841ba81ab97641b54322f16/pyright-1.1.408.tar.gz", hash = "sha256:f28f2321f96852fa50b5829ea492f6adb0e6954568d1caa3f3af3a5f555eb684", size = 4400578, upload-time = "2026-01-08T08:07:38.795Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0c/82/a2c93e32800940d9573fb28c346772a14778b84ba7524e691b324620ab89/pyright-1.1.408-py3-none-any.whl", hash = "sha256:090b32865f4fdb1e0e6cd82bf5618480d48eecd2eb2e70f960982a3d9a4c17c1", size = 6399144, upload-time = "2026-01-08T08:07:37.082Z" }, +] + +[[package]] +name = "pytest" +version = "9.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "exceptiongroup", marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform == 'win32')" }, + { name = "iniconfig", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "packaging", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pluggy", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pygments", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "tomli", marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform == 'win32')" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" }, +] + +[[package]] +name = "pytest-asyncio" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "backports-asyncio-runner", marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform == 'win32')" }, + { name = "pytest", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "typing-extensions", marker = "(python_full_version < '3.13' and sys_platform == 'darwin') or (python_full_version < '3.13' and sys_platform == 'linux') or (python_full_version < '3.13' and sys_platform == 'win32')" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/90/2c/8af215c0f776415f3590cac4f9086ccefd6fd463befeae41cd4d3f193e5a/pytest_asyncio-1.3.0.tar.gz", hash = "sha256:d7f52f36d231b80ee124cd216ffb19369aa168fc10095013c6b014a34d3ee9e5", size = 50087, upload-time = "2025-11-10T16:07:47.256Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/35/f8b19922b6a25bc0880171a2f1a003eaeb93657475193ab516fd87cac9da/pytest_asyncio-1.3.0-py3-none-any.whl", hash = "sha256:611e26147c7f77640e6d0a92a38ed17c3e9848063698d5c93d5aa7aa11cebff5", size = 15075, upload-time = "2025-11-10T16:07:45.537Z" }, +] + +[[package]] +name = "pytest-cov" +version = "7.0.0" +source = { registry = "https://pypi.org/simple" 
} +dependencies = [ + { name = "coverage", extra = ["toml"], marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pluggy", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pytest", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5e/f7/c933acc76f5208b3b00089573cf6a2bc26dc80a8aece8f52bb7d6b1855ca/pytest_cov-7.0.0.tar.gz", hash = "sha256:33c97eda2e049a0c5298e91f519302a1334c26ac65c1a483d6206fd458361af1", size = 54328, upload-time = "2025-09-09T10:57:02.113Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ee/49/1377b49de7d0c1ce41292161ea0f721913fa8722c19fb9c1e3aa0367eecb/pytest_cov-7.0.0-py3-none-any.whl", hash = "sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861", size = 22424, upload-time = "2025-09-09T10:57:00.695Z" }, +] + +[[package]] +name = "pytest-retry" +version = "1.7.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c5/5b/607b017994cca28de3a1ad22a3eee8418e5d428dcd8ec25b26b18e995a73/pytest_retry-1.7.0.tar.gz", hash = "sha256:f8d52339f01e949df47c11ba9ee8d5b362f5824dff580d3870ec9ae0057df80f", size = 19977, upload-time = "2025-01-19T01:56:13.115Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7c/ff/3266c8a73b9b93c4b14160a7e2b31d1e1088e28ed29f4c2d93ae34093bfd/pytest_retry-1.7.0-py3-none-any.whl", hash = "sha256:a2dac85b79a4e2375943f1429479c65beb6c69553e7dae6b8332be47a60954f4", size = 13775, upload-time = "2025-01-19T01:56:11.199Z" }, +] + +[[package]] +name = "pytest-timeout" +version = "2.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest", marker = 
"sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ac/82/4c9ecabab13363e72d880f2fb504c5f750433b2b6f16e99f4ec21ada284c/pytest_timeout-2.4.0.tar.gz", hash = "sha256:7e68e90b01f9eff71332b25001f85c75495fc4e3a836701876183c4bcfd0540a", size = 17973, upload-time = "2025-05-05T19:44:34.99Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fa/b6/3127540ecdf1464a00e5a01ee60a1b09175f6913f0644ac748494d9c4b21/pytest_timeout-2.4.0-py3-none-any.whl", hash = "sha256:c42667e5cdadb151aeb5b26d114aff6bdf5a907f176a007a30b940d3d865b5c2", size = 14382, upload-time = "2025-05-05T19:44:33.502Z" }, +] + +[[package]] +name = "pytest-xdist" +version = "3.8.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "execnet", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pytest", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/78/b4/439b179d1ff526791eb921115fca8e44e596a13efeda518b9d845a619450/pytest_xdist-3.8.0.tar.gz", hash = "sha256:7e578125ec9bc6050861aa93f2d59f1d8d085595d6551c2c90b6f4fad8d3a9f1", size = 88069, upload-time = "2025-07-01T13:30:59.346Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ca/31/d4e37e9e550c2b92a9cbc2e4d0b7420a27224968580b5a447f420847c975/pytest_xdist-3.8.0-py3-none-any.whl", hash = "sha256:202ca578cfeb7370784a8c33d6d05bc6e13b4f25b5053c30a152269fd10f0b88", size = 46396, upload-time = "2025-07-01T13:30:56.632Z" }, +] + +[[package]] +name = "python-agent-template" +version = "0.1.0" +source = { virtual = "." 
} + +[package.dev-dependencies] +dev = [ + { name = "bandit", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "flit", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "mypy", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "poethepoet", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pre-commit", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pydantic", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pydantic-settings", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pygments", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pyright", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pytest", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pytest-asyncio", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pytest-cov", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pytest-retry", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pytest-timeout", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pytest-xdist", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "rich", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "ruff", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or 
sys_platform == 'win32'" }, + { name = "tomli", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "tomli-w", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "types-requests", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "uv", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +docs = [ + { name = "sphinx", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "sphinx-autodoc-typehints", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "tomli", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] + +[package.metadata] + +[package.metadata.requires-dev] +dev = [ + { name = "bandit", specifier = ">=1.7.9" }, + { name = "flit", specifier = ">=3.12.0,<4" }, + { name = "mypy", specifier = ">=1.11.2" }, + { name = "poethepoet", specifier = ">=0.32.0" }, + { name = "pre-commit", specifier = ">=3.7.1" }, + { name = "pydantic", specifier = ">=2.9.0" }, + { name = "pydantic-settings", specifier = ">=2.5.0" }, + { name = "pygments", specifier = ">=2.18.0" }, + { name = "pyright", specifier = ">=1.1.390" }, + { name = "pytest", specifier = ">=8.3.3" }, + { name = "pytest-asyncio", specifier = ">=0.24.0" }, + { name = "pytest-cov", specifier = ">=6.0.0" }, + { name = "pytest-retry", specifier = ">=1.6.3" }, + { name = "pytest-timeout", specifier = ">=2.3.1" }, + { name = "pytest-xdist", specifier = ">=3.6.1" }, + { name = "rich", specifier = ">=13.9.2" }, + { name = "ruff", specifier = ">=0.6.9" }, + { name = "tomli", specifier = ">=2.0.1" }, + { name = "tomli-w", specifier = ">=1.0.0" }, + { name = "types-requests", specifier = ">=2.32.0.20241016" }, + { name = "uv", specifier = ">=0.4.30" }, +] +docs = [ + { name = 
"sphinx", specifier = ">=7.4,<8" }, + { name = "sphinx-autodoc-typehints", specifier = ">=2.2.1" }, + { name = "tomli", specifier = ">=2.0.1" }, +] + +[[package]] +name = "python-dotenv" +version = "1.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f0/26/19cadc79a718c5edbec86fd4919a6b6d3f681039a2f6d66d14be94e75fb9/python_dotenv-1.2.1.tar.gz", hash = "sha256:42667e897e16ab0d66954af0e60a9caa94f0fd4ecf3aaf6d2d260eec1aa36ad6", size = 44221, upload-time = "2025-10-26T15:12:10.434Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/14/1b/a298b06749107c305e1fe0f814c6c74aea7b2f1e10989cb30f544a1b3253/python_dotenv-1.2.1-py3-none-any.whl", hash = "sha256:b81ee9561e9ca4004139c6cbba3a238c32b03e4894671e181b671e8cb8425d61", size = 21230, upload-time = "2025-10-26T15:12:09.109Z" }, +] + +[[package]] +name = "pyyaml" +version = "6.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f4/a0/39350dd17dd6d6c6507025c0e53aef67a9293a6d37d3511f23ea510d5800/pyyaml-6.0.3-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:214ed4befebe12df36bcc8bc2b64b396ca31be9304b8f59e25c11cf94a4c033b", size = 184227, upload-time = "2025-09-25T21:31:46.04Z" }, + { url = "https://files.pythonhosted.org/packages/05/14/52d505b5c59ce73244f59c7a50ecf47093ce4765f116cdb98286a71eeca2/pyyaml-6.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02ea2dfa234451bbb8772601d7b8e426c2bfa197136796224e50e35a78777956", size = 174019, upload-time = "2025-09-25T21:31:47.706Z" }, + { url = 
"https://files.pythonhosted.org/packages/43/f7/0e6a5ae5599c838c696adb4e6330a59f463265bfa1e116cfd1fbb0abaaae/pyyaml-6.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b30236e45cf30d2b8e7b3e85881719e98507abed1011bf463a8fa23e9c3e98a8", size = 740646, upload-time = "2025-09-25T21:31:49.21Z" }, + { url = "https://files.pythonhosted.org/packages/2f/3a/61b9db1d28f00f8fd0ae760459a5c4bf1b941baf714e207b6eb0657d2578/pyyaml-6.0.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:66291b10affd76d76f54fad28e22e51719ef9ba22b29e1d7d03d6777a9174198", size = 840793, upload-time = "2025-09-25T21:31:50.735Z" }, + { url = "https://files.pythonhosted.org/packages/7a/1e/7acc4f0e74c4b3d9531e24739e0ab832a5edf40e64fbae1a9c01941cabd7/pyyaml-6.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9c7708761fccb9397fe64bbc0395abcae8c4bf7b0eac081e12b809bf47700d0b", size = 770293, upload-time = "2025-09-25T21:31:51.828Z" }, + { url = "https://files.pythonhosted.org/packages/8b/ef/abd085f06853af0cd59fa5f913d61a8eab65d7639ff2a658d18a25d6a89d/pyyaml-6.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:418cf3f2111bc80e0933b2cd8cd04f286338bb88bdc7bc8e6dd775ebde60b5e0", size = 732872, upload-time = "2025-09-25T21:31:53.282Z" }, + { url = "https://files.pythonhosted.org/packages/1f/15/2bc9c8faf6450a8b3c9fc5448ed869c599c0a74ba2669772b1f3a0040180/pyyaml-6.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5e0b74767e5f8c593e8c9b5912019159ed0533c70051e9cce3e8b6aa699fcd69", size = 758828, upload-time = "2025-09-25T21:31:54.807Z" }, + { url = "https://files.pythonhosted.org/packages/a3/00/531e92e88c00f4333ce359e50c19b8d1de9fe8d581b1534e35ccfbc5f393/pyyaml-6.0.3-cp310-cp310-win32.whl", hash = "sha256:28c8d926f98f432f88adc23edf2e6d4921ac26fb084b028c733d01868d19007e", size = 142415, upload-time = "2025-09-25T21:31:55.885Z" }, + { url = 
"https://files.pythonhosted.org/packages/2a/fa/926c003379b19fca39dd4634818b00dec6c62d87faf628d1394e137354d4/pyyaml-6.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:bdb2c67c6c1390b63c6ff89f210c8fd09d9a1217a465701eac7316313c915e4c", size = 158561, upload-time = "2025-09-25T21:31:57.406Z" }, + { url = "https://files.pythonhosted.org/packages/6d/16/a95b6757765b7b031c9374925bb718d55e0a9ba8a1b6a12d25962ea44347/pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e", size = 185826, upload-time = "2025-09-25T21:31:58.655Z" }, + { url = "https://files.pythonhosted.org/packages/16/19/13de8e4377ed53079ee996e1ab0a9c33ec2faf808a4647b7b4c0d46dd239/pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824", size = 175577, upload-time = "2025-09-25T21:32:00.088Z" }, + { url = "https://files.pythonhosted.org/packages/0c/62/d2eb46264d4b157dae1275b573017abec435397aa59cbcdab6fc978a8af4/pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c", size = 775556, upload-time = "2025-09-25T21:32:01.31Z" }, + { url = "https://files.pythonhosted.org/packages/10/cb/16c3f2cf3266edd25aaa00d6c4350381c8b012ed6f5276675b9eba8d9ff4/pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00", size = 882114, upload-time = "2025-09-25T21:32:03.376Z" }, + { url = "https://files.pythonhosted.org/packages/71/60/917329f640924b18ff085ab889a11c763e0b573da888e8404ff486657602/pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d", size = 806638, upload-time = "2025-09-25T21:32:04.553Z" }, + { url = 
"https://files.pythonhosted.org/packages/dd/6f/529b0f316a9fd167281a6c3826b5583e6192dba792dd55e3203d3f8e655a/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a", size = 767463, upload-time = "2025-09-25T21:32:06.152Z" }, + { url = "https://files.pythonhosted.org/packages/f2/6a/b627b4e0c1dd03718543519ffb2f1deea4a1e6d42fbab8021936a4d22589/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4", size = 794986, upload-time = "2025-09-25T21:32:07.367Z" }, + { url = "https://files.pythonhosted.org/packages/45/91/47a6e1c42d9ee337c4839208f30d9f09caa9f720ec7582917b264defc875/pyyaml-6.0.3-cp311-cp311-win32.whl", hash = "sha256:8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b", size = 142543, upload-time = "2025-09-25T21:32:08.95Z" }, + { url = "https://files.pythonhosted.org/packages/da/e3/ea007450a105ae919a72393cb06f122f288ef60bba2dc64b26e2646fa315/pyyaml-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf", size = 158763, upload-time = "2025-09-25T21:32:09.96Z" }, + { url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063, upload-time = "2025-09-25T21:32:11.445Z" }, + { url = "https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973, upload-time = "2025-09-25T21:32:12.492Z" }, + { url = 
"https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116, upload-time = "2025-09-25T21:32:13.652Z" }, + { url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011, upload-time = "2025-09-25T21:32:15.21Z" }, + { url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870, upload-time = "2025-09-25T21:32:16.431Z" }, + { url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089, upload-time = "2025-09-25T21:32:17.56Z" }, + { url = "https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181, upload-time = "2025-09-25T21:32:18.834Z" }, + { url = "https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658, upload-time = "2025-09-25T21:32:20.209Z" }, + { url = 
"https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003, upload-time = "2025-09-25T21:32:21.167Z" }, + { url = "https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344, upload-time = "2025-09-25T21:32:22.617Z" }, + { url = "https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8", size = 181669, upload-time = "2025-09-25T21:32:23.673Z" }, + { url = "https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1", size = 173252, upload-time = "2025-09-25T21:32:25.149Z" }, + { url = "https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c", size = 767081, upload-time = "2025-09-25T21:32:26.575Z" }, + { url = "https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5", size = 841159, upload-time = "2025-09-25T21:32:27.727Z" }, + { url = 
"https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6", size = 801626, upload-time = "2025-09-25T21:32:28.878Z" }, + { url = "https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6", size = 753613, upload-time = "2025-09-25T21:32:30.178Z" }, + { url = "https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be", size = 794115, upload-time = "2025-09-25T21:32:31.353Z" }, + { url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427, upload-time = "2025-09-25T21:32:32.58Z" }, + { url = "https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090, upload-time = "2025-09-25T21:32:33.659Z" }, + { url = "https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246, upload-time = "2025-09-25T21:32:34.663Z" }, + { url = 
"https://files.pythonhosted.org/packages/9d/8c/f4bd7f6465179953d3ac9bc44ac1a8a3e6122cf8ada906b4f96c60172d43/pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac", size = 181814, upload-time = "2025-09-25T21:32:35.712Z" }, + { url = "https://files.pythonhosted.org/packages/bd/9c/4d95bb87eb2063d20db7b60faa3840c1b18025517ae857371c4dd55a6b3a/pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310", size = 173809, upload-time = "2025-09-25T21:32:36.789Z" }, + { url = "https://files.pythonhosted.org/packages/92/b5/47e807c2623074914e29dabd16cbbdd4bf5e9b2db9f8090fa64411fc5382/pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7", size = 766454, upload-time = "2025-09-25T21:32:37.966Z" }, + { url = "https://files.pythonhosted.org/packages/02/9e/e5e9b168be58564121efb3de6859c452fccde0ab093d8438905899a3a483/pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788", size = 836355, upload-time = "2025-09-25T21:32:39.178Z" }, + { url = "https://files.pythonhosted.org/packages/88/f9/16491d7ed2a919954993e48aa941b200f38040928474c9e85ea9e64222c3/pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5", size = 794175, upload-time = "2025-09-25T21:32:40.865Z" }, + { url = "https://files.pythonhosted.org/packages/dd/3f/5989debef34dc6397317802b527dbbafb2b4760878a53d4166579111411e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764", size = 755228, upload-time = "2025-09-25T21:32:42.084Z" }, + { url = 
"https://files.pythonhosted.org/packages/d7/ce/af88a49043cd2e265be63d083fc75b27b6ed062f5f9fd6cdc223ad62f03e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35", size = 789194, upload-time = "2025-09-25T21:32:43.362Z" }, + { url = "https://files.pythonhosted.org/packages/23/20/bb6982b26a40bb43951265ba29d4c246ef0ff59c9fdcdf0ed04e0687de4d/pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac", size = 156429, upload-time = "2025-09-25T21:32:57.844Z" }, + { url = "https://files.pythonhosted.org/packages/f4/f4/a4541072bb9422c8a883ab55255f918fa378ecf083f5b85e87fc2b4eda1b/pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3", size = 143912, upload-time = "2025-09-25T21:32:59.247Z" }, + { url = "https://files.pythonhosted.org/packages/7c/f9/07dd09ae774e4616edf6cda684ee78f97777bdd15847253637a6f052a62f/pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3", size = 189108, upload-time = "2025-09-25T21:32:44.377Z" }, + { url = "https://files.pythonhosted.org/packages/4e/78/8d08c9fb7ce09ad8c38ad533c1191cf27f7ae1effe5bb9400a46d9437fcf/pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba", size = 183641, upload-time = "2025-09-25T21:32:45.407Z" }, + { url = "https://files.pythonhosted.org/packages/7b/5b/3babb19104a46945cf816d047db2788bcaf8c94527a805610b0289a01c6b/pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c", size = 831901, upload-time = "2025-09-25T21:32:48.83Z" }, + { url = 
"https://files.pythonhosted.org/packages/8b/cc/dff0684d8dc44da4d22a13f35f073d558c268780ce3c6ba1b87055bb0b87/pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702", size = 861132, upload-time = "2025-09-25T21:32:50.149Z" }, + { url = "https://files.pythonhosted.org/packages/b1/5e/f77dc6b9036943e285ba76b49e118d9ea929885becb0a29ba8a7c75e29fe/pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c", size = 839261, upload-time = "2025-09-25T21:32:51.808Z" }, + { url = "https://files.pythonhosted.org/packages/ce/88/a9db1376aa2a228197c58b37302f284b5617f56a5d959fd1763fb1675ce6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065", size = 805272, upload-time = "2025-09-25T21:32:52.941Z" }, + { url = "https://files.pythonhosted.org/packages/da/92/1446574745d74df0c92e6aa4a7b0b3130706a4142b2d1a5869f2eaa423c6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65", size = 829923, upload-time = "2025-09-25T21:32:54.537Z" }, + { url = "https://files.pythonhosted.org/packages/f0/7a/1c7270340330e575b92f397352af856a8c06f230aa3e76f86b39d01b416a/pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9", size = 174062, upload-time = "2025-09-25T21:32:55.767Z" }, + { url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" }, +] + +[[package]] +name = "requests" +version = "2.32.5" +source = { 
registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "charset-normalizer", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "idna", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "urllib3", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" }, +] + +[[package]] +name = "rich" +version = "14.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markdown-it-py", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pygments", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fb/d2/8920e102050a0de7bfabeb4c4614a49248cf8d5d7a8d01885fbb24dc767a/rich-14.2.0.tar.gz", hash = "sha256:73ff50c7c0c1c77c8243079283f4edb376f0f6442433aecb8ce7e6d0b92d1fe4", size = 219990, upload-time = "2025-10-09T14:16:53.064Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/25/7a/b0178788f8dc6cafce37a212c99565fa1fe7872c70c6c9c1e1a372d9d88f/rich-14.2.0-py3-none-any.whl", hash = 
"sha256:76bc51fe2e57d2b1be1f96c524b890b816e334ab4c1e45888799bfaab0021edd", size = 243393, upload-time = "2025-10-09T14:16:51.245Z" }, +] + +[[package]] +name = "ruff" +version = "0.14.13" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/50/0a/1914efb7903174b381ee2ffeebb4253e729de57f114e63595114c8ca451f/ruff-0.14.13.tar.gz", hash = "sha256:83cd6c0763190784b99650a20fec7633c59f6ebe41c5cc9d45ee42749563ad47", size = 6059504, upload-time = "2026-01-15T20:15:16.918Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c3/ae/0deefbc65ca74b0ab1fd3917f94dc3b398233346a74b8bbb0a916a1a6bf6/ruff-0.14.13-py3-none-linux_armv6l.whl", hash = "sha256:76f62c62cd37c276cb03a275b198c7c15bd1d60c989f944db08a8c1c2dbec18b", size = 13062418, upload-time = "2026-01-15T20:14:50.779Z" }, + { url = "https://files.pythonhosted.org/packages/47/df/5916604faa530a97a3c154c62a81cb6b735c0cb05d1e26d5ad0f0c8ac48a/ruff-0.14.13-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:914a8023ece0528d5cc33f5a684f5f38199bbb566a04815c2c211d8f40b5d0ed", size = 13442344, upload-time = "2026-01-15T20:15:07.94Z" }, + { url = "https://files.pythonhosted.org/packages/4c/f3/e0e694dd69163c3a1671e102aa574a50357536f18a33375050334d5cd517/ruff-0.14.13-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d24899478c35ebfa730597a4a775d430ad0d5631b8647a3ab368c29b7e7bd063", size = 12354720, upload-time = "2026-01-15T20:15:09.854Z" }, + { url = "https://files.pythonhosted.org/packages/c3/e8/67f5fcbbaee25e8fc3b56cc33e9892eca7ffe09f773c8e5907757a7e3bdb/ruff-0.14.13-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9aaf3870f14d925bbaf18b8a2347ee0ae7d95a2e490e4d4aea6813ed15ebc80e", size = 12774493, upload-time = "2026-01-15T20:15:20.908Z" }, + { url = "https://files.pythonhosted.org/packages/6b/ce/d2e9cb510870b52a9565d885c0d7668cc050e30fa2c8ac3fb1fda15c083d/ruff-0.14.13-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:ac5b7f63dd3b27cc811850f5ffd8fff845b00ad70e60b043aabf8d6ecc304e09", size = 12815174, upload-time = "2026-01-15T20:15:05.74Z" }, + { url = "https://files.pythonhosted.org/packages/88/00/c38e5da58beebcf4fa32d0ddd993b63dfacefd02ab7922614231330845bf/ruff-0.14.13-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:78d2b1097750d90ba82ce4ba676e85230a0ed694178ca5e61aa9b459970b3eb9", size = 13680909, upload-time = "2026-01-15T20:15:14.537Z" }, + { url = "https://files.pythonhosted.org/packages/61/61/cd37c9dd5bd0a3099ba79b2a5899ad417d8f3b04038810b0501a80814fd7/ruff-0.14.13-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:7d0bf87705acbbcb8d4c24b2d77fbb73d40210a95c3903b443cd9e30824a5032", size = 15144215, upload-time = "2026-01-15T20:15:22.886Z" }, + { url = "https://files.pythonhosted.org/packages/56/8a/85502d7edbf98c2df7b8876f316c0157359165e16cdf98507c65c8d07d3d/ruff-0.14.13-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a3eb5da8e2c9e9f13431032fdcbe7681de9ceda5835efee3269417c13f1fed5c", size = 14706067, upload-time = "2026-01-15T20:14:48.271Z" }, + { url = "https://files.pythonhosted.org/packages/7e/2f/de0df127feb2ee8c1e54354dc1179b4a23798f0866019528c938ba439aca/ruff-0.14.13-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:642442b42957093811cd8d2140dfadd19c7417030a7a68cf8d51fcdd5f217427", size = 14133916, upload-time = "2026-01-15T20:14:57.357Z" }, + { url = "https://files.pythonhosted.org/packages/0d/77/9b99686bb9fe07a757c82f6f95e555c7a47801a9305576a9c67e0a31d280/ruff-0.14.13-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4acdf009f32b46f6e8864af19cbf6841eaaed8638e65c8dac845aea0d703c841", size = 13859207, upload-time = "2026-01-15T20:14:55.111Z" }, + { url = "https://files.pythonhosted.org/packages/7d/46/2bdcb34a87a179a4d23022d818c1c236cb40e477faf0d7c9afb6813e5876/ruff-0.14.13-py3-none-manylinux_2_31_riscv64.whl", hash = 
"sha256:591a7f68860ea4e003917d19b5c4f5ac39ff558f162dc753a2c5de897fd5502c", size = 14043686, upload-time = "2026-01-15T20:14:52.841Z" }, + { url = "https://files.pythonhosted.org/packages/1a/a9/5c6a4f56a0512c691cf143371bcf60505ed0f0860f24a85da8bd123b2bf1/ruff-0.14.13-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:774c77e841cc6e046fc3e91623ce0903d1cd07e3a36b1a9fe79b81dab3de506b", size = 12663837, upload-time = "2026-01-15T20:15:18.921Z" }, + { url = "https://files.pythonhosted.org/packages/fe/bb/b920016ece7651fa7fcd335d9d199306665486694d4361547ccb19394c44/ruff-0.14.13-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:61f4e40077a1248436772bb6512db5fc4457fe4c49e7a94ea7c5088655dd21ae", size = 12805867, upload-time = "2026-01-15T20:14:59.272Z" }, + { url = "https://files.pythonhosted.org/packages/7d/b3/0bd909851e5696cd21e32a8fc25727e5f58f1934b3596975503e6e85415c/ruff-0.14.13-py3-none-musllinux_1_2_i686.whl", hash = "sha256:6d02f1428357fae9e98ac7aa94b7e966fd24151088510d32cf6f902d6c09235e", size = 13208528, upload-time = "2026-01-15T20:15:03.732Z" }, + { url = "https://files.pythonhosted.org/packages/3b/3b/e2d94cb613f6bbd5155a75cbe072813756363eba46a3f2177a1fcd0cd670/ruff-0.14.13-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:e399341472ce15237be0c0ae5fbceca4b04cd9bebab1a2b2c979e015455d8f0c", size = 13929242, upload-time = "2026-01-15T20:15:11.918Z" }, + { url = "https://files.pythonhosted.org/packages/6a/c5/abd840d4132fd51a12f594934af5eba1d5d27298a6f5b5d6c3be45301caf/ruff-0.14.13-py3-none-win32.whl", hash = "sha256:ef720f529aec113968b45dfdb838ac8934e519711da53a0456038a0efecbd680", size = 12919024, upload-time = "2026-01-15T20:14:43.647Z" }, + { url = "https://files.pythonhosted.org/packages/c2/55/6384b0b8ce731b6e2ade2b5449bf07c0e4c31e8a2e68ea65b3bafadcecc5/ruff-0.14.13-py3-none-win_amd64.whl", hash = "sha256:6070bd026e409734b9257e03e3ef18c6e1a216f0435c6751d7a8ec69cb59abef", size = 14097887, upload-time = "2026-01-15T20:15:01.48Z" }, + { url = 
"https://files.pythonhosted.org/packages/4d/e1/7348090988095e4e39560cfc2f7555b1b2a7357deba19167b600fdf5215d/ruff-0.14.13-py3-none-win_arm64.whl", hash = "sha256:7ab819e14f1ad9fe39f246cfcc435880ef7a9390d81a2b6ac7e01039083dd247", size = 13080224, upload-time = "2026-01-15T20:14:45.853Z" }, +] + +[[package]] +name = "snowballstemmer" +version = "3.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/75/a7/9810d872919697c9d01295633f5d574fb416d47e535f258272ca1f01f447/snowballstemmer-3.0.1.tar.gz", hash = "sha256:6d5eeeec8e9f84d4d56b847692bacf79bc2c8e90c7f80ca4444ff8b6f2e52895", size = 105575, upload-time = "2025-05-09T16:34:51.843Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c8/78/3565d011c61f5a43488987ee32b6f3f656e7f107ac2782dd57bdd7d91d9a/snowballstemmer-3.0.1-py3-none-any.whl", hash = "sha256:6cd7b3897da8d6c9ffb968a6781fa6532dce9c3618a4b127d920dab764a19064", size = 103274, upload-time = "2025-05-09T16:34:50.371Z" }, +] + +[[package]] +name = "sphinx" +version = "7.4.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "alabaster", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "babel", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "docutils", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "imagesize", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "jinja2", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "packaging", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pygments", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 
'win32'" }, + { name = "requests", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "snowballstemmer", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "sphinxcontrib-applehelp", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "sphinxcontrib-devhelp", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "sphinxcontrib-htmlhelp", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "sphinxcontrib-jsmath", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "sphinxcontrib-qthelp", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "sphinxcontrib-serializinghtml", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "tomli", marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform == 'win32')" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5b/be/50e50cb4f2eff47df05673d361095cafd95521d2a22521b920c67a372dcb/sphinx-7.4.7.tar.gz", hash = "sha256:242f92a7ea7e6c5b406fdc2615413890ba9f699114a9c09192d7dfead2ee9cfe", size = 8067911, upload-time = "2024-07-20T14:46:56.059Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0d/ef/153f6803c5d5f8917dbb7f7fcf6d34a871ede3296fa89c2c703f5f8a6c8e/sphinx-7.4.7-py3-none-any.whl", hash = "sha256:c2419e2135d11f1951cd994d6eb18a1835bd8fdd8429f9ca375dc1f3281bd239", size = 3401624, upload-time = "2024-07-20T14:46:52.142Z" }, +] + +[[package]] +name = "sphinx-autodoc-typehints" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = 
"sphinx", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/74/cd/03e7b917230dc057922130a79ba0240df1693bfd76727ea33fae84b39138/sphinx_autodoc_typehints-2.3.0.tar.gz", hash = "sha256:535c78ed2d6a1bad393ba9f3dfa2602cf424e2631ee207263e07874c38fde084", size = 40709, upload-time = "2024-08-29T16:25:48.343Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/f3/e0a4ce49da4b6f4e4ce84b3c39a0677831884cb9d8a87ccbf1e9e56e53ac/sphinx_autodoc_typehints-2.3.0-py3-none-any.whl", hash = "sha256:3098e2c6d0ba99eacd013eb06861acc9b51c6e595be86ab05c08ee5506ac0c67", size = 19836, upload-time = "2024-08-29T16:25:46.707Z" }, +] + +[[package]] +name = "sphinxcontrib-applehelp" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ba/6e/b837e84a1a704953c62ef8776d45c3e8d759876b4a84fe14eba2859106fe/sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1", size = 20053, upload-time = "2024-07-29T01:09:00.465Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5d/85/9ebeae2f76e9e77b952f4b274c27238156eae7979c5421fba91a28f4970d/sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5", size = 119300, upload-time = "2024-07-29T01:08:58.99Z" }, +] + +[[package]] +name = "sphinxcontrib-devhelp" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f6/d2/5beee64d3e4e747f316bae86b55943f51e82bb86ecd325883ef65741e7da/sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad", size = 12967, upload-time = "2024-07-29T01:09:23.417Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/35/7a/987e583882f985fe4d7323774889ec58049171828b58c2217e7f79cdf44e/sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2", size = 82530, upload-time = "2024-07-29T01:09:21.945Z" }, +] + +[[package]] +name = "sphinxcontrib-htmlhelp" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/93/983afd9aa001e5201eab16b5a444ed5b9b0a7a010541e0ddfbbfd0b2470c/sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9", size = 22617, upload-time = "2024-07-29T01:09:37.889Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0a/7b/18a8c0bcec9182c05a0b3ec2a776bba4ead82750a55ff798e8d406dae604/sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8", size = 98705, upload-time = "2024-07-29T01:09:36.407Z" }, +] + +[[package]] +name = "sphinxcontrib-jsmath" +version = "1.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/e8/9ed3830aeed71f17c026a07a5097edcf44b692850ef215b161b8ad875729/sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8", size = 5787, upload-time = "2019-01-21T16:10:16.347Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/42/4c8646762ee83602e3fb3fbe774c2fac12f317deb0b5dbeeedd2d3ba4b77/sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178", size = 5071, upload-time = "2019-01-21T16:10:14.333Z" }, +] + +[[package]] +name = "sphinxcontrib-qthelp" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/68/bc/9104308fc285eb3e0b31b67688235db556cd5b0ef31d96f30e45f2e51cae/sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab", size = 17165, upload-time = "2024-07-29T01:09:56.435Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/27/83/859ecdd180cacc13b1f7e857abf8582a64552ea7a061057a6c716e790fce/sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb", size = 88743, upload-time = "2024-07-29T01:09:54.885Z" }, +] + +[[package]] +name = "sphinxcontrib-serializinghtml" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3b/44/6716b257b0aa6bfd51a1b31665d1c205fb12cb5ad56de752dfa15657de2f/sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d", size = 16080, upload-time = "2024-07-29T01:10:09.332Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/52/a7/d2782e4e3f77c8450f727ba74a8f12756d5ba823d81b941f1b04da9d033a/sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331", size = 92072, upload-time = "2024-07-29T01:10:08.203Z" }, +] + +[[package]] +name = "stevedore" +version = "5.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/96/5b/496f8abebd10c3301129abba7ddafd46c71d799a70c44ab080323987c4c9/stevedore-5.6.0.tar.gz", hash = "sha256:f22d15c6ead40c5bbfa9ca54aa7e7b4a07d59b36ae03ed12ced1a54cf0b51945", size = 516074, upload-time = "2025-11-20T10:06:07.264Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f4/40/8561ce06dc46fd17242c7724ab25b257a2ac1b35f4ebf551b40ce6105cfa/stevedore-5.6.0-py3-none-any.whl", hash = 
"sha256:4a36dccefd7aeea0c70135526cecb7766c4c84c473b1af68db23d541b6dc1820", size = 54428, upload-time = "2025-11-20T10:06:05.946Z" }, +] + +[[package]] +name = "tomli" +version = "2.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/82/30/31573e9457673ab10aa432461bee537ce6cef177667deca369efb79df071/tomli-2.4.0.tar.gz", hash = "sha256:aa89c3f6c277dd275d8e243ad24f3b5e701491a860d5121f2cdd399fbb31fc9c", size = 17477, upload-time = "2026-01-11T11:22:38.165Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3c/d9/3dc2289e1f3b32eb19b9785b6a006b28ee99acb37d1d47f78d4c10e28bf8/tomli-2.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b5ef256a3fd497d4973c11bf142e9ed78b150d36f5773f1ca6088c230ffc5867", size = 153663, upload-time = "2026-01-11T11:21:45.27Z" }, + { url = "https://files.pythonhosted.org/packages/51/32/ef9f6845e6b9ca392cd3f64f9ec185cc6f09f0a2df3db08cbe8809d1d435/tomli-2.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5572e41282d5268eb09a697c89a7bee84fae66511f87533a6f88bd2f7b652da9", size = 148469, upload-time = "2026-01-11T11:21:46.873Z" }, + { url = "https://files.pythonhosted.org/packages/d6/c2/506e44cce89a8b1b1e047d64bd495c22c9f71f21e05f380f1a950dd9c217/tomli-2.4.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:551e321c6ba03b55676970b47cb1b73f14a0a4dce6a3e1a9458fd6d921d72e95", size = 236039, upload-time = "2026-01-11T11:21:48.503Z" }, + { url = "https://files.pythonhosted.org/packages/b3/40/e1b65986dbc861b7e986e8ec394598187fa8aee85b1650b01dd925ca0be8/tomli-2.4.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5e3f639a7a8f10069d0e15408c0b96a2a828cfdec6fca05296ebcdcc28ca7c76", size = 243007, upload-time = "2026-01-11T11:21:49.456Z" }, + { url = 
"https://files.pythonhosted.org/packages/9c/6f/6e39ce66b58a5b7ae572a0f4352ff40c71e8573633deda43f6a379d56b3e/tomli-2.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1b168f2731796b045128c45982d3a4874057626da0e2ef1fdd722848b741361d", size = 240875, upload-time = "2026-01-11T11:21:50.755Z" }, + { url = "https://files.pythonhosted.org/packages/aa/ad/cb089cb190487caa80204d503c7fd0f4d443f90b95cf4ef5cf5aa0f439b0/tomli-2.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:133e93646ec4300d651839d382d63edff11d8978be23da4cc106f5a18b7d0576", size = 246271, upload-time = "2026-01-11T11:21:51.81Z" }, + { url = "https://files.pythonhosted.org/packages/0b/63/69125220e47fd7a3a27fd0de0c6398c89432fec41bc739823bcc66506af6/tomli-2.4.0-cp311-cp311-win32.whl", hash = "sha256:b6c78bdf37764092d369722d9946cb65b8767bfa4110f902a1b2542d8d173c8a", size = 96770, upload-time = "2026-01-11T11:21:52.647Z" }, + { url = "https://files.pythonhosted.org/packages/1e/0d/a22bb6c83f83386b0008425a6cd1fa1c14b5f3dd4bad05e98cf3dbbf4a64/tomli-2.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:d3d1654e11d724760cdb37a3d7691f0be9db5fbdaef59c9f532aabf87006dbaa", size = 107626, upload-time = "2026-01-11T11:21:53.459Z" }, + { url = "https://files.pythonhosted.org/packages/2f/6d/77be674a3485e75cacbf2ddba2b146911477bd887dda9d8c9dfb2f15e871/tomli-2.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:cae9c19ed12d4e8f3ebf46d1a75090e4c0dc16271c5bce1c833ac168f08fb614", size = 94842, upload-time = "2026-01-11T11:21:54.831Z" }, + { url = "https://files.pythonhosted.org/packages/3c/43/7389a1869f2f26dba52404e1ef13b4784b6b37dac93bac53457e3ff24ca3/tomli-2.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:920b1de295e72887bafa3ad9f7a792f811847d57ea6b1215154030cf131f16b1", size = 154894, upload-time = "2026-01-11T11:21:56.07Z" }, + { url = "https://files.pythonhosted.org/packages/e9/05/2f9bf110b5294132b2edf13fe6ca6ae456204f3d749f623307cbb7a946f2/tomli-2.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:7d6d9a4aee98fac3eab4952ad1d73aee87359452d1c086b5ceb43ed02ddb16b8", size = 149053, upload-time = "2026-01-11T11:21:57.467Z" }, + { url = "https://files.pythonhosted.org/packages/e8/41/1eda3ca1abc6f6154a8db4d714a4d35c4ad90adc0bcf700657291593fbf3/tomli-2.4.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:36b9d05b51e65b254ea6c2585b59d2c4cb91c8a3d91d0ed0f17591a29aaea54a", size = 243481, upload-time = "2026-01-11T11:21:58.661Z" }, + { url = "https://files.pythonhosted.org/packages/d2/6d/02ff5ab6c8868b41e7d4b987ce2b5f6a51d3335a70aa144edd999e055a01/tomli-2.4.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1c8a885b370751837c029ef9bc014f27d80840e48bac415f3412e6593bbc18c1", size = 251720, upload-time = "2026-01-11T11:22:00.178Z" }, + { url = "https://files.pythonhosted.org/packages/7b/57/0405c59a909c45d5b6f146107c6d997825aa87568b042042f7a9c0afed34/tomli-2.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8768715ffc41f0008abe25d808c20c3d990f42b6e2e58305d5da280ae7d1fa3b", size = 247014, upload-time = "2026-01-11T11:22:01.238Z" }, + { url = "https://files.pythonhosted.org/packages/2c/0e/2e37568edd944b4165735687cbaf2fe3648129e440c26d02223672ee0630/tomli-2.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b438885858efd5be02a9a133caf5812b8776ee0c969fea02c45e8e3f296ba51", size = 251820, upload-time = "2026-01-11T11:22:02.727Z" }, + { url = "https://files.pythonhosted.org/packages/5a/1c/ee3b707fdac82aeeb92d1a113f803cf6d0f37bdca0849cb489553e1f417a/tomli-2.4.0-cp312-cp312-win32.whl", hash = "sha256:0408e3de5ec77cc7f81960c362543cbbd91ef883e3138e81b729fc3eea5b9729", size = 97712, upload-time = "2026-01-11T11:22:03.777Z" }, + { url = "https://files.pythonhosted.org/packages/69/13/c07a9177d0b3bab7913299b9278845fc6eaaca14a02667c6be0b0a2270c8/tomli-2.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:685306e2cc7da35be4ee914fd34ab801a6acacb061b6a7abca922aaf9ad368da", 
size = 108296, upload-time = "2026-01-11T11:22:04.86Z" }, + { url = "https://files.pythonhosted.org/packages/18/27/e267a60bbeeee343bcc279bb9e8fbed0cbe224bc7b2a3dc2975f22809a09/tomli-2.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:5aa48d7c2356055feef06a43611fc401a07337d5b006be13a30f6c58f869e3c3", size = 94553, upload-time = "2026-01-11T11:22:05.854Z" }, + { url = "https://files.pythonhosted.org/packages/34/91/7f65f9809f2936e1f4ce6268ae1903074563603b2a2bd969ebbda802744f/tomli-2.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:84d081fbc252d1b6a982e1870660e7330fb8f90f676f6e78b052ad4e64714bf0", size = 154915, upload-time = "2026-01-11T11:22:06.703Z" }, + { url = "https://files.pythonhosted.org/packages/20/aa/64dd73a5a849c2e8f216b755599c511badde80e91e9bc2271baa7b2cdbb1/tomli-2.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9a08144fa4cba33db5255f9b74f0b89888622109bd2776148f2597447f92a94e", size = 149038, upload-time = "2026-01-11T11:22:07.56Z" }, + { url = "https://files.pythonhosted.org/packages/9e/8a/6d38870bd3d52c8d1505ce054469a73f73a0fe62c0eaf5dddf61447e32fa/tomli-2.4.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c73add4bb52a206fd0c0723432db123c0c75c280cbd67174dd9d2db228ebb1b4", size = 242245, upload-time = "2026-01-11T11:22:08.344Z" }, + { url = "https://files.pythonhosted.org/packages/59/bb/8002fadefb64ab2669e5b977df3f5e444febea60e717e755b38bb7c41029/tomli-2.4.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1fb2945cbe303b1419e2706e711b7113da57b7db31ee378d08712d678a34e51e", size = 250335, upload-time = "2026-01-11T11:22:09.951Z" }, + { url = "https://files.pythonhosted.org/packages/a5/3d/4cdb6f791682b2ea916af2de96121b3cb1284d7c203d97d92d6003e91c8d/tomli-2.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bbb1b10aa643d973366dc2cb1ad94f99c1726a02343d43cbc011edbfac579e7c", size = 245962, upload-time = "2026-01-11T11:22:11.27Z" }, + { url 
= "https://files.pythonhosted.org/packages/f2/4a/5f25789f9a460bd858ba9756ff52d0830d825b458e13f754952dd15fb7bb/tomli-2.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4cbcb367d44a1f0c2be408758b43e1ffb5308abe0ea222897d6bfc8e8281ef2f", size = 250396, upload-time = "2026-01-11T11:22:12.325Z" }, + { url = "https://files.pythonhosted.org/packages/aa/2f/b73a36fea58dfa08e8b3a268750e6853a6aac2a349241a905ebd86f3047a/tomli-2.4.0-cp313-cp313-win32.whl", hash = "sha256:7d49c66a7d5e56ac959cb6fc583aff0651094ec071ba9ad43df785abc2320d86", size = 97530, upload-time = "2026-01-11T11:22:13.865Z" }, + { url = "https://files.pythonhosted.org/packages/3b/af/ca18c134b5d75de7e8dc551c5234eaba2e8e951f6b30139599b53de9c187/tomli-2.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:3cf226acb51d8f1c394c1b310e0e0e61fecdd7adcb78d01e294ac297dd2e7f87", size = 108227, upload-time = "2026-01-11T11:22:15.224Z" }, + { url = "https://files.pythonhosted.org/packages/22/c3/b386b832f209fee8073c8138ec50f27b4460db2fdae9ffe022df89a57f9b/tomli-2.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:d20b797a5c1ad80c516e41bc1fb0443ddb5006e9aaa7bda2d71978346aeb9132", size = 94748, upload-time = "2026-01-11T11:22:16.009Z" }, + { url = "https://files.pythonhosted.org/packages/f3/c4/84047a97eb1004418bc10bdbcfebda209fca6338002eba2dc27cc6d13563/tomli-2.4.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:26ab906a1eb794cd4e103691daa23d95c6919cc2fa9160000ac02370cc9dd3f6", size = 154725, upload-time = "2026-01-11T11:22:17.269Z" }, + { url = "https://files.pythonhosted.org/packages/a8/5d/d39038e646060b9d76274078cddf146ced86dc2b9e8bbf737ad5983609a0/tomli-2.4.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:20cedb4ee43278bc4f2fee6cb50daec836959aadaf948db5172e776dd3d993fc", size = 148901, upload-time = "2026-01-11T11:22:18.287Z" }, + { url = 
"https://files.pythonhosted.org/packages/73/e5/383be1724cb30f4ce44983d249645684a48c435e1cd4f8b5cded8a816d3c/tomli-2.4.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:39b0b5d1b6dd03684b3fb276407ebed7090bbec989fa55838c98560c01113b66", size = 243375, upload-time = "2026-01-11T11:22:19.154Z" }, + { url = "https://files.pythonhosted.org/packages/31/f0/bea80c17971c8d16d3cc109dc3585b0f2ce1036b5f4a8a183789023574f2/tomli-2.4.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a26d7ff68dfdb9f87a016ecfd1e1c2bacbe3108f4e0f8bcd2228ef9a766c787d", size = 250639, upload-time = "2026-01-11T11:22:20.168Z" }, + { url = "https://files.pythonhosted.org/packages/2c/8f/2853c36abbb7608e3f945d8a74e32ed3a74ee3a1f468f1ffc7d1cb3abba6/tomli-2.4.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:20ffd184fb1df76a66e34bd1b36b4a4641bd2b82954befa32fe8163e79f1a702", size = 246897, upload-time = "2026-01-11T11:22:21.544Z" }, + { url = "https://files.pythonhosted.org/packages/49/f0/6c05e3196ed5337b9fe7ea003e95fd3819a840b7a0f2bf5a408ef1dad8ed/tomli-2.4.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:75c2f8bbddf170e8effc98f5e9084a8751f8174ea6ccf4fca5398436e0320bc8", size = 254697, upload-time = "2026-01-11T11:22:23.058Z" }, + { url = "https://files.pythonhosted.org/packages/f3/f5/2922ef29c9f2951883525def7429967fc4d8208494e5ab524234f06b688b/tomli-2.4.0-cp314-cp314-win32.whl", hash = "sha256:31d556d079d72db7c584c0627ff3a24c5d3fb4f730221d3444f3efb1b2514776", size = 98567, upload-time = "2026-01-11T11:22:24.033Z" }, + { url = "https://files.pythonhosted.org/packages/7b/31/22b52e2e06dd2a5fdbc3ee73226d763b184ff21fc24e20316a44ccc4d96b/tomli-2.4.0-cp314-cp314-win_amd64.whl", hash = "sha256:43e685b9b2341681907759cf3a04e14d7104b3580f808cfde1dfdb60ada85475", size = 108556, upload-time = "2026-01-11T11:22:25.378Z" }, + { url = 
"https://files.pythonhosted.org/packages/48/3d/5058dff3255a3d01b705413f64f4306a141a8fd7a251e5a495e3f192a998/tomli-2.4.0-cp314-cp314-win_arm64.whl", hash = "sha256:3d895d56bd3f82ddd6faaff993c275efc2ff38e52322ea264122d72729dca2b2", size = 96014, upload-time = "2026-01-11T11:22:26.138Z" }, + { url = "https://files.pythonhosted.org/packages/b8/4e/75dab8586e268424202d3a1997ef6014919c941b50642a1682df43204c22/tomli-2.4.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:5b5807f3999fb66776dbce568cc9a828544244a8eb84b84b9bafc080c99597b9", size = 163339, upload-time = "2026-01-11T11:22:27.143Z" }, + { url = "https://files.pythonhosted.org/packages/06/e3/b904d9ab1016829a776d97f163f183a48be6a4deb87304d1e0116a349519/tomli-2.4.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c084ad935abe686bd9c898e62a02a19abfc9760b5a79bc29644463eaf2840cb0", size = 159490, upload-time = "2026-01-11T11:22:28.399Z" }, + { url = "https://files.pythonhosted.org/packages/e3/5a/fc3622c8b1ad823e8ea98a35e3c632ee316d48f66f80f9708ceb4f2a0322/tomli-2.4.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0f2e3955efea4d1cfbcb87bc321e00dc08d2bcb737fd1d5e398af111d86db5df", size = 269398, upload-time = "2026-01-11T11:22:29.345Z" }, + { url = "https://files.pythonhosted.org/packages/fd/33/62bd6152c8bdd4c305ad9faca48f51d3acb2df1f8791b1477d46ff86e7f8/tomli-2.4.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e0fe8a0b8312acf3a88077a0802565cb09ee34107813bba1c7cd591fa6cfc8d", size = 276515, upload-time = "2026-01-11T11:22:30.327Z" }, + { url = "https://files.pythonhosted.org/packages/4b/ff/ae53619499f5235ee4211e62a8d7982ba9e439a0fb4f2f351a93d67c1dd2/tomli-2.4.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:413540dce94673591859c4c6f794dfeaa845e98bf35d72ed59636f869ef9f86f", size = 273806, upload-time = "2026-01-11T11:22:32.56Z" }, + { url = 
"https://files.pythonhosted.org/packages/47/71/cbca7787fa68d4d0a9f7072821980b39fbb1b6faeb5f5cf02f4a5559fa28/tomli-2.4.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:0dc56fef0e2c1c470aeac5b6ca8cc7b640bb93e92d9803ddaf9ea03e198f5b0b", size = 281340, upload-time = "2026-01-11T11:22:33.505Z" }, + { url = "https://files.pythonhosted.org/packages/f5/00/d595c120963ad42474cf6ee7771ad0d0e8a49d0f01e29576ee9195d9ecdf/tomli-2.4.0-cp314-cp314t-win32.whl", hash = "sha256:d878f2a6707cc9d53a1be1414bbb419e629c3d6e67f69230217bb663e76b5087", size = 108106, upload-time = "2026-01-11T11:22:34.451Z" }, + { url = "https://files.pythonhosted.org/packages/de/69/9aa0c6a505c2f80e519b43764f8b4ba93b5a0bbd2d9a9de6e2b24271b9a5/tomli-2.4.0-cp314-cp314t-win_amd64.whl", hash = "sha256:2add28aacc7425117ff6364fe9e06a183bb0251b03f986df0e78e974047571fd", size = 120504, upload-time = "2026-01-11T11:22:35.764Z" }, + { url = "https://files.pythonhosted.org/packages/b3/9f/f1668c281c58cfae01482f7114a4b88d345e4c140386241a1a24dcc9e7bc/tomli-2.4.0-cp314-cp314t-win_arm64.whl", hash = "sha256:2b1e3b80e1d5e52e40e9b924ec43d81570f0e7d09d11081b797bc4692765a3d4", size = 99561, upload-time = "2026-01-11T11:22:36.624Z" }, + { url = "https://files.pythonhosted.org/packages/23/d1/136eb2cb77520a31e1f64cbae9d33ec6df0d78bdf4160398e86eec8a8754/tomli-2.4.0-py3-none-any.whl", hash = "sha256:1f776e7d669ebceb01dee46484485f43a4048746235e683bcdffacdf1fb4785a", size = 14477, upload-time = "2026-01-11T11:22:37.446Z" }, +] + +[[package]] +name = "tomli-w" +version = "1.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/19/75/241269d1da26b624c0d5e110e8149093c759b7a286138f4efd61a60e75fe/tomli_w-1.2.0.tar.gz", hash = "sha256:2dd14fac5a47c27be9cd4c976af5a12d87fb1f0b4512f81d69cce3b35ae25021", size = 7184, upload-time = "2025-01-15T12:07:24.262Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/c7/18/c86eb8e0202e32dd3df50d43d7ff9854f8e0603945ff398974c1d91ac1ef/tomli_w-1.2.0-py3-none-any.whl", hash = "sha256:188306098d013b691fcadc011abd66727d3c414c571bb01b1a174ba8c983cf90", size = 6675, upload-time = "2025-01-15T12:07:22.074Z" }, +] + +[[package]] +name = "types-requests" +version = "2.32.4.20260107" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "urllib3", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0f/f3/a0663907082280664d745929205a89d41dffb29e89a50f753af7d57d0a96/types_requests-2.32.4.20260107.tar.gz", hash = "sha256:018a11ac158f801bfa84857ddec1650750e393df8a004a8a9ae2a9bec6fcb24f", size = 23165, upload-time = "2026-01-07T03:20:54.091Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1c/12/709ea261f2bf91ef0a26a9eed20f2623227a8ed85610c1e54c5805692ecb/types_requests-2.32.4.20260107-py3-none-any.whl", hash = "sha256:b703fe72f8ce5b31ef031264fe9395cac8f46a04661a79f7ed31a80fb308730d", size = 20676, upload-time = "2026-01-07T03:20:52.929Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, +] + +[[package]] +name = "typing-inspection" +version = "0.4.2" +source = { registry = 
"https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" }, +] + +[[package]] +name = "urllib3" +version = "2.6.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c7/24/5f1b3bdffd70275f6661c76461e25f024d5a38a46f04aaca912426a2b1d3/urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed", size = 435556, upload-time = "2026-01-07T16:24:43.925Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4", size = 131584, upload-time = "2026-01-07T16:24:42.685Z" }, +] + +[[package]] +name = "uv" +version = "0.9.26" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ff/6a/ef4ea19097ecdfd7df6e608f93874536af045c68fd70aa628c667815c458/uv-0.9.26.tar.gz", hash = "sha256:8b7017a01cc48847a7ae26733383a2456dd060fc50d21d58de5ee14f6b6984d7", size = 3790483, upload-time = "2026-01-15T20:51:33.582Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/fe/e1/5c0b17833d5e3b51a897957348ff8d937a3cdfc5eea5c4a7075d8d7b9870/uv-0.9.26-py3-none-linux_armv6l.whl", hash = "sha256:7dba609e32b7bd13ef81788d580970c6ff3a8874d942755b442cffa8f25dba57", size = 22638031, upload-time = "2026-01-15T20:51:44.187Z" }, + { url = "https://files.pythonhosted.org/packages/a5/8b/68ac5825a615a8697e324f52ac0b92feb47a0ec36a63759c5f2931f0c3a0/uv-0.9.26-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b815e3b26eeed00e00f831343daba7a9d99c1506883c189453bb4d215f54faac", size = 21507805, upload-time = "2026-01-15T20:50:42.574Z" }, + { url = "https://files.pythonhosted.org/packages/0d/a2/664a338aefe009f6e38e47455ee2f64a21da7ad431dbcaf8b45d8b1a2b7a/uv-0.9.26-py3-none-macosx_11_0_arm64.whl", hash = "sha256:1b012e6c4dfe767f818cbb6f47d02c207c9b0c82fee69a5de6d26ffb26a3ef3c", size = 20249791, upload-time = "2026-01-15T20:50:49.835Z" }, + { url = "https://files.pythonhosted.org/packages/ba/3d/b8186a7dec1346ca4630c674b760517d28bffa813a01965f4b57596bacf3/uv-0.9.26-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.musllinux_1_1_aarch64.whl", hash = "sha256:ea296b700d7c4c27acdfd23ffaef2b0ecdd0aa1b58d942c62ee87df3b30f06ac", size = 22039108, upload-time = "2026-01-15T20:51:00.675Z" }, + { url = "https://files.pythonhosted.org/packages/aa/a9/687fd587e7a3c2c826afe72214fb24b7f07b0d8b0b0300e6a53b554180ea/uv-0.9.26-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.musllinux_1_1_armv7l.whl", hash = "sha256:1ba860d2988efc27e9c19f8537a2f9fa499a8b7ebe4afbe2d3d323d72f9aee61", size = 22174763, upload-time = "2026-01-15T20:50:46.471Z" }, + { url = "https://files.pythonhosted.org/packages/38/69/7fa03ee7d59e562fca1426436f15a8c107447d41b34e0899e25ee69abfad/uv-0.9.26-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8610bdfc282a681a0a40b90495a478599aa3484c12503ef79ef42cd271fd80fe", size = 22189861, upload-time = "2026-01-15T20:51:15.618Z" }, + { url = 
"https://files.pythonhosted.org/packages/10/2d/4be446a2ec09f3c428632b00a138750af47c76b0b9f987e9a5b52fef0405/uv-0.9.26-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c4bf700bd071bd595084b9ee0a8d77c6a0a10ca3773d3771346a2599f306bd9c", size = 23005589, upload-time = "2026-01-15T20:50:57.185Z" }, + { url = "https://files.pythonhosted.org/packages/c3/16/860990b812136695a63a8da9fb5f819c3cf18ea37dcf5852e0e1b795ca0d/uv-0.9.26-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:89a7beea1c692f76a6f8da13beff3cbb43f7123609e48e03517cc0db5c5de87c", size = 24713505, upload-time = "2026-01-15T20:51:04.366Z" }, + { url = "https://files.pythonhosted.org/packages/01/43/5d7f360d551e62d8f8bf6624b8fca9895cea49ebe5fce8891232d7ed2321/uv-0.9.26-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:182f5c086c7d03ad447e522b70fa29a0302a70bcfefad4b8cd08496828a0e179", size = 24342500, upload-time = "2026-01-15T20:51:47.863Z" }, + { url = "https://files.pythonhosted.org/packages/9b/9c/2bae010a189e7d8e5dc555edcfd053b11ce96fad2301b919ba0d9dd23659/uv-0.9.26-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d8c62a501f13425b4b0ce1dd4c6b82f3ce5a5179e2549c55f4bb27cc0eb8ef8", size = 23222578, upload-time = "2026-01-15T20:51:36.85Z" }, + { url = "https://files.pythonhosted.org/packages/38/16/a07593a040fe6403c36f3b0a99b309f295cbfe19a1074dbadb671d5d4ef7/uv-0.9.26-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7e89798bd3df7dcc4b2b4ac4e2fc11d6b3ff4fe7d764aa3012d664c635e2922", size = 23250201, upload-time = "2026-01-15T20:51:19.117Z" }, + { url = "https://files.pythonhosted.org/packages/23/a0/45893e15ad3ab842db27c1eb3b8605b9b4023baa5d414e67cfa559a0bff0/uv-0.9.26-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:60a66f1783ec4efc87b7e1f9bd66e8fd2de3e3b30d122b31cb1487f63a3ea8b7", size = 22229160, upload-time = "2026-01-15T20:51:22.931Z" }, + { url = 
"https://files.pythonhosted.org/packages/fc/c0/20a597a5c253702a223b5e745cf8c16cd5dd053080f896bb10717b3bedec/uv-0.9.26-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:63c6a1f1187facba1fb45a2fa45396980631a3427ac11b0e3d9aa3ebcf2c73cf", size = 23090730, upload-time = "2026-01-15T20:51:26.611Z" }, + { url = "https://files.pythonhosted.org/packages/40/c9/744537867d9ab593fea108638b57cca1165a0889cfd989981c942b6de9a5/uv-0.9.26-py3-none-musllinux_1_1_i686.whl", hash = "sha256:c6d8650fbc980ccb348b168266143a9bd4deebc86437537caaf8ff2a39b6ea50", size = 22436632, upload-time = "2026-01-15T20:51:12.045Z" }, + { url = "https://files.pythonhosted.org/packages/6b/e2/be683e30262f2cf02dcb41b6c32910a6939517d50ec45f502614d239feb7/uv-0.9.26-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:25278f9298aa4dade38241a93d036739b0c87278dcfad1ec1f57e803536bfc49", size = 23480064, upload-time = "2026-01-15T20:50:53.333Z" }, + { url = "https://files.pythonhosted.org/packages/50/3e/4a7e6bc5db2beac9c4966f212805f1903d37d233f2e160737f0b24780ada/uv-0.9.26-py3-none-win32.whl", hash = "sha256:10d075e0193e3a0e6c54f830731c4cb965d6f4e11956e84a7bed7ed61d42aa27", size = 21000052, upload-time = "2026-01-15T20:51:40.753Z" }, + { url = "https://files.pythonhosted.org/packages/07/5d/eb80c6eff2a9f7d5cf35ec84fda323b74aa0054145db28baf72d35a7a301/uv-0.9.26-py3-none-win_amd64.whl", hash = "sha256:0315fc321f5644b12118f9928086513363ed9b29d74d99f1539fda1b6b5478ab", size = 23684930, upload-time = "2026-01-15T20:51:08.448Z" }, + { url = "https://files.pythonhosted.org/packages/ed/9d/3b2631931649b1783f5024796ca8ad2b42a01a829b9ce1202d973cc7bce5/uv-0.9.26-py3-none-win_arm64.whl", hash = "sha256:344ff38749b6cd7b7dfdfb382536f168cafe917ae3a5aa78b7a63746ba2a905b", size = 22158123, upload-time = "2026-01-15T20:51:30.939Z" }, +] + +[[package]] +name = "virtualenv" +version = "20.36.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "distlib", marker = "sys_platform == 'darwin' or sys_platform == 
'linux' or sys_platform == 'win32'" }, + { name = "filelock", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "platformdirs", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "typing-extensions", marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform == 'win32')" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/aa/a3/4d310fa5f00863544e1d0f4de93bddec248499ccf97d4791bc3122c9d4f3/virtualenv-20.36.1.tar.gz", hash = "sha256:8befb5c81842c641f8ee658481e42641c68b5eab3521d8e092d18320902466ba", size = 6032239, upload-time = "2026-01-09T18:21:01.296Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6a/2a/dc2228b2888f51192c7dc766106cd475f1b768c10caaf9727659726f7391/virtualenv-20.36.1-py3-none-any.whl", hash = "sha256:575a8d6b124ef88f6f51d56d656132389f961062a9177016a50e4f507bbcc19f", size = 6008258, upload-time = "2026-01-09T18:20:59.425Z" }, +] From 311a7da4d3f36fd39fb478d760d9b9dabcb7c530 Mon Sep 17 00:00:00 2001 From: pmalarme Date: Wed, 18 Feb 2026 17:02:00 +0100 Subject: [PATCH 02/90] feat: add CODEOWNERS, enhance workflows, and update documentation --- .github/CODEOWNERS | 14 +++ .github/workflows/checks.yml | 4 - .github/workflows/docker.yml | 67 +++++++++++++ .github/workflows/monorepo-release.yml | 97 +++++++++++++++++++ .github/workflows/release.yml | 118 ++++++++++++++++++++--- DEVELOPMENT.md | 2 +- README.md | 114 ++++++++++++++-------- agents/agent1/README.md | 28 +++++- agents/agent1/pyproject.toml | 1 - pyproject.toml | 25 +++-- scripts/run_tasks_in_agents_if_exists.py | 36 ++++--- 11 files changed, 426 insertions(+), 80 deletions(-) create mode 100644 .github/CODEOWNERS create mode 100644 .github/workflows/docker.yml create mode 100644 .github/workflows/monorepo-release.yml diff --git 
a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 0000000..98cce8e --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1,14 @@ +# CODEOWNERS — uncomment and customize after creating a repo from this template. +# See: https://docs.github.com/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners +# +# Default owners for everything in the repo +# * @your-org/your-team +# +# Agent-specific ownership +# agents/agent1/ @your-org/agent1-team +# +# CI / workflow changes require admin review +# .github/ @your-org/platform-team +# +# Documentation +# docs/ @your-org/docs-team diff --git a/.github/workflows/checks.yml b/.github/workflows/checks.yml index 788fbce..3a393a0 100644 --- a/.github/workflows/checks.yml +++ b/.github/workflows/checks.yml @@ -35,7 +35,3 @@ jobs: - name: Run checks run: uv run poe check - - - name: Run changed-agents lint (fast path) - if: ${{ github.event_name == 'pull_request' }} - run: uv run python scripts/run_tasks_in_changed_agents.py lint diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml new file mode 100644 index 0000000..6603b8f --- /dev/null +++ b/.github/workflows/docker.yml @@ -0,0 +1,67 @@ +name: docker + +on: + workflow_dispatch: + pull_request: + paths: + - "agents/*/Dockerfile" + - "agents/*/src/**" + - "agents/*/pyproject.toml" + push: + branches: ["main"] + paths: + - "agents/*/Dockerfile" + - "agents/*/src/**" + - "agents/*/pyproject.toml" + +permissions: + contents: read + +jobs: + detect: + name: detect agents with Dockerfiles + runs-on: ubuntu-latest + + outputs: + agents: ${{ steps.find.outputs.agents }} + + steps: + - name: Checkout + uses: actions/checkout@v6 + with: + persist-credentials: false + + - name: Find agents with Dockerfiles + id: find + run: | + agents=$(find agents -maxdepth 2 -name Dockerfile -printf '%h\n' \ + | xargs -I{} basename {} \ + | jq -Rcn '[inputs]') + echo "agents=$agents" >> "$GITHUB_OUTPUT" + + build: + name: 
build ${{ matrix.agent }} + needs: detect + if: ${{ needs.detect.outputs.agents != '[]' }} + runs-on: ubuntu-latest + + strategy: + fail-fast: false + matrix: + agent: ${{ fromJson(needs.detect.outputs.agents) }} + + steps: + - name: Checkout + uses: actions/checkout@v6 + with: + persist-credentials: false + + - name: Build Docker image + run: | + docker build \ + -t "${{ matrix.agent }}:ci" \ + -f "agents/${{ matrix.agent }}/Dockerfile" \ + "agents/${{ matrix.agent }}" + + - name: Smoke test + run: docker run --rm "${{ matrix.agent }}:ci" --help || true diff --git a/.github/workflows/monorepo-release.yml b/.github/workflows/monorepo-release.yml new file mode 100644 index 0000000..1de50af --- /dev/null +++ b/.github/workflows/monorepo-release.yml @@ -0,0 +1,97 @@ +name: monorepo-release + +on: + workflow_dispatch: + push: + branches: ["main"] + paths: + - "pyproject.toml" + - "shared_tasks.toml" + - ".pre-commit-config.yaml" + - "scripts/**" + - ".github/**" + - "docs/source/**" + - "docs/manual/**" + - "DEVELOPMENT.md" + - "README.md" + - "CODING_STANDARDS.md" + - "CONTRIBUTING.md" + - "SECURITY.md" + - "CODE_OF_CONDUCT.md" + +permissions: + contents: write + +jobs: + release: + name: tag and release monorepo + runs-on: ubuntu-latest + + steps: + - name: Checkout + uses: actions/checkout@v6 + with: + fetch-depth: 0 + persist-credentials: true + + - name: Read monorepo version + id: version + run: | + VERSION=$(grep -m1 '^version' pyproject.toml \ + | sed 's/version *= *"\(.*\)"/\1/') + echo "version=${VERSION}" >> "$GITHUB_OUTPUT" + echo "tag=v${VERSION}" >> "$GITHUB_OUTPUT" + echo "Monorepo version: ${VERSION}" + + - name: Check if tag exists + id: check + run: | + TAG="${{ steps.version.outputs.tag }}" + if git rev-parse "refs/tags/${TAG}" >/dev/null 2>&1; then + echo "::notice::Tag ${TAG} already exists — skipping release." 
+ echo "exists=true" >> "$GITHUB_OUTPUT" + else + echo "exists=false" >> "$GITHUB_OUTPUT" + fi + + - name: Create tag and release + if: steps.check.outputs.exists == 'false' + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + TAG="${{ steps.version.outputs.tag }}" + VERSION="${{ steps.version.outputs.version }}" + + git config user.name "github-actions[bot]" + git config user.email "github-actions[bot]@users.noreply.github.com" + + # Create annotated tag + git tag -a "$TAG" -m "Monorepo release v${VERSION}" + git push origin "$TAG" + + # Generate release notes from merged PRs since previous monorepo tag + PREV_TAG=$(git tag --list "v*" --sort=-v:refname \ + | grep -v "^${TAG}$" | head -1 || true) + + NOTES="" + if [ -n "$PREV_TAG" ]; then + # Extract PR numbers from commit messages + PR_NUMS=$(git log "${PREV_TAG}..${TAG}" --oneline \ + | grep -oP '#\K\d+' | sort -un || true) + + for NUM in $PR_NUMS; do + TITLE=$(gh pr view "$NUM" --json title --jq '.title' 2>/dev/null || true) + if [ -n "$TITLE" ]; then + NOTES="${NOTES}- ${TITLE} (#${NUM})"$'\n' + fi + done + fi + + # Fallback if no PRs found + if [ -z "$NOTES" ]; then + NOTES="- Initial monorepo release v${VERSION}" + fi + + gh release create "$TAG" \ + --title "Monorepo v${VERSION}" \ + --notes "## Template v${VERSION}"$'\n\n'"### Changes"$'\n\n'"${NOTES}" diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index bc03407..393acdd 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -2,23 +2,27 @@ name: release on: workflow_dispatch: - release: - types: [published] + push: + branches: ["main"] + paths: + - "agents/*/pyproject.toml" + - "agents/*/src/**" permissions: - contents: read + contents: write # create tags, releases, and upload assets packages: write jobs: - publish: - name: build and publish + release: + name: build and release runs-on: ubuntu-latest steps: - name: Checkout uses: actions/checkout@v6 with: - persist-credentials: false + 
fetch-depth: 0 + persist-credentials: true - name: Set up uv uses: astral-sh/setup-uv@v5 @@ -32,10 +36,100 @@ jobs: - name: Build changed agent packages run: uv run poe build-changed - - name: Publish to GitHub Packages + - name: Tag and release built wheels env: - UV_PUBLISH_URL: https://nuget.pkg.github.com/${{ github.repository_owner }}/upload - UV_PUBLISH_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: uv run poe publish - # Each agent has its own version; the registry rejects duplicate - # versions, so only agents with bumped versions are actually uploaded. + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + shopt -s nullglob + WHEELS=(dist/*.whl) + + if [ ${#WHEELS[@]} -eq 0 ]; then + echo "::notice::No wheels in dist/ — nothing to release." + exit 0 + fi + + git config user.name "github-actions[bot]" + git config user.email "github-actions[bot]@users.noreply.github.com" + + for WHL in "${WHEELS[@]}"; do + # Wheel filename: {name}-{version}-{python}-{abi}-{platform}.whl + BASENAME=$(basename "$WHL") + NAME=$(echo "$BASENAME" | cut -d- -f1) + VERSION=$(echo "$BASENAME" | cut -d- -f2) + TAG="${NAME}-v${VERSION}" + + # Skip if tag already exists + if git rev-parse "refs/tags/${TAG}" >/dev/null 2>&1; then + echo "::notice::Tag ${TAG} already exists — skipping." 
+ continue + fi + + echo "::group::Releasing ${TAG}" + + # Create annotated tag + git tag -a "$TAG" -m "Release ${NAME} v${VERSION}" + git push origin "$TAG" + + # Generate changelog from merged PRs since previous tag for this agent + PREV_TAG=$(git tag --list "${NAME}-v*" --sort=-v:refname \ + | grep -v "^${TAG}$" | head -1 || true) + + NOTES="" + if [ -n "$PREV_TAG" ]; then + # Extract PR numbers from commit messages (covers squash and merge commits) + PR_NUMS=$(git log "${PREV_TAG}..${TAG}" --oneline -- "agents/${NAME}/" \ + | grep -oP '#\K\d+' | sort -un || true) + + for NUM in $PR_NUMS; do + TITLE=$(gh pr view "$NUM" --json title --jq '.title' 2>/dev/null || true) + if [ -n "$TITLE" ]; then + NOTES="${NOTES}- ${TITLE} (#${NUM})"$'\n' + fi + done + fi + + # Fallback if no PRs found + if [ -z "$NOTES" ]; then + NOTES="- Initial release of ${NAME} ${VERSION}" + fi + + # Create GitHub release with wheel + sdist attached + ASSETS=("$WHL") + SDIST="dist/${NAME}-${VERSION}.tar.gz" + [ -f "$SDIST" ] && ASSETS+=("$SDIST") + + gh release create "$TAG" "${ASSETS[@]}" \ + --title "${NAME} v${VERSION}" \ + --notes "## ${NAME} v${VERSION}"$'\n\n'"### Changes"$'\n\n'"${NOTES}" + + echo "::endgroup::" + done + + # ── Publish to package registry ───────────────────────────────── + # Uncomment ONE of the blocks below and configure the matching + # secret in your repository settings. + # Also uncomment the corresponding [[tool.uv.index]] block in + # pyproject.toml so that `uv publish` knows the target URL. + # + # Azure Artifacts (recommended for private packages): + # 1. Create a feed: https://learn.microsoft.com/azure/devops/artifacts/quickstarts/python-packages + # 2. Generate a PAT with Packaging > Read & Write scope. + # 3. Add the PAT as a repository secret named AZURE_ARTIFACTS_TOKEN. + # 4. Uncomment the block below and the [[tool.uv.index]] in pyproject.toml. 
+ # + # - name: Publish to Azure Artifacts + # env: + # UV_PUBLISH_URL: https://pkgs.dev.azure.com///_packaging//pypi/upload/ + # UV_PUBLISH_TOKEN: ${{ secrets.AZURE_ARTIFACTS_TOKEN }} + # run: uv publish + # + # PyPI (public packages): + # 1. Create an API token: https://pypi.org/manage/account/token/ + # 2. Add it as a repository secret named PYPI_TOKEN. + # 3. Uncomment the block below and the [[tool.uv.index]] in pyproject.toml. + # + # - name: Publish to PyPI + # env: + # UV_PUBLISH_URL: https://upload.pypi.org/legacy/ + # UV_PUBLISH_TOKEN: ${{ secrets.PYPI_TOKEN }} + # run: uv publish diff --git a/DEVELOPMENT.md b/DEVELOPMENT.md index 5464a2e..977b360 100644 --- a/DEVELOPMENT.md +++ b/DEVELOPMENT.md @@ -100,7 +100,7 @@ This will create a virtual environment in `.venv/`, install all dependencies, an uv run poe setup --python 3.13 ``` -The default version for the setup Poe task is Python 3.13 and defined in [pyproject.toml](../pyproject.toml) under `[tool.poe.tasks.setup]`. +The default version for the setup Poe task is Python 3.13 and defined in [pyproject.toml](pyproject.toml) under `[tool.poe.tasks.setup]`. ### VS Code Setup diff --git a/README.md b/README.md index ddddb56..37abd83 100644 --- a/README.md +++ b/README.md @@ -56,8 +56,10 @@ flowchart TB subgraph L6["6. 
Release"] direction LR - R1[Build changed agents only] - R2[Publish to GitHub Packages] + R1[Build changed agents] + R2[Tag + GitHub Release] + R3[Publish to registry] + R4[Monorepo tag + release] end L1 --> L2 @@ -76,7 +78,7 @@ Each layer catches different classes of issues: | **CI quality gate** | On PR / push | Full repo-wide type safety, test regressions, code quality | | **CI security** | On PR / push / schedule | Dataflow vulnerabilities, outdated dependencies, security posture gaps | | **Copilot Review** | On PR (after security scan) | AI-powered code review with suggestions and inline comments | -| **Release** | On GitHub release | Builds and publishes only changed agents to the package registry | +| **Release** | On push to main or manual | Agent release: builds changed agents, creates `-v` tags with wheel assets. Monorepo release: tags shared infra changes as `v` | --- @@ -114,7 +116,8 @@ Repo root │ ├─ workflows/ # GitHub Actions workflows │ │ ├─ checks.yml # lint, type-check, test on PRs and pushes │ │ ├─ docs.yml # build Sphinx docs, deploy to GitHub Pages -│ │ ├─ release.yml # build and publish packages +│ │ ├─ release.yml # build and publish agent packages +│ │ ├─ monorepo-release.yml # tag and release shared monorepo infra │ │ ├─ codeql-analysis.yml # CodeQL security scanning │ │ ├─ security-review.md # agentic workflow (security review) │ │ └─ security-review.lock.yml # compiled agentic workflow (generated) @@ -216,13 +219,10 @@ flowchart TD S1 --> S2["Config validation
YAML · TOML · JSON"] S2 --> S3["AST check
(syntax errors)"] S3 --> S4["pyupgrade
(modern Python 3.10+)"] - S4 --> S5["Ruff format + lint"] - S5 --> S6["MyPy
(scoped to staged)"] - S6 --> S7["Bandit
(security scan)"] - S7 --> S8["Markdown fence
code check"] - S8 --> S9["nbQA
(notebook parse)"] - S9 --> S10["uv-lock sync
(if manifests changed)"] - S10 --> S11["poe pre-commit-check
(Pyright staged)"] + S4 --> S5["poe pre-commit-check
Ruff fmt + lint, Pyright,
Markdown lint (staged)"] + S5 --> S6["Bandit
(security scan)"] + S6 --> S7["nbQA
(notebook parse)"] + S7 --> S8["uv-lock sync
(if manifests changed)"] ``` ### CI workflows — on every PR and push @@ -253,15 +253,16 @@ flowchart TD SR4 --> SR5["Assign Copilot
as PR reviewer"] ``` -### Release workflow — on GitHub release +### Release workflow — on push to main or manual dispatch ```mermaid flowchart LR - R0["GitHub release
(published)"] --> R1["Checkout +
uv setup"] - R1 --> R2["uv sync
--all-extras --dev"] - R2 --> R3["poe build-changed
(changed agents only)"] - R3 --> R4["poe publish
(uv publish → dist/)"] - R4 --> R5["GitHub Packages
(or configured registry)"] + R0["Push to main
(agent changes)"] --> R1["poe build-changed"] + R1 --> R2["Iterate wheels
in dist/"] + R2 --> R3["Skip if tag
already exists"] + R3 --> R4["Create tag
agent1-v1.2.0"] + R4 --> R5["GitHub release
PR changelog +
.whl + .tar.gz"] + R5 --> R6["Publish to
registry"] ``` ### Docs workflow — on push to main @@ -374,7 +375,10 @@ uv run --package [args] 1. Bump the version in `agents//pyproject.toml`. 2. Merge to main. -3. Create a GitHub release — the release workflow builds and publishes automatically. + +The release workflow automatically builds changed agents, creates a `-v` tag and GitHub release with the wheel attached, and generates release notes from merged PRs. + +For shared infrastructure changes (scripts, workflows, Copilot instructions, docs), bump the `version` in the root `pyproject.toml` — the [monorepo release workflow](.github/workflows/monorepo-release.yml) handles tagging and releasing. --- @@ -384,35 +388,67 @@ Each agent is an independent package with its own version, enabling independent - `poe build` — cleans `dist/` and builds **all** agent packages. - `poe build-changed` — cleans `dist/` and builds only agents with **changed files**. -- `poe publish` — uploads everything in `dist/`. The registry rejects duplicate versions, so only agents with bumped versions actually get uploaded. +- `poe publish` — uploads everything in `dist/` to the configured registry. + +### Versioning convention + +The repository uses **two versioning tracks**: + +| Track | Version source | Tag format | Example | +| --- | --- | --- | --- | +| **Monorepo** | root `pyproject.toml` | `v` | `v0.2.0` | +| **Agent** | `agents//pyproject.toml` | `-v` | `agent1-v1.0.0` | + +Both use semantic versioning. Tags are created automatically by their respective release workflows when the version is bumped and merged to `main`. + +``` +v0.1.0 # monorepo release (shared infra) +v0.2.0 # monorepo release +agent1-v1.0.0 # agent release +agent1-v1.1.0-rc.1 # agent pre-release +agent2-v0.3.0 # different agent, independent version +``` + +### Agent release workflow + +The [agent release workflow](.github/workflows/release.yml) triggers on pushes to `main` that change agent sources or pyproject files, and on `workflow_dispatch`. It: + +1. 
Runs `poe build-changed` to build only agents with modified files. +2. Iterates over the wheels in `dist/`, extracting agent name and version from each filename. +3. Skips any agent whose `-v` tag already exists. +4. Creates an annotated tag and pushes it. +5. Creates a GitHub release with the `.whl` and `.tar.gz` attached, and release notes generated from merged PRs (not individual commits) that touched `agents//`. +6. Publishes the built packages to the configured registry (see below). + +### Monorepo release workflow + +The [monorepo release workflow](.github/workflows/monorepo-release.yml) triggers on pushes to `main` that change shared infrastructure — root `pyproject.toml`, `shared_tasks.toml`, scripts, workflows, Copilot instructions, docs config, or project documentation — and on `workflow_dispatch`. It: -The [release workflow](.github/workflows/release.yml) runs on GitHub release events and `workflow_dispatch`. It uses `build-changed` → `publish` so only modified agents are built and published. +1. Reads the version from the root `pyproject.toml`. +2. Skips if a `v` tag already exists. +3. Creates an annotated tag and pushes it. +4. Creates a GitHub release with release notes generated from merged PRs. -### Changing the publish target +### Setting up publishing -By default, packages are published to **GitHub Packages**. To publish to a different registry (PyPI, Artifactory, Azure Artifacts, etc.), update two places: +Publishing is **commented out** by default — the workflow only creates tags and GitHub releases. To enable it: -1. **`pyproject.toml`** — update the `[[tool.uv.index]]` section: +#### Azure Artifacts (recommended for private packages) - ```toml - [[tool.uv.index]] - name = "pypi" # or your registry name - url = "https://pypi.org/simple/" - publish-url = "https://upload.pypi.org/legacy/" - explicit = true - ``` +1. [Create a feed](https://learn.microsoft.com/azure/devops/artifacts/quickstarts/python-packages) in your Azure DevOps organization. 
+2. Generate a Personal Access Token (PAT) with **Packaging > Read & Write** scope. +3. Add the PAT as a repository secret named `AZURE_ARTIFACTS_TOKEN` (Settings → Secrets and variables → Actions). +4. Uncomment the "Publish to Azure Artifacts" block in `.github/workflows/release.yml`. +5. Uncomment the Azure `[[tool.uv.index]]` block in `pyproject.toml` and fill in your org/project/feed. -2. **`.github/workflows/release.yml`** — update the publish step environment variables: +#### PyPI (public packages) - ```yaml - - name: Publish to PyPI - env: - UV_PUBLISH_URL: https://upload.pypi.org/legacy/ - UV_PUBLISH_TOKEN: ${{ secrets.PYPI_TOKEN }} - run: uv run poe publish - ``` +1. [Create an API token](https://pypi.org/manage/account/token/) on PyPI. +2. Add it as a repository secret named `PYPI_TOKEN`. +3. Uncomment the "Publish to PyPI" block in `.github/workflows/release.yml`. +4. Uncomment the PyPI `[[tool.uv.index]]` block in `pyproject.toml`. - For PyPI, create an API token and store it as a repository secret (`PYPI_TOKEN`). For GitHub Packages, the built-in `GITHUB_TOKEN` is used automatically. +> **Note:** GitHub Packages does **not** support a Python/pip registry. --- diff --git a/agents/agent1/README.md b/agents/agent1/README.md index aa22dd1..d2b24e0 100644 --- a/agents/agent1/README.md +++ b/agents/agent1/README.md @@ -25,10 +25,32 @@ Example agent built from the python-agent-template. Use this as a starting point - Build the container (from `agents/agent1`): `docker build -t agent1:latest .`. - Run the container: `docker run --rm agent1:latest agent1 Bob` (override args as needed). 
-## Publish the package to GitHub Packages +### Push to Azure Container Registry + +```sh +# Log in to ACR +az acr login --name + +# Tag and push +docker tag agent1:latest .azurecr.io/agent1: +docker push .azurecr.io/agent1: +``` + +## Publish the package - Configure env vars for publishing: - - `export UV_PUBLISH_URL=https://pypi.pkg.github.com/` - - `export UV_PUBLISH_TOKEN=` + + **Azure Artifacts** (recommended for private packages): + ```sh + export UV_PUBLISH_URL=https://pkgs.dev.azure.com///_packaging//pypi/upload/ + export UV_PUBLISH_TOKEN= + ``` + + **PyPI** (public packages): + ```sh + export UV_PUBLISH_URL=https://upload.pypi.org/legacy/ + export UV_PUBLISH_TOKEN= + ``` + - Publish from the agent dir (`agents/agent1`): `uv run poe publish` (uploads the built wheel/sdist). From repo root use `uv run poe -C agents/agent1 publish`. - Package namespace: `python_agent_template.agents.agent1` uses a namespace root without `__init__.py` so multiple agents can coexist (PyPA guidance: https://packaging.python.org/en/latest/guides/packaging-namespace-packages/). 
diff --git a/agents/agent1/pyproject.toml b/agents/agent1/pyproject.toml index da96d36..3141f2a 100644 --- a/agents/agent1/pyproject.toml +++ b/agents/agent1/pyproject.toml @@ -22,7 +22,6 @@ classifiers = [ "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", - "Typing :: Typed", ] [project.scripts] diff --git a/pyproject.toml b/pyproject.toml index b549b57..bcbba51 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -147,7 +147,7 @@ convention = "google" [tool.pytest.ini_options] testpaths = 'agents/**/tests' -addopts = "-ra -q -r fEX" +addopts = "-ra -q -r fEX --cov-fail-under=80" filterwarnings = [] timeout = 120 asyncio_mode = "auto" @@ -257,11 +257,24 @@ sequence = [ ] args = [{ name = "files", default = ".", positional = true, multiple = true }] -[[tool.uv.index]] -name = "github" -url = "https://nuget.pkg.github.com/pmalarme/index.json" -publish-url = "https://nuget.pkg.github.com/pmalarme/upload" -explicit = true +# ── Package registry ────────────────────────────────────────────────────────── +# GitHub Packages does NOT support a Python/pip registry. +# Uncomment ONE of the blocks below and configure the matching +# UV_PUBLISH_URL / UV_PUBLISH_TOKEN in .github/workflows/release.yml. 
+# +# Azure Artifacts (recommended for private packages): +# [[tool.uv.index]] +# name = "azure" +# url = "https://pkgs.dev.azure.com///_packaging//pypi/simple/" +# publish-url = "https://pkgs.dev.azure.com///_packaging//pypi/upload/" +# explicit = true +# +# PyPI (public packages): +# [[tool.uv.index]] +# name = "pypi" +# url = "https://pypi.org/simple/" +# publish-url = "https://upload.pypi.org/legacy/" +# explicit = true [build-system] requires = ["flit-core>=3.12.0,<4"] diff --git a/scripts/run_tasks_in_agents_if_exists.py b/scripts/run_tasks_in_agents_if_exists.py index 1d1a19e..b9676f2 100644 --- a/scripts/run_tasks_in_agents_if_exists.py +++ b/scripts/run_tasks_in_agents_if_exists.py @@ -35,6 +35,7 @@ from __future__ import annotations +import argparse import sys from pathlib import Path @@ -42,35 +43,42 @@ from rich import print from utils.task_utils import discover_projects, extract_poe_tasks -MIN_ARGS = 2 + +def _parse_args(argv: list[str] | None = None) -> argparse.Namespace: + """Parse CLI arguments for running Poe tasks across agents.""" + parser = argparse.ArgumentParser( + description="Run a named Poe task in each agent that defines it.", + ) + parser.add_argument("task", help="Poe task name to run (e.g. lint, test, build)") + parser.add_argument( + "extra", + nargs="*", + help="Extra arguments forwarded to the Poe task", + ) + return parser.parse_args(argv) def main() -> None: """Run a requested Poe task in each agent that defines it. - If agent names are provided, only those under agents/ are considered; otherwise all workspace members. - - Args: - None. Parses CLI args: ``task`` (required). + Parses CLI args via argparse: ``task`` (required) and optional extra + arguments forwarded to the underlying Poe task. 
""" + args = _parse_args() pyproject_file = Path(__file__).resolve().parent.parent / "pyproject.toml" projects = discover_projects(pyproject_file) - if len(sys.argv) < MIN_ARGS: - print("Please provide a task name") - sys.exit(1) - - task_name = sys.argv[1] + cli_args = [args.task, *args.extra] for project in projects: tasks = extract_poe_tasks(project / "pyproject.toml") - if task_name in tasks: - print(f"Running task {task_name} in {project}") + if args.task in tasks: + print(f"Running task {args.task} in {project}") app = PoeThePoet(cwd=project) - result = app(cli_args=sys.argv[1:]) + result = app(cli_args=cli_args) if result: sys.exit(result) else: - print(f"Task {task_name} not found in {project}") + print(f"Task {args.task} not found in {project}") if __name__ == "__main__": From ae8a741f68388bcc55cf01fdf342a829a3b4f760 Mon Sep 17 00:00:00 2001 From: pmalarme Date: Wed, 18 Feb 2026 18:45:00 +0100 Subject: [PATCH 03/90] feat: enhance documentation and improve code structure in workflows and scripts --- .github/workflows/security-review.lock.yml | 22 ++-------------------- .github/workflows/security-review.md | 10 +++++----- agents/agent1/docs/source/conf.py | 15 +++++++++------ docs/source/conf.py | 13 ++++++++----- scripts/check_md_code_blocks.py | 4 +++- 5 files changed, 27 insertions(+), 37 deletions(-) diff --git a/.github/workflows/security-review.lock.yml b/.github/workflows/security-review.lock.yml index 76120ae..443ec27 100644 --- a/.github/workflows/security-review.lock.yml +++ b/.github/workflows/security-review.lock.yml @@ -23,11 +23,7 @@ # # Automated security review for pull requests. Analyzes changed files against 15 security posture categories and posts inline review comments on findings, then requests Copilot code review. 
# -# Resolved workflow manifest: -# Imports: -# - ../agents/security-reviewer.agent.md -# -# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"eb4207411f27c5086812defd1c381fa0ac703b11169172ae6b0f2ef12abc08b2"} +# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"d0e48e418a4ef46187a36e4016998439ab7a03812880f4c080fd947047878f30"} name: "Security Review" "on": @@ -180,9 +176,6 @@ jobs: GH_AW_PROMPT_EOF cat << 'GH_AW_PROMPT_EOF' >> "$GH_AW_PROMPT" - {{#runtime-import .github/agents/security-reviewer.agent.md}} - GH_AW_PROMPT_EOF - cat << 'GH_AW_PROMPT_EOF' >> "$GH_AW_PROMPT" {{#runtime-import .github/workflows/security-review.md}} GH_AW_PROMPT_EOF - name: Interpolate variables and render templates @@ -281,17 +274,6 @@ jobs: uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 with: persist-credentials: false - - name: Merge remote .github folder - uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 - env: - GH_AW_AGENT_FILE: ".github/agents/security-reviewer.agent.md" - GH_AW_AGENT_IMPORT_SPEC: "../agents/security-reviewer.agent.md" - with: - script: | - const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); - setupGlobals(core, github, context, exec, io); - const { main } = require('/opt/gh-aw/actions/merge_remote_agent_github_folder.cjs'); - await main(); - name: Create gh-aw temp directory run: bash /opt/gh-aw/actions/create_gh_aw_tmp_dir.sh - name: Configure Git credentials @@ -772,7 +754,7 @@ jobs: run: | set -o pipefail sudo -E awf --env-all --container-workdir "${GITHUB_WORKSPACE}" --allow-domains 
api.business.githubcopilot.com,api.enterprise.githubcopilot.com,api.github.com,api.githubcopilot.com,api.individual.githubcopilot.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,github.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,ppa.launchpad.net,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,telemetry.enterprise.githubcopilot.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --enable-host-access --image-tag 0.20.0 --skip-pull --enable-api-proxy \ - -- /bin/bash -c '/usr/local/bin/copilot --add-dir /tmp/gh-aw/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --add-dir "${GITHUB_WORKSPACE}" --disable-builtin-mcps --allow-all-tools --allow-all-paths --share /tmp/gh-aw/sandbox/agent/logs/conversation.md --prompt "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"${GH_AW_MODEL_AGENT_COPILOT:+ --model "$GH_AW_MODEL_AGENT_COPILOT"}' 2>&1 | tee -a /tmp/gh-aw/agent-stdio.log + -- /bin/bash -c '/usr/local/bin/copilot --add-dir /tmp/gh-aw/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --add-dir "${GITHUB_WORKSPACE}" --disable-builtin-mcps --agent security-reviewer --allow-all-tools --allow-all-paths --share /tmp/gh-aw/sandbox/agent/logs/conversation.md --prompt "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"${GH_AW_MODEL_AGENT_COPILOT:+ --model "$GH_AW_MODEL_AGENT_COPILOT"}' 2>&1 | tee -a /tmp/gh-aw/agent-stdio.log env: COPILOT_AGENT_RUNNER_TYPE: STANDALONE COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }} diff --git 
a/.github/workflows/security-review.md b/.github/workflows/security-review.md index 26dfdec..bf716ed 100644 --- a/.github/workflows/security-review.md +++ b/.github/workflows/security-review.md @@ -1,6 +1,5 @@ --- -description: > - Automated security review for pull requests. Analyzes changed files against +description: Automated security review for pull requests. Analyzes changed files against 15 security posture categories and posts inline review comments on findings, then requests Copilot code review. @@ -8,13 +7,14 @@ on: pull_request: types: [opened, synchronize] -imports: - - ../agents/security-reviewer.agent.md - permissions: contents: read pull-requests: read +engine: + id: copilot + agent: security-reviewer + tools: github: toolsets: [repos, pull_requests] diff --git a/agents/agent1/docs/source/conf.py b/agents/agent1/docs/source/conf.py index 7a06132..9a5b180 100644 --- a/agents/agent1/docs/source/conf.py +++ b/agents/agent1/docs/source/conf.py @@ -3,12 +3,13 @@ import logging import sys from pathlib import Path +from typing import Any logger = logging.getLogger(__name__) -try: +if sys.version_info >= (3, 11): import tomllib -except ModuleNotFoundError: # Python < 3.11 +else: try: import tomli as tomllib # type: ignore[import-not-found] except ModuleNotFoundError: @@ -24,8 +25,8 @@ def _find_upwards(start: Path, marker: str = "pyproject.toml") -> Path: return parent logger.debug("%s not found starting at %s", marker, start) err = FileNotFoundError(marker) - if hasattr(err, "add_note"): - err.add_note(f"search start: {start}") + if hasattr(err, "add_note"): # Python >= 3.11 + err.add_note(f"search start: {start}") # pyright: ignore[reportUnknownMemberType,reportAttributeAccessIssue] raise err @@ -47,7 +48,7 @@ def _get_project_version(default: str = "0.0.0") -> str: try: with pyproject_path.open("rb") as f: - data = tomllib.load(f) + data: dict[str, Any] = tomllib.load(f) except OSError as exc: logger.warning("Failed to read %s; falling back to default 
version.", pyproject_path, exc_info=exc) return default @@ -55,7 +56,9 @@ def _get_project_version(default: str = "0.0.0") -> str: logger.warning("Failed to parse %s; falling back to default version.", pyproject_path, exc_info=exc) return default - version = data.get("project", {}).get("version") or data.get("tool", {}).get("poetry", {}).get("version") + version: str = ( + data.get("project", {}).get("version") or data.get("tool", {}).get("poetry", {}).get("version") or default + ) return version or default diff --git a/docs/source/conf.py b/docs/source/conf.py index 1b0e5b5..0d7f972 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -3,10 +3,11 @@ import logging import sys from pathlib import Path +from typing import Any -try: +if sys.version_info >= (3, 11): import tomllib -except ModuleNotFoundError: # Python < 3.11 +else: try: import tomli as tomllib # type: ignore[import-not-found] except ModuleNotFoundError: @@ -30,7 +31,7 @@ def _get_project_version(default: str = "0.0.0") -> str: try: with pyproject_path.open("rb") as f: - data = tomllib.load(f) + data: dict[str, Any] = tomllib.load(f) except OSError as exc: logger.warning("Failed to read %s; falling back to default version.", pyproject_path, exc_info=exc) return default @@ -38,7 +39,9 @@ def _get_project_version(default: str = "0.0.0") -> str: logger.warning("Failed to parse %s; falling back to default version.", pyproject_path, exc_info=exc) return default - version = data.get("project", {}).get("version") or data.get("tool", {}).get("poetry", {}).get("version") + version: str = ( + data.get("project", {}).get("version") or data.get("tool", {}).get("poetry", {}).get("version") or default + ) return version or default @@ -56,7 +59,7 @@ def _get_project_version(default: str = "0.0.0") -> str: if tomllib is not None: # Only enable when the TOML parser (and therefore the extension's deps) is available. # Import is intentionally unused; it fails fast if the dependency stack is missing. 
- import sphinx_autodoc_typehints # noqa: F401 # pyright: ignore[reportMissingImports,reportUnusedImport] + import sphinx_autodoc_typehints # noqa: F401 # pyright: ignore[reportUnusedImport] extensions.append("sphinx_autodoc_typehints") except Exception: diff --git a/scripts/check_md_code_blocks.py b/scripts/check_md_code_blocks.py index cfcac2b..1b426a1 100644 --- a/scripts/check_md_code_blocks.py +++ b/scripts/check_md_code_blocks.py @@ -213,7 +213,9 @@ def check_code_blocks( files_with_errors.append(markdown_file_path) if files_with_errors: - raise RuntimeError("Type checking or linting issues found in the following files:\n" + "\n".join(files_with_errors)) + raise RuntimeError( + "Type checking or linting issues found in the following files:\n" + "\n".join(files_with_errors) + ) def main() -> None: From a4d5d4e9c6b58c8bd313a1103374e5fdadd2b833 Mon Sep 17 00:00:00 2001 From: pmalarme Date: Wed, 18 Feb 2026 20:02:42 +0100 Subject: [PATCH 04/90] feat: add comprehensive documentation and workflows for Python environment setup, testing, and release processes --- .github/actions/setup-python-env/README.md | 48 ++++++++++ .github/actions/setup-python-env/action.yml | 34 +++++++ .github/workflows/codeql-analysis.yml | 2 +- .github/workflows/python-code-quality.yml | 53 +++++++++++ .github/workflows/python-docker-build.yml | 78 ++++++++++++++++ .../{docker.yml => python-docker.yml} | 13 +-- .../workflows/{docs.yml => python-docs.yml} | 13 +-- .github/workflows/python-package-build.yml | 41 ++++++++ .../{release.yml => python-release.yml} | 12 +-- .../{checks.yml => python-tests.yml} | 19 ++-- DEVELOPMENT.md | 18 ++-- README.md | 93 +++++++++++++------ pyproject.toml | 2 +- 13 files changed, 354 insertions(+), 72 deletions(-) create mode 100644 .github/actions/setup-python-env/README.md create mode 100644 .github/actions/setup-python-env/action.yml create mode 100644 .github/workflows/python-code-quality.yml create mode 100644 .github/workflows/python-docker-build.yml 
rename .github/workflows/{docker.yml => python-docker.yml} (87%) rename .github/workflows/{docs.yml => python-docs.yml} (77%) create mode 100644 .github/workflows/python-package-build.yml rename .github/workflows/{release.yml => python-release.yml} (95%) rename .github/workflows/{checks.yml => python-tests.yml} (58%) diff --git a/.github/actions/setup-python-env/README.md b/.github/actions/setup-python-env/README.md new file mode 100644 index 0000000..7dba772 --- /dev/null +++ b/.github/actions/setup-python-env/README.md @@ -0,0 +1,48 @@ +# Setup Python Environment + +Composite GitHub Action that sets up [uv](https://docs.astral.sh/uv/) with a +specific Python version and installs project dependencies via `uv sync`. + +## Inputs + +| Input | Required | Default | Description | +|---|---|---|---| +| `python-version` | No | `"3.13"` | Python version to install (e.g. `"3.13"`, `"3.10"`). | +| `include-docs` | No | `"false"` | When `"true"`, adds `--group docs` to install Sphinx and related packages. | +| `extra-args` | No | `""` | Additional arguments appended to the `uv sync` command. | + +The base command is always `uv sync --all-extras --dev`. The `include-docs` flag +and `extra-args` extend it. 
+ +## Usage + +### Minimal (defaults to Python 3.13) + +```yaml +- uses: ./.github/actions/setup-python-env +``` + +### With a Python version matrix + +```yaml +- uses: ./.github/actions/setup-python-env + with: + python-version: ${{ matrix.python-version }} +``` + +### Including docs dependencies + +```yaml +- uses: ./.github/actions/setup-python-env + with: + include-docs: "true" +``` + +### With extra sync arguments + +```yaml +- uses: ./.github/actions/setup-python-env + with: + include-docs: "true" + extra-args: "--all-packages -U --prerelease=if-necessary-or-explicit" +``` diff --git a/.github/actions/setup-python-env/action.yml b/.github/actions/setup-python-env/action.yml new file mode 100644 index 0000000..de38929 --- /dev/null +++ b/.github/actions/setup-python-env/action.yml @@ -0,0 +1,34 @@ +name: "Setup Python environment" +description: "Set up uv with Python and install project dependencies." + +inputs: + python-version: + description: "Python version to install (e.g. '3.13', '3.10')." + required: false + default: "3.13" + include-docs: + description: "Install the docs dependency group (sphinx, sphinx_autodoc_typehints, …)." + required: false + default: "false" + extra-args: + description: "Additional arguments appended to the `uv sync` command." 
+ required: false + default: "" + +runs: + using: composite + steps: + - name: Set up uv + uses: astral-sh/setup-uv@v5 + with: + python-version: ${{ inputs.python-version }} + enable-cache: true + + - name: Install dependencies + shell: bash + run: | + args="--all-extras --dev" + if [[ "${{ inputs.include-docs }}" == "true" ]]; then + args="$args --group docs" + fi + uv sync $args ${{ inputs.extra-args }} diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index ce8585c..eab7aef 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -4,7 +4,7 @@ on: workflow_dispatch: pull_request: push: - branches: [ "main", "feature*", "fix*" ] + branches: [ "main" ] schedule: - cron: '45 1 * * 1' diff --git a/.github/workflows/python-code-quality.yml b/.github/workflows/python-code-quality.yml new file mode 100644 index 0000000..6263ac3 --- /dev/null +++ b/.github/workflows/python-code-quality.yml @@ -0,0 +1,53 @@ +name: "Python: code quality" + +on: + workflow_dispatch: + pull_request: + branches: ["main", "feature*", "fix*"] + +permissions: + contents: read + +jobs: + code-quality: + name: code quality (python ${{ matrix.python-version }}) + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + python-version: ["3.10", "3.11", "3.12", "3.13"] + + steps: + - name: Checkout + uses: actions/checkout@v6 + with: + persist-credentials: false + + - name: Setup Python environment + uses: ./.github/actions/setup-python-env + with: + python-version: ${{ matrix.python-version }} + # Include docs group so Pyright can resolve sphinx_autodoc_typehints + # imports in docs/source/conf.py files. 
+ include-docs: "true" + + - name: Lock file check + run: uv run poe lock-verify + + - name: Format check + run: uv run poe fmt + + - name: Lint + run: uv run poe lint + + - name: Pyright + run: uv run poe pyright + + - name: MyPy + run: uv run poe mypy + + - name: Bandit security scan + run: uv run poe bandit + + - name: Markdown code block lint + run: uv run poe markdown-code-lint diff --git a/.github/workflows/python-docker-build.yml b/.github/workflows/python-docker-build.yml new file mode 100644 index 0000000..919e8c3 --- /dev/null +++ b/.github/workflows/python-docker-build.yml @@ -0,0 +1,78 @@ +name: "Python: docker build & smoke test" + +on: + workflow_dispatch: + pull_request: + branches: ["main", "feature*", "fix*"] + paths: + - "agents/*/Dockerfile" + - "agents/*/src/**" + - "agents/*/pyproject.toml" + +permissions: + contents: read + +jobs: + detect: + name: detect agents with Dockerfiles + runs-on: ubuntu-latest + + outputs: + agents: ${{ steps.find.outputs.agents }} + + steps: + - name: Checkout + uses: actions/checkout@v6 + with: + fetch-depth: 0 + persist-credentials: false + + - name: Find changed agents with Dockerfiles + id: find + run: | + # Get the base ref to diff against + if [[ "${{ github.event_name }}" == "pull_request" ]]; then + BASE="${{ github.event.pull_request.base.sha }}" + else + BASE="HEAD~1" + fi + + # Find agents that have both a Dockerfile AND changed files + changed_agents=$(git diff --name-only "$BASE" HEAD \ + | grep '^agents/' \ + | cut -d/ -f2 \ + | sort -u \ + | while read -r agent; do + if [[ -f "agents/$agent/Dockerfile" ]]; then + echo "$agent" + fi + done \ + | jq -Rcn '[inputs]') + echo "agents=$changed_agents" >> "$GITHUB_OUTPUT" + + build-and-test: + name: build & smoke test ${{ matrix.agent }} + needs: detect + if: ${{ needs.detect.outputs.agents != '[]' }} + runs-on: ubuntu-latest + + strategy: + fail-fast: false + matrix: + agent: ${{ fromJson(needs.detect.outputs.agents) }} + + steps: + - name: Checkout + uses: 
actions/checkout@v6 + with: + persist-credentials: false + + - name: Build Docker image + run: | + docker build \ + -t "${{ matrix.agent }}:ci" \ + -f "agents/${{ matrix.agent }}/Dockerfile" \ + "agents/${{ matrix.agent }}" + + - name: Smoke test + run: docker run --rm "${{ matrix.agent }}:ci" --help || true diff --git a/.github/workflows/docker.yml b/.github/workflows/python-docker.yml similarity index 87% rename from .github/workflows/docker.yml rename to .github/workflows/python-docker.yml index 6603b8f..4a54052 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/python-docker.yml @@ -1,14 +1,9 @@ -name: docker +name: "Python: docker build & smoke test" on: workflow_dispatch: pull_request: - paths: - - "agents/*/Dockerfile" - - "agents/*/src/**" - - "agents/*/pyproject.toml" - push: - branches: ["main"] + branches: ["main", "feature*", "fix*"] paths: - "agents/*/Dockerfile" - "agents/*/src/**" @@ -39,8 +34,8 @@ jobs: | jq -Rcn '[inputs]') echo "agents=$agents" >> "$GITHUB_OUTPUT" - build: - name: build ${{ matrix.agent }} + build-and-test: + name: build & smoke test ${{ matrix.agent }} needs: detect if: ${{ needs.detect.outputs.agents != '[]' }} runs-on: ubuntu-latest diff --git a/.github/workflows/docs.yml b/.github/workflows/python-docs.yml similarity index 77% rename from .github/workflows/docs.yml rename to .github/workflows/python-docs.yml index bd9efc1..4992552 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/python-docs.yml @@ -1,4 +1,4 @@ -name: docs +name: "Python: docs" on: workflow_dispatch: @@ -31,14 +31,11 @@ jobs: with: persist-credentials: false - - name: Set up uv - uses: astral-sh/setup-uv@v5 + - name: Setup Python environment + uses: ./.github/actions/setup-python-env with: - python-version: "3.13" - enable-cache: true - - - name: Install dependencies (with docs group) - run: uv sync --all-packages --all-extras --dev -U --prerelease=if-necessary-or-explicit --group docs + include-docs: "true" + extra-args: 
"--all-packages -U --prerelease=if-necessary-or-explicit" - name: Build documentation run: uv run python scripts/generate_docs.py diff --git a/.github/workflows/python-package-build.yml b/.github/workflows/python-package-build.yml new file mode 100644 index 0000000..06687ef --- /dev/null +++ b/.github/workflows/python-package-build.yml @@ -0,0 +1,41 @@ +name: "Python: package build" + +on: + workflow_dispatch: + pull_request: + branches: ["main", "feature*", "fix*"] + paths: + - "agents/*/pyproject.toml" + - "agents/*/src/**" + +permissions: + contents: read + +jobs: + build: + name: build changed packages + runs-on: ubuntu-latest + + steps: + - name: Checkout + uses: actions/checkout@v6 + with: + fetch-depth: 0 + persist-credentials: false + + - name: Setup Python environment + uses: ./.github/actions/setup-python-env + + - name: Build changed agent packages + run: uv run poe build-changed + + - name: Verify wheels were produced + run: | + shopt -s nullglob + WHEELS=(dist/*.whl) + if [ ${#WHEELS[@]} -eq 0 ]; then + echo "::notice::No wheels in dist/ — no changed agents to build." 
+ else + echo "Built ${#WHEELS[@]} wheel(s):" + ls -lh dist/ + fi diff --git a/.github/workflows/release.yml b/.github/workflows/python-release.yml similarity index 95% rename from .github/workflows/release.yml rename to .github/workflows/python-release.yml index 393acdd..84c3dee 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/python-release.yml @@ -1,4 +1,4 @@ -name: release +name: "Python: release" on: workflow_dispatch: @@ -24,14 +24,8 @@ jobs: fetch-depth: 0 persist-credentials: true - - name: Set up uv - uses: astral-sh/setup-uv@v5 - with: - python-version: "3.13" - enable-cache: true - - - name: Install dependencies - run: uv sync --all-extras --dev + - name: Setup Python environment + uses: ./.github/actions/setup-python-env - name: Build changed agent packages run: uv run poe build-changed diff --git a/.github/workflows/checks.yml b/.github/workflows/python-tests.yml similarity index 58% rename from .github/workflows/checks.yml rename to .github/workflows/python-tests.yml index 3a393a0..c08caed 100644 --- a/.github/workflows/checks.yml +++ b/.github/workflows/python-tests.yml @@ -1,17 +1,16 @@ -name: checks +name: "Python: tests" on: workflow_dispatch: pull_request: - push: branches: ["main", "feature*", "fix*"] permissions: contents: read jobs: - lint-test: - name: lint, type-check, test (python ${{ matrix.python-version }}) + tests: + name: tests (python ${{ matrix.python-version }}) runs-on: ubuntu-latest strategy: fail-fast: false @@ -24,14 +23,10 @@ jobs: with: persist-credentials: false - - name: Set up uv - uses: astral-sh/setup-uv@v5 + - name: Setup Python environment + uses: ./.github/actions/setup-python-env with: python-version: ${{ matrix.python-version }} - enable-cache: true - - name: Install dependencies - run: uv sync --all-extras --dev - - - name: Run checks - run: uv run poe check + - name: Run tests + run: uv run poe test diff --git a/DEVELOPMENT.md b/DEVELOPMENT.md index a50353b..e24a01f 100644 --- a/DEVELOPMENT.md +++ 
b/DEVELOPMENT.md @@ -125,10 +125,16 @@ This solution is a monorepo hosting multiple Python-based agents. Each agent is ``` Repo root ├─ .github/ # GitHub configuration and automation +│ ├─ actions/ # reusable composite actions +│ │ └─ setup-python-env/ # set up uv + install dependencies │ ├─ workflows/ # GitHub Actions workflows -│ │ ├─ checks.yml # lint, type-check, test on PRs and pushes -│ │ ├─ docs.yml # build Sphinx docs, deploy to GitHub Pages -│ │ ├─ release.yml # build and publish packages +│ │ ├─ python-code-quality.yml # format, lint, type-check on PRs +│ │ ├─ python-tests.yml # test on PRs +│ │ ├─ python-package-build.yml # build changed agent wheels on PR +│ │ ├─ python-docs.yml # build Sphinx docs, deploy to GitHub Pages +│ │ ├─ python-release.yml # build and publish agent packages +│ │ ├─ python-docker-build.yml # build and smoke-test agent Docker images +│ │ ├─ monorepo-release.yml # tag and release shared monorepo infra │ │ ├─ codeql-analysis.yml # CodeQL security scanning │ │ ├─ security-review.md # agentic workflow (security review) │ │ └─ security-review.lock.yml # compiled agentic workflow (generated) @@ -518,7 +524,7 @@ Each agent is an independent package with its own version in its `pyproject.toml #### Release workflow -The [release workflow](.github/workflows/release.yml) runs on GitHub release events and `workflow_dispatch`. It: +The [release workflow](.github/workflows/python-release.yml) triggers on pushes to `main` that change agent sources or pyproject files, and on `workflow_dispatch`. It: 1. Checks out the code and installs dependencies. 2. Runs `poe build-changed` to build only agents with changes. @@ -540,7 +546,7 @@ By default, packages are published to GitHub Packages. To publish to a different explicit = true ``` -2. **`.github/workflows/release.yml`** — update the environment variables in the publish step: +2. 
**`.github/workflows/python-release.yml`** — update the environment variables in the publish step: ```yaml - name: Publish to PyPI @@ -554,7 +560,7 @@ By default, packages are published to GitHub Packages. To publish to a different ### Documentation -Documentation is built using Sphinx and published to GitHub Pages via the [docs workflow](.github/workflows/docs.yml). +Documentation is built using Sphinx and published to GitHub Pages via the [docs workflow](.github/workflows/python-docs.yml). - Install docs deps: `uv run poe docs-install` - Build locally: `uv run poe docs` diff --git a/README.md b/README.md index 37abd83..73aebd3 100644 --- a/README.md +++ b/README.md @@ -31,14 +31,26 @@ flowchart TB H6[uv-lock sync] end - subgraph L3["3. CI - Quality gate"] - direction LR - C1["Ruff format + lint"] - C2[Pyright strict] - C3[MyPy strict] - C4[Bandit] - C5[PyTest + coverage] - C6[Markdown code lint] + subgraph L3["3. CI - Quality gate"] + direction TB + subgraph L3a["Code quality"] + direction LR + C0[Lock verify] + C1["Ruff format + lint"] + C2[Pyright strict] + C3[MyPy strict] + C4[Bandit] + C5[Markdown code lint] + end + subgraph L3b["Tests"] + direction LR + T1[PyTest + coverage] + end + subgraph L3c["Build validation"] + direction LR + B1[Wheel build] + B2[Docker build + smoke test] + end end subgraph L4["4. 
CI - Security Scanning"] @@ -75,8 +87,8 @@ Each layer catches different classes of issues: | --- | --- | --- | | **Editor** | As you type | Type errors, formatting, AI-aware context via custom instructions | | **Pre-commit** | On `git commit` (staged files) | Style drift, security anti-patterns, broken configs, stale lockfiles | -| **CI quality gate** | On PR / push | Full repo-wide type safety, test regressions, code quality | -| **CI security** | On PR / push / schedule | Dataflow vulnerabilities, outdated dependencies, security posture gaps | +| **CI quality gate** | On PR | Lock verification, full repo-wide type safety, code quality, test regressions, coverage, build validation. Split into three sub-layers: *code quality* (lock-verify, format, lint, type checks, Bandit, markdown lint), *tests* (PyTest + coverage), and *build validation* (wheel build + Docker build & smoke test, both path-filtered) | +| **CI security** | On PR / push to main / schedule | Dataflow vulnerabilities, outdated dependencies, security posture gaps | | **Copilot Review** | On PR (after security scan) | AI-powered code review with suggestions and inline comments | | **Release** | On push to main or manual | Agent release: builds changed agents, creates `-v` tags with wheel assets. 
Monorepo release: tags shared infra changes as `v` | @@ -113,10 +125,15 @@ uv run poe check ``` Repo root ├─ .github/ # GitHub configuration and automation +│ ├─ actions/ # reusable composite actions +│ │ └─ setup-python-env/ # set up uv + install dependencies │ ├─ workflows/ # GitHub Actions workflows -│ │ ├─ checks.yml # lint, type-check, test on PRs and pushes -│ │ ├─ docs.yml # build Sphinx docs, deploy to GitHub Pages -│ │ ├─ release.yml # build and publish agent packages +│ │ ├─ python-code-quality.yml # format, lint, type-check, security scan +│ │ ├─ python-tests.yml # pytest across Python matrix +│ │ ├─ python-docs.yml # build Sphinx docs, deploy to GitHub Pages +│ │ ├─ python-release.yml # build and publish agent packages +│ │ ├─ python-package-build.yml # build changed agent wheels on PR +│ │ ├─ python-docker-build.yml # build and smoke-test agent Docker images │ │ ├─ monorepo-release.yml # tag and release shared monorepo infra │ │ ├─ codeql-analysis.yml # CodeQL security scanning │ │ ├─ security-review.md # agentic workflow (security review) @@ -225,22 +242,40 @@ flowchart TD S7 --> S8["uv-lock sync
(if manifests changed)"] ``` -### CI workflows — on every PR and push +### CI workflows — on every PR + +Every pull request triggers up to six parallel workflows. Code quality and tests run on all PRs across a Python 3.10–3.13 matrix. Package build and Docker build are path-filtered — they only run when agent source code, pyproject files, or Dockerfiles change. CodeQL and the Copilot security agent provide additional security coverage. ```mermaid flowchart TD - subgraph trigger["Trigger: pull_request / push"] + subgraph trigger["Trigger: pull_request"] direction LR T1["PR opened / sync"] - T2["Push to main,
feature*, fix*"] end - trigger --> CW["checks.yml
Python 3.10–3.13 matrix"] - trigger --> CQ["codeql-analysis.yml
CodeQL SAST"] - trigger --> SR["security-review.md
Copilot security agent"] + trigger --> CQ_QUAL["python-code-quality.yml
Python 3.10–3.13 matrix"] + trigger --> CQ_TEST["python-tests.yml
Python 3.10–3.13 matrix"] + trigger --> PB["python-package-build.yml
Wheel build
(path-filtered)"] + trigger --> DK["python-docker-build.yml
Docker build & smoke test
(path-filtered)"] + trigger --> CQ["codeql-analysis.yml
CodeQL SAST
(PR + push to main only)"] + trigger --> SR["security-review.md
Copilot security agent
(PR only)"] + + CQ_QUAL --> CQ_QUAL1["uv sync"] + CQ_QUAL1 --> CQ_QUAL1b["Lock verify"] + CQ_QUAL1b --> CQ_QUAL2["Format + Lint"] + CQ_QUAL2 --> CQ_QUAL3["Pyright + MyPy"] + CQ_QUAL3 --> CQ_QUAL4["Bandit + Markdown lint"] - CW --> CW1["uv sync"] - CW1 --> CW2["poe check
(full quality gate)"] + CQ_TEST --> CQ_TEST1["uv sync"] + CQ_TEST1 --> CQ_TEST2["poe test"] + + PB --> PB1["uv sync"] + PB1 --> PB2["poe build-changed"] + PB2 --> PB3["Verify wheels"] + + DK --> DK1["Detect changed agents
with Dockerfiles"] + DK1 --> DK2["docker build"] + DK2 --> DK3["Smoke test
(--help)"] CQ --> CQ1["CodeQL init
(Python + Actions)"] CQ1 --> CQ2["Autobuild"] @@ -255,6 +290,8 @@ flowchart TD ### Release workflow — on push to main or manual dispatch +When agent source code or `pyproject.toml` files are pushed to `main`, the release workflow automatically builds only the changed agents, creates per-agent tags (`-v`), publishes GitHub releases with wheel assets and PR-based changelogs, and optionally uploads packages to the configured registry. The [monorepo release workflow](.github/workflows/monorepo-release.yml) handles shared infrastructure releases separately. + ```mermaid flowchart LR R0["Push to main
(agent changes)"] --> R1["poe build-changed"] @@ -267,6 +304,8 @@ flowchart LR ### Docs workflow — on push to main +When documentation sources, agent source code, or the docs generation script change on `main`, the docs workflow installs Sphinx dependencies, generates unified and per-agent documentation, and deploys the result to GitHub Pages. + ```mermaid flowchart LR D0["Push to main
(docs/agents/scripts changed)"] --> D1["Install docs deps"] @@ -288,7 +327,8 @@ flowchart TD subgraph pr["On every PR"] direction TB SR["Copilot security agent
15 posture categories
Inline review comments"] - CHECKS["Quality gate
Ruff · Pyright · MyPy
Bandit · Tests"] + QUAL["Code quality
Ruff · Pyright · MyPy
Bandit · Markdown lint"] + TESTS["Tests
pytest across
Python 3.10–3.13"] end always --> pr @@ -311,6 +351,7 @@ flowchart TD | Task | What it does | | --- | --- | +| `poe lock-verify` | Verify `uv.lock` is in sync with `pyproject.toml` | | `poe fmt` | Ruff format (Black-like, 120-col, import sorting) | | `poe lint` | Ruff lint (pycodestyle, pyflakes, bugbear, pylint, Bandit rules, ...) | | `poe pyright` | Pyright strict type checking | @@ -411,7 +452,7 @@ agent2-v0.3.0 # different agent, independent version ### Agent release workflow -The [agent release workflow](.github/workflows/release.yml) triggers on pushes to `main` that change agent sources or pyproject files, and on `workflow_dispatch`. It: +The [agent release workflow](.github/workflows/python-release.yml) triggers on pushes to `main` that change agent sources or pyproject files, and on `workflow_dispatch`. It: 1. Runs `poe build-changed` to build only agents with modified files. 2. Iterates over the wheels in `dist/`, extracting agent name and version from each filename. @@ -438,14 +479,14 @@ Publishing is **commented out** by default — the workflow only creates tags an 1. [Create a feed](https://learn.microsoft.com/azure/devops/artifacts/quickstarts/python-packages) in your Azure DevOps organization. 2. Generate a Personal Access Token (PAT) with **Packaging > Read & Write** scope. 3. Add the PAT as a repository secret named `AZURE_ARTIFACTS_TOKEN` (Settings → Secrets and variables → Actions). -4. Uncomment the "Publish to Azure Artifacts" block in `.github/workflows/release.yml`. +4. Uncomment the "Publish to Azure Artifacts" block in `.github/workflows/python-release.yml`. 5. Uncomment the Azure `[[tool.uv.index]]` block in `pyproject.toml` and fill in your org/project/feed. #### PyPI (public packages) 1. [Create an API token](https://pypi.org/manage/account/token/) on PyPI. 2. Add it as a repository secret named `PYPI_TOKEN`. -3. Uncomment the "Publish to PyPI" block in `.github/workflows/release.yml`. +3. 
Uncomment the "Publish to PyPI" block in `.github/workflows/python-release.yml`. 4. Uncomment the PyPI `[[tool.uv.index]]` block in `pyproject.toml`. > **Note:** GitHub Packages does **not** support a Python/pip registry. @@ -527,7 +568,7 @@ The `.github/instructions/` directory contains context-aware instructions that g ## Documentation -Documentation is built using Sphinx and published to GitHub Pages via the [docs workflow](.github/workflows/docs.yml). +Documentation is built using Sphinx and published to GitHub Pages via the [docs workflow](.github/workflows/python-docs.yml). ```sh # Install docs dependencies diff --git a/pyproject.toml b/pyproject.toml index bcbba51..c36f8b6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -260,7 +260,7 @@ args = [{ name = "files", default = ".", positional = true, multiple = true }] # ── Package registry ────────────────────────────────────────────────────────── # GitHub Packages does NOT support a Python/pip registry. # Uncomment ONE of the blocks below and configure the matching -# UV_PUBLISH_URL / UV_PUBLISH_TOKEN in .github/workflows/release.yml. +# UV_PUBLISH_URL / UV_PUBLISH_TOKEN in .github/workflows/python-release.yml. 
# # Azure Artifacts (recommended for private packages): # [[tool.uv.index]] From de286951ed77f62dcae6461c053672b9cb223012 Mon Sep 17 00:00:00 2001 From: pmalarme Date: Wed, 18 Feb 2026 20:28:20 +0100 Subject: [PATCH 05/90] feat: update permissions for pull request handling in security review workflow --- .github/workflows/security-review.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/security-review.md b/.github/workflows/security-review.md index bf716ed..d4aea84 100644 --- a/.github/workflows/security-review.md +++ b/.github/workflows/security-review.md @@ -9,7 +9,7 @@ on: permissions: contents: read - pull-requests: read + pull-requests: write engine: id: copilot From 384abd657cc3c7adb19bac2d092d48ec3a497167 Mon Sep 17 00:00:00 2001 From: pmalarme Date: Wed, 18 Feb 2026 20:37:55 +0100 Subject: [PATCH 06/90] feat: update permissions for pull requests in security review workflow --- .github/workflows/security-review.md | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/.github/workflows/security-review.md b/.github/workflows/security-review.md index d4aea84..ff76878 100644 --- a/.github/workflows/security-review.md +++ b/.github/workflows/security-review.md @@ -9,7 +9,7 @@ on: permissions: contents: read - pull-requests: write + pull-requests: read engine: id: copilot @@ -22,9 +22,11 @@ tools: safe-outputs: create-pull-request-review-comment: max: 20 + footer: "if-body" submit-pull-request-review: max: 1 - footer: "if-body" + target: "triggering" + footer: false add-reviewer: reviewers: [copilot] max: 1 From 642e7f1a31e59f0dc0c1b28e0a273f9ce6e02afe Mon Sep 17 00:00:00 2001 From: pmalarme Date: Wed, 18 Feb 2026 20:43:01 +0100 Subject: [PATCH 07/90] feat: remove outdated Python Docker workflow and update security review instructions --- .github/workflows/python-docker.yml | 62 ---------------------- .github/workflows/security-review.lock.yml | 4 +- .github/workflows/security-review.md | 4 +- 3 files 
changed, 4 insertions(+), 66 deletions(-) delete mode 100644 .github/workflows/python-docker.yml diff --git a/.github/workflows/python-docker.yml b/.github/workflows/python-docker.yml deleted file mode 100644 index 4a54052..0000000 --- a/.github/workflows/python-docker.yml +++ /dev/null @@ -1,62 +0,0 @@ -name: "Python: docker build & smoke test" - -on: - workflow_dispatch: - pull_request: - branches: ["main", "feature*", "fix*"] - paths: - - "agents/*/Dockerfile" - - "agents/*/src/**" - - "agents/*/pyproject.toml" - -permissions: - contents: read - -jobs: - detect: - name: detect agents with Dockerfiles - runs-on: ubuntu-latest - - outputs: - agents: ${{ steps.find.outputs.agents }} - - steps: - - name: Checkout - uses: actions/checkout@v6 - with: - persist-credentials: false - - - name: Find agents with Dockerfiles - id: find - run: | - agents=$(find agents -maxdepth 2 -name Dockerfile -printf '%h\n' \ - | xargs -I{} basename {} \ - | jq -Rcn '[inputs]') - echo "agents=$agents" >> "$GITHUB_OUTPUT" - - build-and-test: - name: build & smoke test ${{ matrix.agent }} - needs: detect - if: ${{ needs.detect.outputs.agents != '[]' }} - runs-on: ubuntu-latest - - strategy: - fail-fast: false - matrix: - agent: ${{ fromJson(needs.detect.outputs.agents) }} - - steps: - - name: Checkout - uses: actions/checkout@v6 - with: - persist-credentials: false - - - name: Build Docker image - run: | - docker build \ - -t "${{ matrix.agent }}:ci" \ - -f "agents/${{ matrix.agent }}/Dockerfile" \ - "agents/${{ matrix.agent }}" - - - name: Smoke test - run: docker run --rm "${{ matrix.agent }}:ci" --help || true diff --git a/.github/workflows/security-review.lock.yml b/.github/workflows/security-review.lock.yml index 443ec27..7d2033c 100644 --- a/.github/workflows/security-review.lock.yml +++ b/.github/workflows/security-review.lock.yml @@ -23,7 +23,7 @@ # # Automated security review for pull requests. 
Analyzes changed files against 15 security posture categories and posts inline review comments on findings, then requests Copilot code review. # -# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"d0e48e418a4ef46187a36e4016998439ab7a03812880f4c080fd947047878f30"} +# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"eff26dc915998b86cd77dc770d1402df3042c48c2e0635c97982a4d86618e5ad"} name: "Security Review" "on": @@ -1163,7 +1163,7 @@ jobs: uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 env: GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }} - GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG: "{\"create_pull_request_review_comment\":{\"max\":20,\"side\":\"RIGHT\"},\"missing_data\":{},\"missing_tool\":{},\"submit_pull_request_review\":{\"footer\":\"if-body\",\"max\":1}}" + GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG: "{\"create_pull_request_review_comment\":{\"max\":20,\"side\":\"RIGHT\",\"target\":\"triggering\"},\"missing_data\":{},\"missing_tool\":{},\"submit_pull_request_review\":{\"footer\":\"none\",\"max\":1}}" with: github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} script: | diff --git a/.github/workflows/security-review.md b/.github/workflows/security-review.md index ff76878..7b6ed1c 100644 --- a/.github/workflows/security-review.md +++ b/.github/workflows/security-review.md @@ -22,14 +22,14 @@ tools: safe-outputs: create-pull-request-review-comment: max: 20 - footer: "if-body" + target: "triggering" submit-pull-request-review: max: 1 - target: "triggering" footer: false add-reviewer: reviewers: [copilot] max: 1 + target: "triggering" --- # Security Review From 226b096e73e940101feb158fc1da72312b07f00c Mon Sep 17 00:00:00 2001 From: pmalarme Date: Wed, 18 Feb 2026 21:36:10 +0100 Subject: [PATCH 08/90] feat: update security review workflow to allow multiple reviewers and adjust target settings --- .github/workflows/security-review.lock.yml | 6 +++--- .github/workflows/security-review.md | 4 ++-- 2 files changed, 5 
insertions(+), 5 deletions(-) diff --git a/.github/workflows/security-review.lock.yml b/.github/workflows/security-review.lock.yml index 7d2033c..d0cb501 100644 --- a/.github/workflows/security-review.lock.yml +++ b/.github/workflows/security-review.lock.yml @@ -23,7 +23,7 @@ # # Automated security review for pull requests. Analyzes changed files against 15 security posture categories and posts inline review comments on findings, then requests Copilot code review. # -# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"eff26dc915998b86cd77dc770d1402df3042c48c2e0635c97982a4d86618e5ad"} +# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"3082f8a5932786883854d0039f5bfb05484915fd6f9b1fdd0d273218d043cb5b"} name: "Security Review" "on": @@ -372,7 +372,7 @@ jobs: mkdir -p /tmp/gh-aw/safeoutputs mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs cat > /opt/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_EOF' - {"add_reviewer":{"max":1,"reviewers":["copilot"]},"create_pull_request_review_comment":{"max":20},"missing_data":{},"missing_tool":{},"noop":{"max":1},"submit_pull_request_review":{"max":1}} + {"add_reviewer":{"max":3,"reviewers":["copilot"]},"create_pull_request_review_comment":{"max":20},"missing_data":{},"missing_tool":{},"noop":{"max":1},"submit_pull_request_review":{"max":1}} GH_AW_SAFE_OUTPUTS_CONFIG_EOF cat > /opt/gh-aw/safeoutputs/tools.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_EOF' [ @@ -445,7 +445,7 @@ jobs: "name": "submit_pull_request_review" }, { - "description": "Add reviewers to a GitHub pull request. Reviewers receive notifications and can approve or request changes. Use 'copilot' as a reviewer name to request the Copilot PR review bot. CONSTRAINTS: Maximum 1 reviewer(s) can be added.", + "description": "Add reviewers to a GitHub pull request. Reviewers receive notifications and can approve or request changes. Use 'copilot' as a reviewer name to request the Copilot PR review bot. 
CONSTRAINTS: Maximum 3 reviewer(s) can be added.", "inputSchema": { "additionalProperties": false, "properties": { diff --git a/.github/workflows/security-review.md b/.github/workflows/security-review.md index 7b6ed1c..b92635d 100644 --- a/.github/workflows/security-review.md +++ b/.github/workflows/security-review.md @@ -28,8 +28,8 @@ safe-outputs: footer: false add-reviewer: reviewers: [copilot] - max: 1 - target: "triggering" + max: 3 + target: "*" --- # Security Review From 8442740f73fef2ef56ffceb6f29e4c7ba646790c Mon Sep 17 00:00:00 2001 From: pmalarme Date: Wed, 18 Feb 2026 22:07:24 +0100 Subject: [PATCH 09/90] feat: update GitHub Actions setup to version 0.46.1 and enhance documentation for Copilot instructions --- .github/aw/actions-lock.json | 5 +++++ .github/workflows/security-review.lock.yml | 24 +++++++++++----------- 2 files changed, 17 insertions(+), 12 deletions(-) diff --git a/.github/aw/actions-lock.json b/.github/aw/actions-lock.json index 404454a..f494286 100644 --- a/.github/aw/actions-lock.json +++ b/.github/aw/actions-lock.json @@ -9,6 +9,11 @@ "repo": "github/gh-aw/actions/setup", "version": "v0.46.0", "sha": "f88ec26c65cc20ebb8ceabe809c9153385945bfe" + }, + "github/gh-aw/actions/setup@v0.46.1": { + "repo": "github/gh-aw/actions/setup", + "version": "v0.46.1", + "sha": "874bdd8271bf8c21902b068fb1ca6a22d2dc4b7a" } } } diff --git a/.github/workflows/security-review.lock.yml b/.github/workflows/security-review.lock.yml index d0cb501..1c29ccc 100644 --- a/.github/workflows/security-review.lock.yml +++ b/.github/workflows/security-review.lock.yml @@ -13,7 +13,7 @@ # \ /\ / (_) | | | | ( | | | | (_) \ V V /\__ \ # \/ \/ \___/|_| |_|\_\|_| |_|\___/ \_/\_/ |___/ # -# This file was automatically generated by gh-aw (v0.46.0). DO NOT EDIT. +# This file was automatically generated by gh-aw (v0.46.1). DO NOT EDIT. 
# # To update this file, edit the corresponding .md file and run: # gh aw compile @@ -23,7 +23,7 @@ # # Automated security review for pull requests. Analyzes changed files against 15 security posture categories and posts inline review comments on findings, then requests Copilot code review. # -# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"3082f8a5932786883854d0039f5bfb05484915fd6f9b1fdd0d273218d043cb5b"} +# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"3082f8a5932786883854d0039f5bfb05484915fd6f9b1fdd0d273218d043cb5b","compiler_version":"v0.46.1"} name: "Security Review" "on": @@ -56,7 +56,7 @@ jobs: title: ${{ steps.sanitized.outputs.title }} steps: - name: Setup Scripts - uses: github/gh-aw/actions/setup@f88ec26c65cc20ebb8ceabe809c9153385945bfe # v0.46.0 + uses: github/gh-aw/actions/setup@874bdd8271bf8c21902b068fb1ca6a22d2dc4b7a # v0.46.1 with: destination: /opt/gh-aw/actions - name: Validate context variables @@ -267,7 +267,7 @@ jobs: secret_verification_result: ${{ steps.validate-secret.outputs.verification_result }} steps: - name: Setup Scripts - uses: github/gh-aw/actions/setup@f88ec26c65cc20ebb8ceabe809c9153385945bfe # v0.46.0 + uses: github/gh-aw/actions/setup@874bdd8271bf8c21902b068fb1ca6a22d2dc4b7a # v0.46.1 with: destination: /opt/gh-aw/actions - name: Checkout repository @@ -313,8 +313,8 @@ jobs: engine_name: "GitHub Copilot CLI", model: process.env.GH_AW_MODEL_AGENT_COPILOT || "", version: "", - agent_version: "0.0.410", - cli_version: "v0.46.0", + agent_version: "0.0.411", + cli_version: "v0.46.1", workflow_name: "Security Review", experimental: false, supports_tools_allowlist: true, @@ -351,7 +351,7 @@ jobs: env: COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }} - name: Install GitHub Copilot CLI - run: /opt/gh-aw/actions/install_copilot_cli.sh 0.0.410 + run: /opt/gh-aw/actions/install_copilot_cli.sh 0.0.411 - name: Install awf binary run: bash /opt/gh-aw/actions/install_awf_binary.sh v0.20.0 - name: Determine 
automatic lockdown mode for GitHub MCP Server @@ -923,7 +923,7 @@ jobs: total_count: ${{ steps.missing_tool.outputs.total_count }} steps: - name: Setup Scripts - uses: github/gh-aw/actions/setup@f88ec26c65cc20ebb8ceabe809c9153385945bfe # v0.46.0 + uses: github/gh-aw/actions/setup@874bdd8271bf8c21902b068fb1ca6a22d2dc4b7a # v0.46.1 with: destination: /opt/gh-aw/actions - name: Download agent output artifact @@ -1010,7 +1010,7 @@ jobs: success: ${{ steps.parse_results.outputs.success }} steps: - name: Setup Scripts - uses: github/gh-aw/actions/setup@f88ec26c65cc20ebb8ceabe809c9153385945bfe # v0.46.0 + uses: github/gh-aw/actions/setup@874bdd8271bf8c21902b068fb1ca6a22d2dc4b7a # v0.46.1 with: destination: /opt/gh-aw/actions - name: Download agent artifacts @@ -1052,7 +1052,7 @@ jobs: env: COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }} - name: Install GitHub Copilot CLI - run: /opt/gh-aw/actions/install_copilot_cli.sh 0.0.410 + run: /opt/gh-aw/actions/install_copilot_cli.sh 0.0.411 - name: Execute GitHub Copilot CLI id: agentic_execution # Copilot CLI tool arguments (sorted): @@ -1106,7 +1106,7 @@ jobs: activated: ${{ steps.check_membership.outputs.is_team_member == 'true' }} steps: - name: Setup Scripts - uses: github/gh-aw/actions/setup@f88ec26c65cc20ebb8ceabe809c9153385945bfe # v0.46.0 + uses: github/gh-aw/actions/setup@874bdd8271bf8c21902b068fb1ca6a22d2dc4b7a # v0.46.1 with: destination: /opt/gh-aw/actions - name: Check team membership for workflow @@ -1144,7 +1144,7 @@ jobs: process_safe_outputs_temporary_id_map: ${{ steps.process_safe_outputs.outputs.temporary_id_map }} steps: - name: Setup Scripts - uses: github/gh-aw/actions/setup@f88ec26c65cc20ebb8ceabe809c9153385945bfe # v0.46.0 + uses: github/gh-aw/actions/setup@874bdd8271bf8c21902b068fb1ca6a22d2dc4b7a # v0.46.1 with: destination: /opt/gh-aw/actions - name: Download agent output artifact From b0d203ba514cc84bdc79d99632f931c75ba2d0f9 Mon Sep 17 00:00:00 2001 From: pmalarme Date: Wed, 18 Feb 2026 
22:27:10 +0100 Subject: [PATCH 10/90] feat: enhance security review documentation and add cache memory instructions --- .github/workflows/security-review.lock.yml | 52 +++++++++++++++++++++- .github/workflows/security-review.md | 37 +++++++++++---- 2 files changed, 79 insertions(+), 10 deletions(-) diff --git a/.github/workflows/security-review.lock.yml b/.github/workflows/security-review.lock.yml index 1c29ccc..fa47aee 100644 --- a/.github/workflows/security-review.lock.yml +++ b/.github/workflows/security-review.lock.yml @@ -23,7 +23,7 @@ # # Automated security review for pull requests. Analyzes changed files against 15 security posture categories and posts inline review comments on findings, then requests Copilot code review. # -# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"3082f8a5932786883854d0039f5bfb05484915fd6f9b1fdd0d273218d043cb5b","compiler_version":"v0.46.1"} +# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"239455fd5f26b994eeb00a76f9efccd51af91eaf8fba8d95bb0e7c64bc4cb2c7","compiler_version":"v0.46.1"} name: "Security Review" "on": @@ -114,6 +114,7 @@ jobs: cat "/opt/gh-aw/prompts/xpia.md" >> "$GH_AW_PROMPT" cat "/opt/gh-aw/prompts/temp_folder_prompt.md" >> "$GH_AW_PROMPT" cat "/opt/gh-aw/prompts/markdown.md" >> "$GH_AW_PROMPT" + cat "/opt/gh-aw/prompts/cache_memory_prompt.md" >> "$GH_AW_PROMPT" cat << 'GH_AW_PROMPT_EOF' >> "$GH_AW_PROMPT" GitHub API Access Instructions @@ -193,6 +194,9 @@ jobs: uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 env: GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + GH_AW_ALLOWED_EXTENSIONS: '' + GH_AW_CACHE_DESCRIPTION: '' + GH_AW_CACHE_DIR: '/tmp/gh-aw/cache-memory/' GH_AW_GITHUB_ACTOR: ${{ github.actor }} GH_AW_GITHUB_EVENT_COMMENT_ID: ${{ github.event.comment.id }} GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER: ${{ github.event.discussion.number }} @@ -214,6 +218,9 @@ jobs: return await substitutePlaceholders({ file: process.env.GH_AW_PROMPT, substitutions: { + 
GH_AW_ALLOWED_EXTENSIONS: process.env.GH_AW_ALLOWED_EXTENSIONS, + GH_AW_CACHE_DESCRIPTION: process.env.GH_AW_CACHE_DESCRIPTION, + GH_AW_CACHE_DIR: process.env.GH_AW_CACHE_DIR, GH_AW_GITHUB_ACTOR: process.env.GH_AW_GITHUB_ACTOR, GH_AW_GITHUB_EVENT_COMMENT_ID: process.env.GH_AW_GITHUB_EVENT_COMMENT_ID, GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER: process.env.GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER, @@ -276,6 +283,16 @@ jobs: persist-credentials: false - name: Create gh-aw temp directory run: bash /opt/gh-aw/actions/create_gh_aw_tmp_dir.sh + # Cache memory file share configuration from frontmatter processed below + - name: Create cache-memory directory + run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh + - name: Restore cache-memory file share data + uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + with: + key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} + path: /tmp/gh-aw/cache-memory + restore-keys: | + memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}- - name: Configure Git credentials env: REPO_NAME: ${{ github.repository }} @@ -754,7 +771,7 @@ jobs: run: | set -o pipefail sudo -E awf --env-all --container-workdir "${GITHUB_WORKSPACE}" --allow-domains 
api.business.githubcopilot.com,api.enterprise.githubcopilot.com,api.github.com,api.githubcopilot.com,api.individual.githubcopilot.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,github.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,ppa.launchpad.net,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,telemetry.enterprise.githubcopilot.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --enable-host-access --image-tag 0.20.0 --skip-pull --enable-api-proxy \ - -- /bin/bash -c '/usr/local/bin/copilot --add-dir /tmp/gh-aw/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --add-dir "${GITHUB_WORKSPACE}" --disable-builtin-mcps --agent security-reviewer --allow-all-tools --allow-all-paths --share /tmp/gh-aw/sandbox/agent/logs/conversation.md --prompt "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"${GH_AW_MODEL_AGENT_COPILOT:+ --model "$GH_AW_MODEL_AGENT_COPILOT"}' 2>&1 | tee -a /tmp/gh-aw/agent-stdio.log + -- /bin/bash -c '/usr/local/bin/copilot --add-dir /tmp/gh-aw/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --add-dir "${GITHUB_WORKSPACE}" --disable-builtin-mcps --agent security-reviewer --allow-all-tools --add-dir /tmp/gh-aw/cache-memory/ --allow-all-paths --share /tmp/gh-aw/sandbox/agent/logs/conversation.md --prompt "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"${GH_AW_MODEL_AGENT_COPILOT:+ --model "$GH_AW_MODEL_AGENT_COPILOT"}' 2>&1 | tee -a /tmp/gh-aw/agent-stdio.log env: COPILOT_AGENT_RUNNER_TYPE: STANDALONE 
COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }} @@ -891,6 +908,12 @@ jobs: else echo 'AWF binary not installed, skipping firewall log summary' fi + - name: Upload cache-memory data as artifact + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + if: always() + with: + name: cache-memory + path: /tmp/gh-aw/cache-memory - name: Upload agent artifacts if: always() continue-on-error: true @@ -912,6 +935,7 @@ jobs: - agent - detection - safe_outputs + - update_cache_memory if: (always()) && (needs.agent.result != 'skipped') runs-on: ubuntu-slim permissions: @@ -1171,3 +1195,27 @@ jobs: setupGlobals(core, github, context, exec, io); const { main } = require('/opt/gh-aw/actions/safe_output_handler_manager.cjs'); await main(); + + update_cache_memory: + needs: + - agent + - detection + if: always() && needs.detection.outputs.success == 'true' + runs-on: ubuntu-latest + permissions: {} + steps: + - name: Setup Scripts + uses: github/gh-aw/actions/setup@874bdd8271bf8c21902b068fb1ca6a22d2dc4b7a # v0.46.1 + with: + destination: /opt/gh-aw/actions + - name: Download cache-memory artifact (default) + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + continue-on-error: true + with: + name: cache-memory + path: /tmp/gh-aw/cache-memory + - name: Save cache-memory to cache (default) + uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + with: + key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} + path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/security-review.md b/.github/workflows/security-review.md index b92635d..9e6cbc4 100644 --- a/.github/workflows/security-review.md +++ b/.github/workflows/security-review.md @@ -16,6 +16,7 @@ engine: agent: security-reviewer tools: + cache-memory: true github: toolsets: [repos, pull_requests] @@ -29,7 +30,7 @@ safe-outputs: add-reviewer: reviewers: [copilot] max: 3 - target: "*" + target: "triggering" --- # Security 
Review @@ -40,22 +41,31 @@ agent instructions. ## Instructions -1. **Fetch the pull request diff.** Read the pull request details and all +1. **Access memory first.** Use cache memory at + `/tmp/gh-aw/cache-memory/` to: + - Check prior review context for this PR at + `/tmp/gh-aw/cache-memory/security-review-pr-${{ github.event.pull_request.number }}.json` + - Identify recurring security patterns in this repository from + `/tmp/gh-aw/cache-memory/security-review-patterns.json` + - Avoid repeating the same inline comments from previous reviews unless the + issue remains unresolved in newly changed lines + +2. **Fetch the pull request diff.** Read the pull request details and all changed files for PR #${{ github.event.pull_request.number }}. -2. **Review every changed file** against all 15 security posture categories +3. **Review every changed file** against all 15 security posture categories from the imported agent instructions. Focus only on the lines that were added or modified in the diff — do not flag pre-existing code that was not touched. -3. **Post inline review comments** on specific code lines where you find +4. **Post inline review comments** on specific code lines where you find security issues. Each comment must include: - The security category (e.g., "Input Validation", "Secrets") - Severity: critical, high, medium, low, or informational - A clear description of the issue and why it matters - A concrete, actionable recommendation or code fix -4. **Submit the review.** After posting all inline comments: +5. **Submit the review.** After posting all inline comments: - If you found any **critical** or **high** severity issues, submit the review with `REQUEST_CHANGES` and a summary body listing the top findings. - If you found only **medium** or **low** issues, submit with `COMMENT` and @@ -63,9 +73,16 @@ agent instructions. - If no issues were found, submit with `COMMENT` and a body stating the changes look secure. -5. 
**Request Copilot review.** After submitting the security review, add - `copilot` as a reviewer on the pull request for an additional code quality - review. +6. **Update memory.** After submitting the review: + - Write/update PR-specific memory at + `/tmp/gh-aw/cache-memory/security-review-pr-${{ github.event.pull_request.number }}.json` + including review timestamp, findings summary, categories found, and files + reviewed + - Update shared pattern memory at + `/tmp/gh-aw/cache-memory/security-review-patterns.json` with recurring + issue themes and counts + +7. **Request Copilot review.** After submitting the security review, add `copilot` as a reviewer on the pull request for an additional code quality review. ## Review Guidelines @@ -78,3 +95,7 @@ agent instructions. principles documented in `CODING_STANDARDS.md`. - **Do not produce false positives.** If you are unsure whether something is a real issue, state your uncertainty and classify it as informational. +- **Use memory intentionally.** + - Track patterns: notice if the same issue types keep recurring + - Avoid repetition: do not post duplicate comments in the same PR + - Build context: use previous review outcomes to improve prioritization From e827dad25091061a9671baca4636be23fe41c003 Mon Sep 17 00:00:00 2001 From: pmalarme Date: Thu, 19 Feb 2026 11:05:33 +0100 Subject: [PATCH 11/90] feat: update GitHub Actions setup to version 0.46.3 and enhance security review documentation --- .github/aw/actions-lock.json | 11 ++----- .github/workflows/security-review.lock.yml | 38 +++++++++++----------- .github/workflows/security-review.md | 4 ++- 3 files changed, 25 insertions(+), 28 deletions(-) diff --git a/.github/aw/actions-lock.json b/.github/aw/actions-lock.json index f494286..36a650c 100644 --- a/.github/aw/actions-lock.json +++ b/.github/aw/actions-lock.json @@ -5,15 +5,10 @@ "version": "v8", "sha": "ed597411d8f924073f98dfc5c65a23a2325f34cd" }, - "github/gh-aw/actions/setup@v0.46.0": { + 
"github/gh-aw/actions/setup@v0.46.3": { "repo": "github/gh-aw/actions/setup", - "version": "v0.46.0", - "sha": "f88ec26c65cc20ebb8ceabe809c9153385945bfe" - }, - "github/gh-aw/actions/setup@v0.46.1": { - "repo": "github/gh-aw/actions/setup", - "version": "v0.46.1", - "sha": "874bdd8271bf8c21902b068fb1ca6a22d2dc4b7a" + "version": "v0.46.3", + "sha": "a70c5eada06553e3510ac27f2c3bda9d3705bccb" } } } diff --git a/.github/workflows/security-review.lock.yml b/.github/workflows/security-review.lock.yml index fa47aee..dfe58b7 100644 --- a/.github/workflows/security-review.lock.yml +++ b/.github/workflows/security-review.lock.yml @@ -13,7 +13,7 @@ # \ /\ / (_) | | | | ( | | | | (_) \ V V /\__ \ # \/ \/ \___/|_| |_|\_\|_| |_|\___/ \_/\_/ |___/ # -# This file was automatically generated by gh-aw (v0.46.1). DO NOT EDIT. +# This file was automatically generated by gh-aw (v0.46.3). DO NOT EDIT. # # To update this file, edit the corresponding .md file and run: # gh aw compile @@ -23,7 +23,7 @@ # # Automated security review for pull requests. Analyzes changed files against 15 security posture categories and posts inline review comments on findings, then requests Copilot code review. 
# -# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"239455fd5f26b994eeb00a76f9efccd51af91eaf8fba8d95bb0e7c64bc4cb2c7","compiler_version":"v0.46.1"} +# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"5352cbe0b366e019785bec291855754aa7279dbc1b9a74e2326a64f3c28fcae3","compiler_version":"v0.46.3"} name: "Security Review" "on": @@ -56,7 +56,7 @@ jobs: title: ${{ steps.sanitized.outputs.title }} steps: - name: Setup Scripts - uses: github/gh-aw/actions/setup@874bdd8271bf8c21902b068fb1ca6a22d2dc4b7a # v0.46.1 + uses: github/gh-aw/actions/setup@a70c5eada06553e3510ac27f2c3bda9d3705bccb # v0.46.3 with: destination: /opt/gh-aw/actions - name: Validate context variables @@ -274,7 +274,7 @@ jobs: secret_verification_result: ${{ steps.validate-secret.outputs.verification_result }} steps: - name: Setup Scripts - uses: github/gh-aw/actions/setup@874bdd8271bf8c21902b068fb1ca6a22d2dc4b7a # v0.46.1 + uses: github/gh-aw/actions/setup@a70c5eada06553e3510ac27f2c3bda9d3705bccb # v0.46.3 with: destination: /opt/gh-aw/actions - name: Checkout repository @@ -289,10 +289,10 @@ jobs: - name: Restore cache-memory file share data uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 with: - key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} + key: memory-${{ github.workflow }}-${{ github.event.pull_request.number }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory restore-keys: | - memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}- + memory-${{ github.workflow }}-${{ github.event.pull_request.number }}- - name: Configure Git credentials env: REPO_NAME: ${{ github.repository }} @@ -328,10 +328,10 @@ jobs: const awInfo = { engine_id: "copilot", engine_name: "GitHub Copilot CLI", - model: process.env.GH_AW_MODEL_AGENT_COPILOT || "", + model: "GPT-5.3-Codex (copilot)", version: "", agent_version: "0.0.411", - cli_version: "v0.46.1", + cli_version: "v0.46.3", workflow_name: "Security Review", experimental: false, 
supports_tools_allowlist: true, @@ -771,12 +771,11 @@ jobs: run: | set -o pipefail sudo -E awf --env-all --container-workdir "${GITHUB_WORKSPACE}" --allow-domains api.business.githubcopilot.com,api.enterprise.githubcopilot.com,api.github.com,api.githubcopilot.com,api.individual.githubcopilot.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,github.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,ppa.launchpad.net,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,telemetry.enterprise.githubcopilot.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --enable-host-access --image-tag 0.20.0 --skip-pull --enable-api-proxy \ - -- /bin/bash -c '/usr/local/bin/copilot --add-dir /tmp/gh-aw/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --add-dir "${GITHUB_WORKSPACE}" --disable-builtin-mcps --agent security-reviewer --allow-all-tools --add-dir /tmp/gh-aw/cache-memory/ --allow-all-paths --share /tmp/gh-aw/sandbox/agent/logs/conversation.md --prompt "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"${GH_AW_MODEL_AGENT_COPILOT:+ --model "$GH_AW_MODEL_AGENT_COPILOT"}' 2>&1 | tee -a /tmp/gh-aw/agent-stdio.log + -- /bin/bash -c '/usr/local/bin/copilot --add-dir /tmp/gh-aw/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --add-dir "${GITHUB_WORKSPACE}" --disable-builtin-mcps --model '\''GPT-5.3-Codex (copilot)'\'' --agent security-reviewer --allow-all-tools --add-dir /tmp/gh-aw/cache-memory/ --allow-all-paths --share 
/tmp/gh-aw/sandbox/agent/logs/conversation.md --prompt "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"' 2>&1 | tee -a /tmp/gh-aw/agent-stdio.log env: COPILOT_AGENT_RUNNER_TYPE: STANDALONE COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }} GH_AW_MCP_CONFIG: /home/runner/.copilot/mcp-config.json - GH_AW_MODEL_AGENT_COPILOT: ${{ vars.GH_AW_MODEL_AGENT_COPILOT || '' }} GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }} GITHUB_HEAD_REF: ${{ github.head_ref }} @@ -947,7 +946,7 @@ jobs: total_count: ${{ steps.missing_tool.outputs.total_count }} steps: - name: Setup Scripts - uses: github/gh-aw/actions/setup@874bdd8271bf8c21902b068fb1ca6a22d2dc4b7a # v0.46.1 + uses: github/gh-aw/actions/setup@a70c5eada06553e3510ac27f2c3bda9d3705bccb # v0.46.3 with: destination: /opt/gh-aw/actions - name: Download agent output artifact @@ -999,6 +998,7 @@ jobs: GH_AW_WORKFLOW_ID: "security-review" GH_AW_SECRET_VERIFICATION_RESULT: ${{ needs.agent.outputs.secret_verification_result }} GH_AW_CHECKOUT_PR_SUCCESS: ${{ needs.agent.outputs.checkout_pr_success }} + GH_AW_GROUP_REPORTS: "false" with: github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} script: | @@ -1034,7 +1034,7 @@ jobs: success: ${{ steps.parse_results.outputs.success }} steps: - name: Setup Scripts - uses: github/gh-aw/actions/setup@874bdd8271bf8c21902b068fb1ca6a22d2dc4b7a # v0.46.1 + uses: github/gh-aw/actions/setup@a70c5eada06553e3510ac27f2c3bda9d3705bccb # v0.46.3 with: destination: /opt/gh-aw/actions - name: Download agent artifacts @@ -1095,11 +1095,10 @@ jobs: mkdir -p /tmp/gh-aw/ mkdir -p /tmp/gh-aw/agent/ mkdir -p /tmp/gh-aw/sandbox/agent/logs/ - copilot --add-dir /tmp/ --add-dir /tmp/gh-aw/ --add-dir /tmp/gh-aw/agent/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --disable-builtin-mcps --allow-tool 'shell(cat)' --allow-tool 'shell(grep)' --allow-tool 'shell(head)' --allow-tool 'shell(jq)' --allow-tool 'shell(ls)' --allow-tool 'shell(tail)' 
--allow-tool 'shell(wc)' --share /tmp/gh-aw/sandbox/agent/logs/conversation.md --prompt "$COPILOT_CLI_INSTRUCTION"${GH_AW_MODEL_DETECTION_COPILOT:+ --model "$GH_AW_MODEL_DETECTION_COPILOT"} 2>&1 | tee /tmp/gh-aw/threat-detection/detection.log + copilot --add-dir /tmp/ --add-dir /tmp/gh-aw/ --add-dir /tmp/gh-aw/agent/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --disable-builtin-mcps --model 'GPT-5.3-Codex (copilot)' --agent security-reviewer --allow-tool 'shell(cat)' --allow-tool 'shell(grep)' --allow-tool 'shell(head)' --allow-tool 'shell(jq)' --allow-tool 'shell(ls)' --allow-tool 'shell(tail)' --allow-tool 'shell(wc)' --share /tmp/gh-aw/sandbox/agent/logs/conversation.md --prompt "$COPILOT_CLI_INSTRUCTION" 2>&1 | tee /tmp/gh-aw/threat-detection/detection.log env: COPILOT_AGENT_RUNNER_TYPE: STANDALONE COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }} - GH_AW_MODEL_DETECTION_COPILOT: ${{ vars.GH_AW_MODEL_DETECTION_COPILOT || '' }} GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt GITHUB_HEAD_REF: ${{ github.head_ref }} GITHUB_REF_NAME: ${{ github.ref_name }} @@ -1130,7 +1129,7 @@ jobs: activated: ${{ steps.check_membership.outputs.is_team_member == 'true' }} steps: - name: Setup Scripts - uses: github/gh-aw/actions/setup@874bdd8271bf8c21902b068fb1ca6a22d2dc4b7a # v0.46.1 + uses: github/gh-aw/actions/setup@a70c5eada06553e3510ac27f2c3bda9d3705bccb # v0.46.3 with: destination: /opt/gh-aw/actions - name: Check team membership for workflow @@ -1158,6 +1157,7 @@ jobs: timeout-minutes: 15 env: GH_AW_ENGINE_ID: "copilot" + GH_AW_ENGINE_MODEL: "GPT-5.3-Codex (copilot)" GH_AW_WORKFLOW_ID: "security-review" GH_AW_WORKFLOW_NAME: "Security Review" outputs: @@ -1168,7 +1168,7 @@ jobs: process_safe_outputs_temporary_id_map: ${{ steps.process_safe_outputs.outputs.temporary_id_map }} steps: - name: Setup Scripts - uses: github/gh-aw/actions/setup@874bdd8271bf8c21902b068fb1ca6a22d2dc4b7a # v0.46.1 + uses: 
github/gh-aw/actions/setup@a70c5eada06553e3510ac27f2c3bda9d3705bccb # v0.46.3 with: destination: /opt/gh-aw/actions - name: Download agent output artifact @@ -1187,7 +1187,7 @@ jobs: uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 env: GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }} - GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG: "{\"create_pull_request_review_comment\":{\"max\":20,\"side\":\"RIGHT\",\"target\":\"triggering\"},\"missing_data\":{},\"missing_tool\":{},\"submit_pull_request_review\":{\"footer\":\"none\",\"max\":1}}" + GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG: "{\"add_reviewer\":{\"allowed\":[\"copilot\"],\"max\":3,\"target\":\"triggering\"},\"create_pull_request_review_comment\":{\"max\":20,\"side\":\"RIGHT\",\"target\":\"triggering\"},\"missing_data\":{},\"missing_tool\":{},\"submit_pull_request_review\":{\"footer\":\"none\",\"max\":1}}" with: github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} script: | @@ -1205,7 +1205,7 @@ jobs: permissions: {} steps: - name: Setup Scripts - uses: github/gh-aw/actions/setup@874bdd8271bf8c21902b068fb1ca6a22d2dc4b7a # v0.46.1 + uses: github/gh-aw/actions/setup@a70c5eada06553e3510ac27f2c3bda9d3705bccb # v0.46.3 with: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) @@ -1217,5 +1217,5 @@ jobs: - name: Save cache-memory to cache (default) uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 with: - key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} + key: memory-${{ github.workflow }}-${{ github.event.pull_request.number }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/security-review.md b/.github/workflows/security-review.md index 9e6cbc4..f063774 100644 --- a/.github/workflows/security-review.md +++ b/.github/workflows/security-review.md @@ -14,9 +14,11 @@ permissions: engine: id: copilot agent: security-reviewer + model: GPT-5.3-Codex (copilot) tools: - cache-memory: true + 
cache-memory: + key: "memory-${{ github.workflow }}-${{ github.event.pull_request.number }}" github: toolsets: [repos, pull_requests] From 3242629c41e5b18285af7681833fa936fc0724f6 Mon Sep 17 00:00:00 2001 From: pmalarme Date: Thu, 19 Feb 2026 11:11:03 +0100 Subject: [PATCH 12/90] feat: update documentation for security review and standardize Copilot model naming --- .github/workflows/security-review.lock.yml | 10 +++++----- .github/workflows/security-review.md | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/security-review.lock.yml b/.github/workflows/security-review.lock.yml index dfe58b7..c7076ec 100644 --- a/.github/workflows/security-review.lock.yml +++ b/.github/workflows/security-review.lock.yml @@ -23,7 +23,7 @@ # # Automated security review for pull requests. Analyzes changed files against 15 security posture categories and posts inline review comments on findings, then requests Copilot code review. # -# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"5352cbe0b366e019785bec291855754aa7279dbc1b9a74e2326a64f3c28fcae3","compiler_version":"v0.46.3"} +# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"473b9bb25f0ebe17039457e2621928ffe0a75eaad7fa54c8d5232e586afa686c","compiler_version":"v0.46.3"} name: "Security Review" "on": @@ -328,7 +328,7 @@ jobs: const awInfo = { engine_id: "copilot", engine_name: "GitHub Copilot CLI", - model: "GPT-5.3-Codex (copilot)", + model: "gpt-5.3-codex", version: "", agent_version: "0.0.411", cli_version: "v0.46.3", @@ -771,7 +771,7 @@ jobs: run: | set -o pipefail sudo -E awf --env-all --container-workdir "${GITHUB_WORKSPACE}" --allow-domains 
api.business.githubcopilot.com,api.enterprise.githubcopilot.com,api.github.com,api.githubcopilot.com,api.individual.githubcopilot.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,github.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,ppa.launchpad.net,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,telemetry.enterprise.githubcopilot.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --enable-host-access --image-tag 0.20.0 --skip-pull --enable-api-proxy \ - -- /bin/bash -c '/usr/local/bin/copilot --add-dir /tmp/gh-aw/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --add-dir "${GITHUB_WORKSPACE}" --disable-builtin-mcps --model '\''GPT-5.3-Codex (copilot)'\'' --agent security-reviewer --allow-all-tools --add-dir /tmp/gh-aw/cache-memory/ --allow-all-paths --share /tmp/gh-aw/sandbox/agent/logs/conversation.md --prompt "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"' 2>&1 | tee -a /tmp/gh-aw/agent-stdio.log + -- /bin/bash -c '/usr/local/bin/copilot --add-dir /tmp/gh-aw/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --add-dir "${GITHUB_WORKSPACE}" --disable-builtin-mcps --model gpt-5.3-codex --agent security-reviewer --allow-all-tools --add-dir /tmp/gh-aw/cache-memory/ --allow-all-paths --share /tmp/gh-aw/sandbox/agent/logs/conversation.md --prompt "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"' 2>&1 | tee -a /tmp/gh-aw/agent-stdio.log env: COPILOT_AGENT_RUNNER_TYPE: STANDALONE COPILOT_GITHUB_TOKEN: ${{ 
secrets.COPILOT_GITHUB_TOKEN }} @@ -1095,7 +1095,7 @@ jobs: mkdir -p /tmp/gh-aw/ mkdir -p /tmp/gh-aw/agent/ mkdir -p /tmp/gh-aw/sandbox/agent/logs/ - copilot --add-dir /tmp/ --add-dir /tmp/gh-aw/ --add-dir /tmp/gh-aw/agent/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --disable-builtin-mcps --model 'GPT-5.3-Codex (copilot)' --agent security-reviewer --allow-tool 'shell(cat)' --allow-tool 'shell(grep)' --allow-tool 'shell(head)' --allow-tool 'shell(jq)' --allow-tool 'shell(ls)' --allow-tool 'shell(tail)' --allow-tool 'shell(wc)' --share /tmp/gh-aw/sandbox/agent/logs/conversation.md --prompt "$COPILOT_CLI_INSTRUCTION" 2>&1 | tee /tmp/gh-aw/threat-detection/detection.log + copilot --add-dir /tmp/ --add-dir /tmp/gh-aw/ --add-dir /tmp/gh-aw/agent/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --disable-builtin-mcps --model gpt-5.3-codex --agent security-reviewer --allow-tool 'shell(cat)' --allow-tool 'shell(grep)' --allow-tool 'shell(head)' --allow-tool 'shell(jq)' --allow-tool 'shell(ls)' --allow-tool 'shell(tail)' --allow-tool 'shell(wc)' --share /tmp/gh-aw/sandbox/agent/logs/conversation.md --prompt "$COPILOT_CLI_INSTRUCTION" 2>&1 | tee /tmp/gh-aw/threat-detection/detection.log env: COPILOT_AGENT_RUNNER_TYPE: STANDALONE COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }} @@ -1157,7 +1157,7 @@ jobs: timeout-minutes: 15 env: GH_AW_ENGINE_ID: "copilot" - GH_AW_ENGINE_MODEL: "GPT-5.3-Codex (copilot)" + GH_AW_ENGINE_MODEL: "gpt-5.3-codex" GH_AW_WORKFLOW_ID: "security-review" GH_AW_WORKFLOW_NAME: "Security Review" outputs: diff --git a/.github/workflows/security-review.md b/.github/workflows/security-review.md index f063774..f62e8d3 100644 --- a/.github/workflows/security-review.md +++ b/.github/workflows/security-review.md @@ -14,7 +14,7 @@ permissions: engine: id: copilot agent: security-reviewer - model: GPT-5.3-Codex (copilot) + model: gpt-5.3-codex tools: cache-memory: From 9d223dd09a5d4996e490a17d03d5213829e04749 Mon Sep 17 00:00:00 2001 
From: pmalarme Date: Thu, 19 Feb 2026 11:22:54 +0100 Subject: [PATCH 13/90] feat: update documentation and remove deprecated model reference for Copilot instructions --- .github/workflows/security-review.lock.yml | 11 ++++++----- .github/workflows/security-review.md | 1 - 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/security-review.lock.yml b/.github/workflows/security-review.lock.yml index c7076ec..9ed5b8b 100644 --- a/.github/workflows/security-review.lock.yml +++ b/.github/workflows/security-review.lock.yml @@ -23,7 +23,7 @@ # # Automated security review for pull requests. Analyzes changed files against 15 security posture categories and posts inline review comments on findings, then requests Copilot code review. # -# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"473b9bb25f0ebe17039457e2621928ffe0a75eaad7fa54c8d5232e586afa686c","compiler_version":"v0.46.3"} +# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"fcd90e0f8df0ddee513d3c30f7e8314ff9a1c789107abd47cd018a8a5e354e1b","compiler_version":"v0.46.3"} name: "Security Review" "on": @@ -328,7 +328,7 @@ jobs: const awInfo = { engine_id: "copilot", engine_name: "GitHub Copilot CLI", - model: "gpt-5.3-codex", + model: process.env.GH_AW_MODEL_AGENT_COPILOT || "", version: "", agent_version: "0.0.411", cli_version: "v0.46.3", @@ -771,11 +771,12 @@ jobs: run: | set -o pipefail sudo -E awf --env-all --container-workdir "${GITHUB_WORKSPACE}" --allow-domains 
api.business.githubcopilot.com,api.enterprise.githubcopilot.com,api.github.com,api.githubcopilot.com,api.individual.githubcopilot.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,github.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,ppa.launchpad.net,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,telemetry.enterprise.githubcopilot.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --enable-host-access --image-tag 0.20.0 --skip-pull --enable-api-proxy \ - -- /bin/bash -c '/usr/local/bin/copilot --add-dir /tmp/gh-aw/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --add-dir "${GITHUB_WORKSPACE}" --disable-builtin-mcps --model gpt-5.3-codex --agent security-reviewer --allow-all-tools --add-dir /tmp/gh-aw/cache-memory/ --allow-all-paths --share /tmp/gh-aw/sandbox/agent/logs/conversation.md --prompt "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"' 2>&1 | tee -a /tmp/gh-aw/agent-stdio.log + -- /bin/bash -c '/usr/local/bin/copilot --add-dir /tmp/gh-aw/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --add-dir "${GITHUB_WORKSPACE}" --disable-builtin-mcps --agent security-reviewer --allow-all-tools --add-dir /tmp/gh-aw/cache-memory/ --allow-all-paths --share /tmp/gh-aw/sandbox/agent/logs/conversation.md --prompt "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"${GH_AW_MODEL_AGENT_COPILOT:+ --model "$GH_AW_MODEL_AGENT_COPILOT"}' 2>&1 | tee -a /tmp/gh-aw/agent-stdio.log env: COPILOT_AGENT_RUNNER_TYPE: STANDALONE 
COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }} GH_AW_MCP_CONFIG: /home/runner/.copilot/mcp-config.json + GH_AW_MODEL_AGENT_COPILOT: ${{ vars.GH_AW_MODEL_AGENT_COPILOT || '' }} GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }} GITHUB_HEAD_REF: ${{ github.head_ref }} @@ -1095,10 +1096,11 @@ jobs: mkdir -p /tmp/gh-aw/ mkdir -p /tmp/gh-aw/agent/ mkdir -p /tmp/gh-aw/sandbox/agent/logs/ - copilot --add-dir /tmp/ --add-dir /tmp/gh-aw/ --add-dir /tmp/gh-aw/agent/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --disable-builtin-mcps --model gpt-5.3-codex --agent security-reviewer --allow-tool 'shell(cat)' --allow-tool 'shell(grep)' --allow-tool 'shell(head)' --allow-tool 'shell(jq)' --allow-tool 'shell(ls)' --allow-tool 'shell(tail)' --allow-tool 'shell(wc)' --share /tmp/gh-aw/sandbox/agent/logs/conversation.md --prompt "$COPILOT_CLI_INSTRUCTION" 2>&1 | tee /tmp/gh-aw/threat-detection/detection.log + copilot --add-dir /tmp/ --add-dir /tmp/gh-aw/ --add-dir /tmp/gh-aw/agent/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --disable-builtin-mcps --allow-tool 'shell(cat)' --allow-tool 'shell(grep)' --allow-tool 'shell(head)' --allow-tool 'shell(jq)' --allow-tool 'shell(ls)' --allow-tool 'shell(tail)' --allow-tool 'shell(wc)' --share /tmp/gh-aw/sandbox/agent/logs/conversation.md --prompt "$COPILOT_CLI_INSTRUCTION"${GH_AW_MODEL_DETECTION_COPILOT:+ --model "$GH_AW_MODEL_DETECTION_COPILOT"} 2>&1 | tee /tmp/gh-aw/threat-detection/detection.log env: COPILOT_AGENT_RUNNER_TYPE: STANDALONE COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }} + GH_AW_MODEL_DETECTION_COPILOT: ${{ vars.GH_AW_MODEL_DETECTION_COPILOT || '' }} GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt GITHUB_HEAD_REF: ${{ github.head_ref }} GITHUB_REF_NAME: ${{ github.ref_name }} @@ -1157,7 +1159,6 @@ jobs: timeout-minutes: 15 env: GH_AW_ENGINE_ID: "copilot" - GH_AW_ENGINE_MODEL: "gpt-5.3-codex" GH_AW_WORKFLOW_ID: "security-review" 
GH_AW_WORKFLOW_NAME: "Security Review" outputs: diff --git a/.github/workflows/security-review.md b/.github/workflows/security-review.md index f62e8d3..cf24e8c 100644 --- a/.github/workflows/security-review.md +++ b/.github/workflows/security-review.md @@ -14,7 +14,6 @@ permissions: engine: id: copilot agent: security-reviewer - model: gpt-5.3-codex tools: cache-memory: From 2f93ec8db0fb89546c68faba956793e4f9ece328 Mon Sep 17 00:00:00 2001 From: pmalarme Date: Thu, 19 Feb 2026 11:41:11 +0100 Subject: [PATCH 14/90] feat: update documentation and security review configurations for Copilot integration --- .github/workflows/security-review.lock.yml | 4 ++-- .github/workflows/security-review.md | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/security-review.lock.yml b/.github/workflows/security-review.lock.yml index 9ed5b8b..bc5a873 100644 --- a/.github/workflows/security-review.lock.yml +++ b/.github/workflows/security-review.lock.yml @@ -23,7 +23,7 @@ # # Automated security review for pull requests. Analyzes changed files against 15 security posture categories and posts inline review comments on findings, then requests Copilot code review. 
# -# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"fcd90e0f8df0ddee513d3c30f7e8314ff9a1c789107abd47cd018a8a5e354e1b","compiler_version":"v0.46.3"} +# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"29e30b139a9ea75a3673563b40c05fe5c7c0067bbeae803d3e6f80fdb4dfb632","compiler_version":"v0.46.3"} name: "Security Review" "on": @@ -1188,7 +1188,7 @@ jobs: uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 env: GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }} - GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG: "{\"add_reviewer\":{\"allowed\":[\"copilot\"],\"max\":3,\"target\":\"triggering\"},\"create_pull_request_review_comment\":{\"max\":20,\"side\":\"RIGHT\",\"target\":\"triggering\"},\"missing_data\":{},\"missing_tool\":{},\"submit_pull_request_review\":{\"footer\":\"none\",\"max\":1}}" + GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG: "{\"add_reviewer\":{\"allowed\":[\"copilot\"],\"max\":3,\"target\":\"*\"},\"create_pull_request_review_comment\":{\"max\":20,\"side\":\"RIGHT\",\"target\":\"triggering\"},\"missing_data\":{},\"missing_tool\":{},\"submit_pull_request_review\":{\"footer\":\"none\",\"max\":1}}" with: github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} script: | diff --git a/.github/workflows/security-review.md b/.github/workflows/security-review.md index cf24e8c..30d21e0 100644 --- a/.github/workflows/security-review.md +++ b/.github/workflows/security-review.md @@ -31,7 +31,7 @@ safe-outputs: add-reviewer: reviewers: [copilot] max: 3 - target: "triggering" + target: "*" --- # Security Review From 18986b0d9404f25d83ee96ae01981476bc514e8f Mon Sep 17 00:00:00 2001 From: pmalarme Date: Thu, 19 Feb 2026 11:49:00 +0100 Subject: [PATCH 15/90] feat: update security review documentation and add user-specific reviewer configuration for Copilot --- .github/workflows/security-review.lock.yml | 6 +++--- .github/workflows/security-review.md | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git 
a/.github/workflows/security-review.lock.yml b/.github/workflows/security-review.lock.yml index bc5a873..6fa61ee 100644 --- a/.github/workflows/security-review.lock.yml +++ b/.github/workflows/security-review.lock.yml @@ -23,7 +23,7 @@ # # Automated security review for pull requests. Analyzes changed files against 15 security posture categories and posts inline review comments on findings, then requests Copilot code review. # -# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"29e30b139a9ea75a3673563b40c05fe5c7c0067bbeae803d3e6f80fdb4dfb632","compiler_version":"v0.46.3"} +# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"9669e244ad67e528b5a95c9a4861067e08835ef832d4c805c7192c070d7fff36","compiler_version":"v0.46.3"} name: "Security Review" "on": @@ -389,7 +389,7 @@ jobs: mkdir -p /tmp/gh-aw/safeoutputs mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs cat > /opt/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_EOF' - {"add_reviewer":{"max":3,"reviewers":["copilot"]},"create_pull_request_review_comment":{"max":20},"missing_data":{},"missing_tool":{},"noop":{"max":1},"submit_pull_request_review":{"max":1}} + {"add_reviewer":{"max":3,"reviewers":["copilot","pmalarme"]},"create_pull_request_review_comment":{"max":20},"missing_data":{},"missing_tool":{},"noop":{"max":1},"submit_pull_request_review":{"max":1}} GH_AW_SAFE_OUTPUTS_CONFIG_EOF cat > /opt/gh-aw/safeoutputs/tools.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_EOF' [ @@ -1188,7 +1188,7 @@ jobs: uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 env: GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }} - GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG: "{\"add_reviewer\":{\"allowed\":[\"copilot\"],\"max\":3,\"target\":\"*\"},\"create_pull_request_review_comment\":{\"max\":20,\"side\":\"RIGHT\",\"target\":\"triggering\"},\"missing_data\":{},\"missing_tool\":{},\"submit_pull_request_review\":{\"footer\":\"none\",\"max\":1}}" + GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG: 
"{\"add_reviewer\":{\"allowed\":[\"copilot\",\"pmalarme\"],\"max\":3,\"target\":\"triggering\"},\"create_pull_request_review_comment\":{\"max\":20,\"side\":\"RIGHT\",\"target\":\"triggering\"},\"missing_data\":{},\"missing_tool\":{},\"submit_pull_request_review\":{\"footer\":\"none\",\"max\":1}}" with: github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} script: | diff --git a/.github/workflows/security-review.md b/.github/workflows/security-review.md index 30d21e0..0027e4c 100644 --- a/.github/workflows/security-review.md +++ b/.github/workflows/security-review.md @@ -29,9 +29,9 @@ safe-outputs: max: 1 footer: false add-reviewer: - reviewers: [copilot] + reviewers: [copilot, pmalarme] max: 3 - target: "*" + target: "triggering" --- # Security Review From ea6a740f8c54357024e672ed978fe2f5e39f36f3 Mon Sep 17 00:00:00 2001 From: pmalarme Date: Thu, 19 Feb 2026 11:49:03 +0100 Subject: [PATCH 16/90] Update the reviewer for testing if they are assigned --- README.md | 19 ++++++++++++++++--- 1 file changed, 16 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 73aebd3..7de0787 100644 --- a/README.md +++ b/README.md @@ -62,7 +62,7 @@ flowchart TB subgraph L5["5. Copilot Review"] direction LR - CR1[Copilot code review — assigned automatically] + CR1[Copilot code review — assigned by security agent + branch protection] CR2[AI-powered suggestions and comments] end @@ -89,7 +89,7 @@ Each layer catches different classes of issues: | **Pre-commit** | On `git commit` (staged files) | Style drift, security anti-patterns, broken configs, stale lockfiles | | **CI quality gate** | On PR | Lock verification, full repo-wide type safety, code quality, test regressions, coverage, build validation. 
Split into three sub-layers: *code quality* (lock-verify, format, lint, type checks, Bandit, markdown lint), *tests* (PyTest + coverage), and *build validation* (wheel build + Docker build & smoke test, both path-filtered) | | **CI security** | On PR / push to main / schedule | Dataflow vulnerabilities, outdated dependencies, security posture gaps | -| **Copilot Review** | On PR (after security scan) | AI-powered code review with suggestions and inline comments | +| **Copilot Review** | On PR (via security agent + branch protection) | AI-powered code review with suggestions and inline comments | | **Release** | On push to main or manual | Agent release: builds changed agents, creates `-v` tags with wheel assets. Monorepo release: tags shared infra changes as `v` | --- @@ -533,6 +533,19 @@ The agentic workflow at [`.github/workflows/security-review.md`](.github/workflo 4. Submits a consolidated review (`REQUEST_CHANGES` for critical/high, `COMMENT` otherwise). 5. Requests Copilot as a reviewer for additional code quality coverage. +### Copilot code review + +The `add-reviewer` safe-output in the workflow assigns Copilot as a PR reviewer after the security review completes. This requires a fine-grained PAT stored as the `COPILOT_GITHUB_TOKEN` repository secret with: + +- **Pull requests: Read and write** +- **Copilot Requests: Read-only** + +As an additional safeguard, configure **branch protection rules** to require Copilot review on all PRs (not just those that trigger the security workflow): + +1. Go to **Settings → Rules → Rulesets** (or **Branches → Branch protection rules**). +2. Under **Require a pull request before merging**, add `copilot` as a **required reviewer**. +3. Copilot will automatically review every PR targeting the protected branch. 
+ ### Compiling agentic workflows Agentic workflow `.md` files must be compiled into GitHub Actions `.lock.yml` files before they can run: @@ -591,7 +604,7 @@ The docs workflow triggers on pushes to `main` when documentation sources, agent | **Dependabot** | Weekly updates for pip/uv dependencies and GitHub Actions | Shrinks vulnerability exposure windows | | **CodeQL** | SAST/code scanning for Python and GitHub Actions | Finds dataflow and security issues beyond linters | | **Copilot security agent** | AI-powered reviews against 15 security posture categories | Catches issues that static analysis misses | -| **Branch protection** | Required checks, signed commits, auto-merge for trusted bots | Prevents unverified code from reaching main | +| **Branch protection** | Required checks, signed commits, Copilot reviewer, auto-merge for trusted bots | Prevents unverified code from reaching main | | **Pre-commit hooks** | Staged-file checks before every commit | Catches issues at the earliest possible point | | **Dual type checkers** | Pyright + MyPy with different inference engines | Maximal type safety for AI-generated code | From 83bb9d4b63132910f9fb0f74eefe14164936fa82 Mon Sep 17 00:00:00 2001 From: pmalarme Date: Thu, 19 Feb 2026 11:59:45 +0100 Subject: [PATCH 17/90] feat: update security review documentation and remove user-specific reviewer configuration for Copilot --- .github/workflows/security-review.lock.yml | 6 +++--- .github/workflows/security-review.md | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/security-review.lock.yml b/.github/workflows/security-review.lock.yml index 6fa61ee..9ed5b8b 100644 --- a/.github/workflows/security-review.lock.yml +++ b/.github/workflows/security-review.lock.yml @@ -23,7 +23,7 @@ # # Automated security review for pull requests. Analyzes changed files against 15 security posture categories and posts inline review comments on findings, then requests Copilot code review. 
# -# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"9669e244ad67e528b5a95c9a4861067e08835ef832d4c805c7192c070d7fff36","compiler_version":"v0.46.3"} +# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"fcd90e0f8df0ddee513d3c30f7e8314ff9a1c789107abd47cd018a8a5e354e1b","compiler_version":"v0.46.3"} name: "Security Review" "on": @@ -389,7 +389,7 @@ jobs: mkdir -p /tmp/gh-aw/safeoutputs mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs cat > /opt/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_EOF' - {"add_reviewer":{"max":3,"reviewers":["copilot","pmalarme"]},"create_pull_request_review_comment":{"max":20},"missing_data":{},"missing_tool":{},"noop":{"max":1},"submit_pull_request_review":{"max":1}} + {"add_reviewer":{"max":3,"reviewers":["copilot"]},"create_pull_request_review_comment":{"max":20},"missing_data":{},"missing_tool":{},"noop":{"max":1},"submit_pull_request_review":{"max":1}} GH_AW_SAFE_OUTPUTS_CONFIG_EOF cat > /opt/gh-aw/safeoutputs/tools.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_EOF' [ @@ -1188,7 +1188,7 @@ jobs: uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 env: GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }} - GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG: "{\"add_reviewer\":{\"allowed\":[\"copilot\",\"pmalarme\"],\"max\":3,\"target\":\"triggering\"},\"create_pull_request_review_comment\":{\"max\":20,\"side\":\"RIGHT\",\"target\":\"triggering\"},\"missing_data\":{},\"missing_tool\":{},\"submit_pull_request_review\":{\"footer\":\"none\",\"max\":1}}" + GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG: "{\"add_reviewer\":{\"allowed\":[\"copilot\"],\"max\":3,\"target\":\"triggering\"},\"create_pull_request_review_comment\":{\"max\":20,\"side\":\"RIGHT\",\"target\":\"triggering\"},\"missing_data\":{},\"missing_tool\":{},\"submit_pull_request_review\":{\"footer\":\"none\",\"max\":1}}" with: github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} script: | diff --git a/.github/workflows/security-review.md 
b/.github/workflows/security-review.md index 0027e4c..cf24e8c 100644 --- a/.github/workflows/security-review.md +++ b/.github/workflows/security-review.md @@ -29,7 +29,7 @@ safe-outputs: max: 1 footer: false add-reviewer: - reviewers: [copilot, pmalarme] + reviewers: [copilot] max: 3 target: "triggering" --- From fd7f4e5c39ff6f46e913cc482a584b75b6a8a539 Mon Sep 17 00:00:00 2001 From: pmalarme Date: Thu, 19 Feb 2026 12:45:37 +0100 Subject: [PATCH 18/90] feat: enhance documentation and update Copilot instructions for security review workflow --- .github/workflows/security-review.lock.yml | 8 ++++---- .github/workflows/security-review.md | 6 +++--- README.md | 22 +++++++++++++++------- 3 files changed, 22 insertions(+), 14 deletions(-) diff --git a/.github/workflows/security-review.lock.yml b/.github/workflows/security-review.lock.yml index 9ed5b8b..0bd42da 100644 --- a/.github/workflows/security-review.lock.yml +++ b/.github/workflows/security-review.lock.yml @@ -23,7 +23,7 @@ # # Automated security review for pull requests. Analyzes changed files against 15 security posture categories and posts inline review comments on findings, then requests Copilot code review. 
# -# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"fcd90e0f8df0ddee513d3c30f7e8314ff9a1c789107abd47cd018a8a5e354e1b","compiler_version":"v0.46.3"} +# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"fcf4ec8e200afb9cf705aa7eca88dd83e01c9f2db564ba8a54ebb4d187c9f530","compiler_version":"v0.46.3"} name: "Security Review" "on": @@ -289,10 +289,10 @@ jobs: - name: Restore cache-memory file share data uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 with: - key: memory-${{ github.workflow }}-${{ github.event.pull_request.number }}-${{ github.run_id }} + key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory restore-keys: | - memory-${{ github.workflow }}-${{ github.event.pull_request.number }}- + memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}- - name: Configure Git credentials env: REPO_NAME: ${{ github.repository }} @@ -1218,5 +1218,5 @@ jobs: - name: Save cache-memory to cache (default) uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 with: - key: memory-${{ github.workflow }}-${{ github.event.pull_request.number }}-${{ github.run_id }} + key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/security-review.md b/.github/workflows/security-review.md index cf24e8c..93da5eb 100644 --- a/.github/workflows/security-review.md +++ b/.github/workflows/security-review.md @@ -16,8 +16,7 @@ engine: agent: security-reviewer tools: - cache-memory: - key: "memory-${{ github.workflow }}-${{ github.event.pull_request.number }}" + cache-memory: true github: toolsets: [repos, pull_requests] @@ -32,6 +31,7 @@ safe-outputs: reviewers: [copilot] max: 3 target: "triggering" + github-token: ${{ secrets.GH_AW_AGENT_TOKEN }} --- # Security Review @@ -83,7 +83,7 @@ agent instructions. `/tmp/gh-aw/cache-memory/security-review-patterns.json` with recurring issue themes and counts -7. 
**Request Copilot review.** After submitting the security review, add `copilot` as a reviewer on the pull request for an additional code quality review. +7. **Request Copilot review.** After submitting the security review, add Copilot as a reviewer on the pull request for an additional code quality review. ## Review Guidelines diff --git a/README.md b/README.md index 7de0787..b6ad502 100644 --- a/README.md +++ b/README.md @@ -62,7 +62,7 @@ flowchart TB subgraph L5["5. Copilot Review"] direction LR - CR1[Copilot code review — assigned by security agent + branch protection] + CR1[Copilot code review — assigned automatically] CR2[AI-powered suggestions and comments] end @@ -89,7 +89,7 @@ Each layer catches different classes of issues: | **Pre-commit** | On `git commit` (staged files) | Style drift, security anti-patterns, broken configs, stale lockfiles | | **CI quality gate** | On PR | Lock verification, full repo-wide type safety, code quality, test regressions, coverage, build validation. Split into three sub-layers: *code quality* (lock-verify, format, lint, type checks, Bandit, markdown lint), *tests* (PyTest + coverage), and *build validation* (wheel build + Docker build & smoke test, both path-filtered) | | **CI security** | On PR / push to main / schedule | Dataflow vulnerabilities, outdated dependencies, security posture gaps | -| **Copilot Review** | On PR (via security agent + branch protection) | AI-powered code review with suggestions and inline comments | +| **Copilot Review** | On PR (after security review) | AI-powered code review with suggestions and inline comments | | **Release** | On push to main or manual | Agent release: builds changed agents, creates `-v` tags with wheel assets. Monorepo release: tags shared infra changes as `v` | --- @@ -533,18 +533,26 @@ The agentic workflow at [`.github/workflows/security-review.md`](.github/workflo 4. Submits a consolidated review (`REQUEST_CHANGES` for critical/high, `COMMENT` otherwise). 5. 
Requests Copilot as a reviewer for additional code quality coverage. +>[!IMPORTANT] +> The `security-review.md` workflow is using the custom agent `.github/agents/security-reviewer.agent.md` which is defined in this repository. To be able to use this agent with `copilot` AI Engine, `COPILOT_GITHUB_TOKEN` secret must be added to the repository with a fine-grained PAT that has `Copilot Requests: Read-only` scope on public repositories. For more information see the [documentation](https://github.github.com/gh-aw/reference/auth/#copilot_github_token). + ### Copilot code review -The `add-reviewer` safe-output in the workflow assigns Copilot as a PR reviewer after the security review completes. This requires a fine-grained PAT stored as the `COPILOT_GITHUB_TOKEN` repository secret with: +The [`add-reviewer` safe-output](https://github.github.com/gh-aw/reference/safe-outputs/#add-reviewer-add-reviewer) in the workflow assigns Copilot as a PR reviewer after the security review completes. This requires a fine-grained PAT stored as the [`GH_AW_AGENT_TOKEN` repository secret](https://github.github.com/gh-aw/reference/auth/#gh_aw_agent_token) with: -- **Pull requests: Read and write** -- **Copilot Requests: Read-only** +- Resource owner: Your user account +- Repository access: “Public repositories” or select specific repos +- Repository permissions: + - Actions: Write + - Contents: Write + - Issues: Write + - Pull requests: Write -As an additional safeguard, configure **branch protection rules** to require Copilot review on all PRs (not just those that trigger the security workflow): + ### Compiling agentic workflows From 3f44e217ad2dcb47d8ecbb51972047248fea8a3a Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Thu, 19 Feb 2026 11:47:14 +0000 Subject: [PATCH 20/90] Initial plan From d7cc09dad9c2efdd16974256295c5035244a0065 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Feb 2026 11:47:28 +0000 Subject: [PATCH 21/90] Initial plan From 0df6c86a5bd5ff651e776bd71494e7a2e4d06be4 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Feb 2026 11:47:44 +0000 Subject: [PATCH 22/90] Initial plan From 38c93bc1bb1750f7266b6306384276ae7fd1cd47 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Feb 2026 11:47:54 +0000 Subject: [PATCH 23/90] Fix script injection in setup-python-env action by using env var for extra-args Co-authored-by: pmalarme <686568+pmalarme@users.noreply.github.com> --- .github/actions/setup-python-env/action.yml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.github/actions/setup-python-env/action.yml b/.github/actions/setup-python-env/action.yml index de38929..728a9bc 100644 --- a/.github/actions/setup-python-env/action.yml +++ b/.github/actions/setup-python-env/action.yml @@ -26,9 +26,12 @@ runs: - name: Install dependencies shell: bash + env: + EXTRA_ARGS: ${{ inputs.extra-args }} run: | args="--all-extras --dev" if [[ "${{ inputs.include-docs }}" == "true" ]]; then args="$args --group docs" fi - uv sync $args ${{ inputs.extra-args }} + # shellcheck disable=SC2086 # intentional word-splitting for uv flags + uv sync $args $EXTRA_ARGS From 24b1cab3d596d4a92e011c12ac0bbba4561b6a1d Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Feb 2026 11:47:58 +0000 Subject: [PATCH 24/90] fix: pin astral-sh/setup-uv to SHA f0ec1fc # v5.4.2 Co-authored-by: pmalarme <686568+pmalarme@users.noreply.github.com> --- .github/actions/setup-python-env/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 
deletion(-) diff --git a/.github/actions/setup-python-env/action.yml b/.github/actions/setup-python-env/action.yml index de38929..5de3d9d 100644 --- a/.github/actions/setup-python-env/action.yml +++ b/.github/actions/setup-python-env/action.yml @@ -19,7 +19,7 @@ runs: using: composite steps: - name: Set up uv - uses: astral-sh/setup-uv@v5 + uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v5.4.2 with: python-version: ${{ inputs.python-version }} enable-cache: true From d650789a75d134cc2065ebc03581b45a20adc30c Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Feb 2026 11:48:00 +0000 Subject: [PATCH 25/90] Initial plan From 5b3eb0972e94d913076eb496474de1b6c8e2b749 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Feb 2026 11:49:45 +0000 Subject: [PATCH 26/90] fix: use persist-credentials: false and explicit token in git remote URL Co-authored-by: pmalarme <686568+pmalarme@users.noreply.github.com> --- .github/workflows/monorepo-release.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/monorepo-release.yml b/.github/workflows/monorepo-release.yml index 1de50af..da09a16 100644 --- a/.github/workflows/monorepo-release.yml +++ b/.github/workflows/monorepo-release.yml @@ -32,7 +32,7 @@ jobs: uses: actions/checkout@v6 with: fetch-depth: 0 - persist-credentials: true + persist-credentials: false - name: Read monorepo version id: version @@ -64,6 +64,7 @@ jobs: git config user.name "github-actions[bot]" git config user.email "github-actions[bot]@users.noreply.github.com" + git remote set-url origin "https://x-access-token:${GH_TOKEN}@github.com/${{ github.repository }}.git" # Create annotated tag git tag -a "$TAG" -m "Monorepo release v${VERSION}" From c18d4e94a0d16641945be05d18feb9c334b413ff Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" 
<198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Feb 2026 11:50:09 +0000 Subject: [PATCH 27/90] fix: pin actions/checkout to SHA in all workflow files Co-authored-by: pmalarme <686568+pmalarme@users.noreply.github.com> --- .github/workflows/codeql-analysis.yml | 2 +- .github/workflows/monorepo-release.yml | 2 +- .github/workflows/python-code-quality.yml | 2 +- .github/workflows/python-docker-build.yml | 4 ++-- .github/workflows/python-docs.yml | 2 +- .github/workflows/python-package-build.yml | 2 +- .github/workflows/python-release.yml | 2 +- .github/workflows/python-tests.yml | 2 +- 8 files changed, 9 insertions(+), 9 deletions(-) diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index eab7aef..444abda 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -24,7 +24,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v6 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 with: persist-credentials: false diff --git a/.github/workflows/monorepo-release.yml b/.github/workflows/monorepo-release.yml index 1de50af..ec20c06 100644 --- a/.github/workflows/monorepo-release.yml +++ b/.github/workflows/monorepo-release.yml @@ -29,7 +29,7 @@ jobs: steps: - name: Checkout - uses: actions/checkout@v6 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 with: fetch-depth: 0 persist-credentials: true diff --git a/.github/workflows/python-code-quality.yml b/.github/workflows/python-code-quality.yml index 6263ac3..c122be1 100644 --- a/.github/workflows/python-code-quality.yml +++ b/.github/workflows/python-code-quality.yml @@ -19,7 +19,7 @@ jobs: steps: - name: Checkout - uses: actions/checkout@v6 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 with: persist-credentials: false diff --git a/.github/workflows/python-docker-build.yml b/.github/workflows/python-docker-build.yml index 919e8c3..fc77698 
100644 --- a/.github/workflows/python-docker-build.yml +++ b/.github/workflows/python-docker-build.yml @@ -22,7 +22,7 @@ jobs: steps: - name: Checkout - uses: actions/checkout@v6 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 with: fetch-depth: 0 persist-credentials: false @@ -63,7 +63,7 @@ jobs: steps: - name: Checkout - uses: actions/checkout@v6 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 with: persist-credentials: false diff --git a/.github/workflows/python-docs.yml b/.github/workflows/python-docs.yml index 4992552..94e006e 100644 --- a/.github/workflows/python-docs.yml +++ b/.github/workflows/python-docs.yml @@ -27,7 +27,7 @@ jobs: steps: - name: Checkout - uses: actions/checkout@v6 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 with: persist-credentials: false diff --git a/.github/workflows/python-package-build.yml b/.github/workflows/python-package-build.yml index 06687ef..aede2af 100644 --- a/.github/workflows/python-package-build.yml +++ b/.github/workflows/python-package-build.yml @@ -18,7 +18,7 @@ jobs: steps: - name: Checkout - uses: actions/checkout@v6 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 with: fetch-depth: 0 persist-credentials: false diff --git a/.github/workflows/python-release.yml b/.github/workflows/python-release.yml index 84c3dee..eef48e2 100644 --- a/.github/workflows/python-release.yml +++ b/.github/workflows/python-release.yml @@ -19,7 +19,7 @@ jobs: steps: - name: Checkout - uses: actions/checkout@v6 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 with: fetch-depth: 0 persist-credentials: true diff --git a/.github/workflows/python-tests.yml b/.github/workflows/python-tests.yml index c08caed..3324d61 100644 --- a/.github/workflows/python-tests.yml +++ b/.github/workflows/python-tests.yml @@ -19,7 +19,7 @@ jobs: steps: - name: Checkout - uses: actions/checkout@v6 + uses: 
actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 with: persist-credentials: false From 14d179b003e584e2365f6998ea0d1f0922f5691c Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Feb 2026 11:50:55 +0000 Subject: [PATCH 28/90] fix: use env var AGENT instead of inline matrix.agent in docker workflow Co-authored-by: pmalarme <686568+pmalarme@users.noreply.github.com> --- .github/workflows/python-docker-build.yml | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/.github/workflows/python-docker-build.yml b/.github/workflows/python-docker-build.yml index 919e8c3..9aa6d25 100644 --- a/.github/workflows/python-docker-build.yml +++ b/.github/workflows/python-docker-build.yml @@ -68,11 +68,15 @@ jobs: persist-credentials: false - name: Build Docker image + env: + AGENT: ${{ matrix.agent }} run: | docker build \ - -t "${{ matrix.agent }}:ci" \ - -f "agents/${{ matrix.agent }}/Dockerfile" \ - "agents/${{ matrix.agent }}" + -t "${AGENT}:ci" \ + -f "agents/${AGENT}/Dockerfile" \ + "agents/${AGENT}" - name: Smoke test - run: docker run --rm "${{ matrix.agent }}:ci" --help || true + env: + AGENT: ${{ matrix.agent }} + run: docker run --rm "${AGENT}:ci" --help || true From 72834064a8663f489be8e54ba4561a95c5d28c8c Mon Sep 17 00:00:00 2001 From: Pierre Malarme Date: Thu, 19 Feb 2026 13:12:43 +0100 Subject: [PATCH 29/90] Update .github/workflows/monorepo-release.yml Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- .github/workflows/monorepo-release.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/monorepo-release.yml b/.github/workflows/monorepo-release.yml index da09a16..1ac2efc 100644 --- a/.github/workflows/monorepo-release.yml +++ b/.github/workflows/monorepo-release.yml @@ -64,7 +64,8 @@ jobs: git config user.name "github-actions[bot]" git config user.email "github-actions[bot]@users.noreply.github.com" - 
git remote set-url origin "https://x-access-token:${GH_TOKEN}@github.com/${{ github.repository }}.git" + REMOTE_HOST="${GITHUB_SERVER_URL#https://}" + git remote set-url origin "https://x-access-token:${GH_TOKEN}@${REMOTE_HOST}/${{ github.repository }}.git" # Create annotated tag git tag -a "$TAG" -m "Monorepo release v${VERSION}" From 777c851aae3ad48a6869fceeb8c152696ebddf1b Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Feb 2026 12:18:08 +0000 Subject: [PATCH 30/90] docs: document GITHUB_TOKEN requirement for release workflows in README Co-authored-by: pmalarme <686568+pmalarme@users.noreply.github.com> --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index b6ad502..9bdc231 100644 --- a/README.md +++ b/README.md @@ -470,6 +470,8 @@ The [monorepo release workflow](.github/workflows/monorepo-release.yml) triggers 3. Creates an annotated tag and pushes it. 4. Creates a GitHub release with release notes generated from merged PRs. +Both the agent and monorepo release workflows require the automatic `GITHUB_TOKEN` with `contents: write` permission to create tags and GitHub releases. No manual secret setup is needed — GitHub provides this token automatically for every workflow run. The token is not persisted in the checkout step; it is explicitly injected only in the release step to minimize credential exposure. + ### Setting up publishing Publishing is **commented out** by default — the workflow only creates tags and GitHub releases. 
To enable it: From a234e18b24c2087db882138eb5f69c6a21d02662 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Feb 2026 12:36:46 +0000 Subject: [PATCH 31/90] fix: push to explicit URL instead of modifying git remote to avoid persisting credentials Co-authored-by: pmalarme <686568+pmalarme@users.noreply.github.com> --- .github/workflows/monorepo-release.yml | 7 ++++--- README.md | 2 +- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/.github/workflows/monorepo-release.yml b/.github/workflows/monorepo-release.yml index 1ac2efc..03f509f 100644 --- a/.github/workflows/monorepo-release.yml +++ b/.github/workflows/monorepo-release.yml @@ -64,12 +64,13 @@ jobs: git config user.name "github-actions[bot]" git config user.email "github-actions[bot]@users.noreply.github.com" - REMOTE_HOST="${GITHUB_SERVER_URL#https://}" - git remote set-url origin "https://x-access-token:${GH_TOKEN}@${REMOTE_HOST}/${{ github.repository }}.git" # Create annotated tag git tag -a "$TAG" -m "Monorepo release v${VERSION}" - git push origin "$TAG" + + # Push using explicit URL — token never stored in .git/config + REMOTE_HOST="${GITHUB_SERVER_URL#https://}" + git push "https://x-access-token:${GH_TOKEN}@${REMOTE_HOST}/${{ github.repository }}.git" "$TAG" # Generate release notes from merged PRs since previous monorepo tag PREV_TAG=$(git tag --list "v*" --sort=-v:refname \ diff --git a/README.md b/README.md index 9bdc231..42d1b85 100644 --- a/README.md +++ b/README.md @@ -470,7 +470,7 @@ The [monorepo release workflow](.github/workflows/monorepo-release.yml) triggers 3. Creates an annotated tag and pushes it. 4. Creates a GitHub release with release notes generated from merged PRs. -Both the agent and monorepo release workflows require the automatic `GITHUB_TOKEN` with `contents: write` permission to create tags and GitHub releases. 
No manual secret setup is needed — GitHub provides this token automatically for every workflow run. The token is not persisted in the checkout step; it is explicitly injected only in the release step to minimize credential exposure. +Both the agent and monorepo release workflows require the automatic `GITHUB_TOKEN` with `contents: write` permission to create tags and GitHub releases. No manual secret setup is needed — GitHub provides this token automatically for every workflow run. The token is never stored in `.git/config` — it is passed inline to `git push` and scoped to the `GH_TOKEN` step environment variable, so no credentials persist beyond the release step. ### Setting up publishing From 808aa174514bcd9c7867fc298ff965e18bbac04e Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Feb 2026 12:41:57 +0000 Subject: [PATCH 32/90] fix: apply persist-credentials: false and explicit push URL to python-release workflow Co-authored-by: pmalarme <686568+pmalarme@users.noreply.github.com> --- .github/workflows/python-release.yml | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/.github/workflows/python-release.yml b/.github/workflows/python-release.yml index 84c3dee..a244318 100644 --- a/.github/workflows/python-release.yml +++ b/.github/workflows/python-release.yml @@ -22,7 +22,7 @@ jobs: uses: actions/checkout@v6 with: fetch-depth: 0 - persist-credentials: true + persist-credentials: false - name: Setup Python environment uses: ./.github/actions/setup-python-env @@ -45,6 +45,10 @@ jobs: git config user.name "github-actions[bot]" git config user.email "github-actions[bot]@users.noreply.github.com" + # Build authenticated URL — token never stored in .git/config + REMOTE_HOST="${GITHUB_SERVER_URL#https://}" + PUSH_URL="https://x-access-token:${GH_TOKEN}@${REMOTE_HOST}/${{ github.repository }}.git" + for WHL in "${WHEELS[@]}"; do # Wheel filename: 
{name}-{version}-{python}-{abi}-{platform}.whl BASENAME=$(basename "$WHL") @@ -62,7 +66,7 @@ jobs: # Create annotated tag git tag -a "$TAG" -m "Release ${NAME} v${VERSION}" - git push origin "$TAG" + git push "$PUSH_URL" "$TAG" # Generate changelog from merged PRs since previous tag for this agent PREV_TAG=$(git tag --list "${NAME}-v*" --sort=-v:refname \ From 2e35535d28eb7f811b9f520207c1d036e505996a Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Feb 2026 12:44:33 +0000 Subject: [PATCH 33/90] Initial plan From 822d1a9628f1eb0f75ec60e40e910f28d214e188 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Feb 2026 12:46:17 +0000 Subject: [PATCH 34/90] fix: route action inputs and context values through env vars in shell scripts Co-authored-by: pmalarme <686568+pmalarme@users.noreply.github.com> --- .github/actions/setup-python-env/action.yml | 3 ++- .github/workflows/python-docker-build.yml | 7 +++++-- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/.github/actions/setup-python-env/action.yml b/.github/actions/setup-python-env/action.yml index 39d643b..f88bd52 100644 --- a/.github/actions/setup-python-env/action.yml +++ b/.github/actions/setup-python-env/action.yml @@ -28,9 +28,10 @@ runs: shell: bash env: EXTRA_ARGS: ${{ inputs.extra-args }} + INCLUDE_DOCS: ${{ inputs.include-docs }} run: | args="--all-extras --dev" - if [[ "${{ inputs.include-docs }}" == "true" ]]; then + if [[ "$INCLUDE_DOCS" == "true" ]]; then args="$args --group docs" fi # shellcheck disable=SC2086 # intentional word-splitting for uv flags diff --git a/.github/workflows/python-docker-build.yml b/.github/workflows/python-docker-build.yml index 969ea3f..7c7c650 100644 --- a/.github/workflows/python-docker-build.yml +++ b/.github/workflows/python-docker-build.yml @@ -29,10 +29,13 @@ jobs: - name: Find changed agents with Dockerfiles id: find + env: + 
EVENT_NAME: ${{ github.event_name }} + BASE_SHA: ${{ github.event.pull_request.base.sha }} run: | # Get the base ref to diff against - if [[ "${{ github.event_name }}" == "pull_request" ]]; then - BASE="${{ github.event.pull_request.base.sha }}" + if [[ "$EVENT_NAME" == "pull_request" ]]; then + BASE="$BASE_SHA" else BASE="HEAD~1" fi From 4aeb3bb2c3d99e516b845b7a01dd4114219de8b3 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Feb 2026 12:57:27 +0000 Subject: [PATCH 35/90] Initial plan From 62702604ac31743fd9deba4e9ccdb7ebb849fa65 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Feb 2026 12:57:58 +0000 Subject: [PATCH 36/90] Initial plan From 51f6e4f11aa60c02aa36c43571e016725e11998d Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Feb 2026 12:58:13 +0000 Subject: [PATCH 37/90] Initial plan From be0edae6e38c204de4311ace75d917b642ac94ea Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Feb 2026 12:58:31 +0000 Subject: [PATCH 38/90] Fix shell injection risk in monorepo-release.yml by using env: variables Co-authored-by: pmalarme <686568+pmalarme@users.noreply.github.com> --- .github/workflows/monorepo-release.yml | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/.github/workflows/monorepo-release.yml b/.github/workflows/monorepo-release.yml index c08b384..0d0016b 100644 --- a/.github/workflows/monorepo-release.yml +++ b/.github/workflows/monorepo-release.yml @@ -45,8 +45,9 @@ jobs: - name: Check if tag exists id: check + env: + TAG: ${{ steps.version.outputs.tag }} run: | - TAG="${{ steps.version.outputs.tag }}" if git rev-parse "refs/tags/${TAG}" >/dev/null 2>&1; then echo "::notice::Tag ${TAG} already exists — skipping release." 
echo "exists=true" >> "$GITHUB_OUTPUT" @@ -58,10 +59,10 @@ jobs: if: steps.check.outputs.exists == 'false' env: GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + TAG: ${{ steps.version.outputs.tag }} + VERSION: ${{ steps.version.outputs.version }} + REPO: ${{ github.repository }} run: | - TAG="${{ steps.version.outputs.tag }}" - VERSION="${{ steps.version.outputs.version }}" - git config user.name "github-actions[bot]" git config user.email "github-actions[bot]@users.noreply.github.com" @@ -70,7 +71,7 @@ jobs: # Push using explicit URL — token never stored in .git/config REMOTE_HOST="${GITHUB_SERVER_URL#https://}" - git push "https://x-access-token:${GH_TOKEN}@${REMOTE_HOST}/${{ github.repository }}.git" "$TAG" + git push "https://x-access-token:${GH_TOKEN}@${REMOTE_HOST}/${REPO}.git" "$TAG" # Generate release notes from merged PRs since previous monorepo tag PREV_TAG=$(git tag --list "v*" --sort=-v:refname \ From ba39fa6e85fb2143c5d46c2468f3775f998d7b2d Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Feb 2026 12:59:06 +0000 Subject: [PATCH 39/90] Pin actions/upload-pages-artifact and actions/deploy-pages to commit SHAs Co-authored-by: pmalarme <686568+pmalarme@users.noreply.github.com> --- .github/workflows/python-docs.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/python-docs.yml b/.github/workflows/python-docs.yml index 94e006e..ba23873 100644 --- a/.github/workflows/python-docs.yml +++ b/.github/workflows/python-docs.yml @@ -41,7 +41,7 @@ jobs: run: uv run python scripts/generate_docs.py - name: Upload Pages artifact - uses: actions/upload-pages-artifact@v4 + uses: actions/upload-pages-artifact@7b1f4a764d45c48632c6b24a0339c27f5614fb0b # v4.0.0 with: path: docs/generated @@ -57,4 +57,4 @@ jobs: steps: - name: Deploy to GitHub Pages id: deployment - uses: actions/deploy-pages@v4 + uses: actions/deploy-pages@d6db90164ac5ed86f2b6aed7e0febac5b3c0c03e # v4.0.5 From 
994a7c46f379d21834433d2eed7ed16e6e2eb745 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Feb 2026 12:59:21 +0000 Subject: [PATCH 40/90] Initial plan From 8f22b4f7864554888da54ca69fd484516f5cfbcb Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Feb 2026 13:00:02 +0000 Subject: [PATCH 41/90] Pin actions/deploy-pages and actions/upload-pages-artifact to commit SHAs Co-authored-by: pmalarme <686568+pmalarme@users.noreply.github.com> --- .github/workflows/python-docs.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/python-docs.yml b/.github/workflows/python-docs.yml index 94e006e..ba23873 100644 --- a/.github/workflows/python-docs.yml +++ b/.github/workflows/python-docs.yml @@ -41,7 +41,7 @@ jobs: run: uv run python scripts/generate_docs.py - name: Upload Pages artifact - uses: actions/upload-pages-artifact@v4 + uses: actions/upload-pages-artifact@7b1f4a764d45c48632c6b24a0339c27f5614fb0b # v4.0.0 with: path: docs/generated @@ -57,4 +57,4 @@ jobs: steps: - name: Deploy to GitHub Pages id: deployment - uses: actions/deploy-pages@v4 + uses: actions/deploy-pages@d6db90164ac5ed86f2b6aed7e0febac5b3c0c03e # v4.0.5 From eeeb4f1d47cff14a2f6ddd85ab10083b20ac0528 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Feb 2026 13:00:55 +0000 Subject: [PATCH 42/90] Fix: pass github.repository via env in release workflows Co-authored-by: pmalarme <686568+pmalarme@users.noreply.github.com> --- .github/workflows/monorepo-release.yml | 3 ++- .github/workflows/python-release.yml | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/.github/workflows/monorepo-release.yml b/.github/workflows/monorepo-release.yml index c08b384..3083325 100644 --- a/.github/workflows/monorepo-release.yml +++ b/.github/workflows/monorepo-release.yml @@ -58,6 
+58,7 @@ jobs: if: steps.check.outputs.exists == 'false' env: GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GH_REPOSITORY: ${{ github.repository }} run: | TAG="${{ steps.version.outputs.tag }}" VERSION="${{ steps.version.outputs.version }}" @@ -70,7 +71,7 @@ jobs: # Push using explicit URL — token never stored in .git/config REMOTE_HOST="${GITHUB_SERVER_URL#https://}" - git push "https://x-access-token:${GH_TOKEN}@${REMOTE_HOST}/${{ github.repository }}.git" "$TAG" + git push "https://x-access-token:${GH_TOKEN}@${REMOTE_HOST}/${GH_REPOSITORY}.git" "$TAG" # Generate release notes from merged PRs since previous monorepo tag PREV_TAG=$(git tag --list "v*" --sort=-v:refname \ diff --git a/.github/workflows/python-release.yml b/.github/workflows/python-release.yml index c9ca069..6b1b44e 100644 --- a/.github/workflows/python-release.yml +++ b/.github/workflows/python-release.yml @@ -33,6 +33,7 @@ jobs: - name: Tag and release built wheels env: GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GH_REPOSITORY: ${{ github.repository }} run: | shopt -s nullglob WHEELS=(dist/*.whl) @@ -47,7 +48,7 @@ jobs: # Build authenticated URL — token never stored in .git/config REMOTE_HOST="${GITHUB_SERVER_URL#https://}" - PUSH_URL="https://x-access-token:${GH_TOKEN}@${REMOTE_HOST}/${{ github.repository }}.git" + PUSH_URL="https://x-access-token:${GH_TOKEN}@${REMOTE_HOST}/${GH_REPOSITORY}.git" for WHL in "${WHEELS[@]}"; do # Wheel filename: {name}-{version}-{python}-{abi}-{platform}.whl From ddb8d5c910fc58c7c23ec0214699d22f7101416c Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Feb 2026 13:56:08 +0000 Subject: [PATCH 43/90] Initial plan From fcb6186acdcb3c3cb6ebc9cc138899b7b5be4da6 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Feb 2026 13:57:36 +0000 Subject: [PATCH 44/90] docs: add security note for extra-args in setup-python-env README Co-authored-by: pmalarme 
<686568+pmalarme@users.noreply.github.com> --- .github/actions/setup-python-env/README.md | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/.github/actions/setup-python-env/README.md b/.github/actions/setup-python-env/README.md index 7dba772..2694938 100644 --- a/.github/actions/setup-python-env/README.md +++ b/.github/actions/setup-python-env/README.md @@ -9,11 +9,21 @@ specific Python version and installs project dependencies via `uv sync`. |---|---|---|---| | `python-version` | No | `"3.13"` | Python version to install (e.g. `"3.13"`, `"3.10"`). | | `include-docs` | No | `"false"` | When `"true"`, adds `--group docs` to install Sphinx and related packages. | -| `extra-args` | No | `""` | Additional arguments appended to the `uv sync` command. | +| `extra-args` | No | `""` | Additional arguments appended to the `uv sync` command. Must be a trusted, static flag string — see [Security note](#security-note) below. | The base command is always `uv sync --all-extras --dev`. The `include-docs` flag and `extra-args` extend it. +## Security note + +`extra-args` is passed to `uv sync` via an environment variable and intentionally +word-split so that callers can supply multiple flags (e.g. +`--all-packages --prerelease=if-necessary-or-explicit`). Because of this word +splitting, **only hardcoded, static strings should be used**. Never pass +dynamic values sourced from issue bodies, PR descriptions, user-controlled +inputs, or any other external source, as those could introduce unexpected `uv +sync` flags and alter environment resolution behaviour. 
+ ## Usage ### Minimal (defaults to Python 3.13) From 5eae4d8867ee90357deb562df6e73af27935ea6a Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Feb 2026 13:58:11 +0000 Subject: [PATCH 45/90] Initial plan From 69d5b39bf65147c1d8c889aca6af03badad25f21 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Feb 2026 13:58:57 +0000 Subject: [PATCH 46/90] Initial plan From 9288d2254d05ddb9023f7292ce35ae455a1f08b0 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Feb 2026 14:00:57 +0000 Subject: [PATCH 47/90] docs: warn against ACTIONS_STEP_DEBUG in release workflows Co-authored-by: pmalarme <686568+pmalarme@users.noreply.github.com> --- .github/workflows/monorepo-release.yml | 4 +++- .github/workflows/python-release.yml | 4 +++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/.github/workflows/monorepo-release.yml b/.github/workflows/monorepo-release.yml index a5237c6..8ba0d32 100644 --- a/.github/workflows/monorepo-release.yml +++ b/.github/workflows/monorepo-release.yml @@ -69,7 +69,9 @@ jobs: # Create annotated tag git tag -a "$TAG" -m "Monorepo release v${VERSION}" - # Push using explicit URL — token never stored in .git/config + # Push using explicit URL — token never stored in .git/config. + # WARNING: do not enable ACTIONS_STEP_DEBUG on production runs of + # this workflow; debug logs expose the full push URL including the token. 
REMOTE_HOST="${GITHUB_SERVER_URL#https://}" git push "https://x-access-token:${GH_TOKEN}@${REMOTE_HOST}/${GH_REPOSITORY}.git" "$TAG" diff --git a/.github/workflows/python-release.yml b/.github/workflows/python-release.yml index 6b1b44e..6405a1c 100644 --- a/.github/workflows/python-release.yml +++ b/.github/workflows/python-release.yml @@ -46,7 +46,9 @@ jobs: git config user.name "github-actions[bot]" git config user.email "github-actions[bot]@users.noreply.github.com" - # Build authenticated URL — token never stored in .git/config + # Build authenticated URL — token never stored in .git/config. + # WARNING: do not enable ACTIONS_STEP_DEBUG on production runs of + # this workflow; debug logs expose the full push URL including the token. REMOTE_HOST="${GITHUB_SERVER_URL#https://}" PUSH_URL="https://x-access-token:${GH_TOKEN}@${REMOTE_HOST}/${GH_REPOSITORY}.git" From 290792fcf720148030ef2fbf12f51c09d2cde4c1 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Feb 2026 14:02:06 +0000 Subject: [PATCH 48/90] =?UTF-8?q?Use=20http.extraHeader=20for=20git=20push?= =?UTF-8?q?=20=E2=80=94=20token=20never=20embedded=20in=20URL=20or=20writt?= =?UTF-8?q?en=20to=20.git/config?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: pmalarme <686568+pmalarme@users.noreply.github.com> --- .github/workflows/monorepo-release.yml | 7 +++++-- .github/workflows/python-release.yml | 9 ++++++--- 2 files changed, 11 insertions(+), 5 deletions(-) diff --git a/.github/workflows/monorepo-release.yml b/.github/workflows/monorepo-release.yml index a5237c6..865365a 100644 --- a/.github/workflows/monorepo-release.yml +++ b/.github/workflows/monorepo-release.yml @@ -69,9 +69,12 @@ jobs: # Create annotated tag git tag -a "$TAG" -m "Monorepo release v${VERSION}" - # Push using explicit URL — token never stored in .git/config + # Push via Authorization header — token not embedded in URL, 
never written to .git/config REMOTE_HOST="${GITHUB_SERVER_URL#https://}" - git push "https://x-access-token:${GH_TOKEN}@${REMOTE_HOST}/${GH_REPOSITORY}.git" "$TAG" + ENCODED_TOKEN=$(printf '%s' "x-access-token:${GH_TOKEN}" | base64 -w0) + git -c "http.extraHeader=Authorization: basic ${ENCODED_TOKEN}" \ + push "https://${REMOTE_HOST}/${GH_REPOSITORY}.git" "$TAG" + unset ENCODED_TOKEN # Generate release notes from merged PRs since previous monorepo tag PREV_TAG=$(git tag --list "v*" --sort=-v:refname \ diff --git a/.github/workflows/python-release.yml b/.github/workflows/python-release.yml index 6b1b44e..81c6954 100644 --- a/.github/workflows/python-release.yml +++ b/.github/workflows/python-release.yml @@ -46,9 +46,11 @@ jobs: git config user.name "github-actions[bot]" git config user.email "github-actions[bot]@users.noreply.github.com" - # Build authenticated URL — token never stored in .git/config + # Authenticate via Authorization header — token not embedded in URL, never written to .git/config REMOTE_HOST="${GITHUB_SERVER_URL#https://}" - PUSH_URL="https://x-access-token:${GH_TOKEN}@${REMOTE_HOST}/${GH_REPOSITORY}.git" + ENCODED_TOKEN=$(printf '%s' "x-access-token:${GH_TOKEN}" | base64 -w0) + PUSH_CMD=(git -c "http.extraHeader=Authorization: basic ${ENCODED_TOKEN}" + push "https://${REMOTE_HOST}/${GH_REPOSITORY}.git") for WHL in "${WHEELS[@]}"; do # Wheel filename: {name}-{version}-{python}-{abi}-{platform}.whl @@ -67,7 +69,7 @@ jobs: # Create annotated tag git tag -a "$TAG" -m "Release ${NAME} v${VERSION}" - git push "$PUSH_URL" "$TAG" + "${PUSH_CMD[@]}" "$TAG" # Generate changelog from merged PRs since previous tag for this agent PREV_TAG=$(git tag --list "${NAME}-v*" --sort=-v:refname \ @@ -103,6 +105,7 @@ jobs: echo "::endgroup::" done + unset ENCODED_TOKEN PUSH_CMD # ── Publish to package registry ───────────────────────────────── # Uncomment ONE of the blocks below and configure the matching From 27d356b9db69c042073a1567217673db87a1a751 Mon Sep 17 
00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Feb 2026 14:07:25 +0000 Subject: [PATCH 49/90] =?UTF-8?q?Replace=20base64+extraHeader=20with=20cre?= =?UTF-8?q?dential.helper=20=E2=80=94=20GH=5FTOKEN=20used=20directly,=20st?= =?UTF-8?q?ays=20masked=20by=20GitHub=20Actions?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: pmalarme <686568+pmalarme@users.noreply.github.com> --- .github/workflows/monorepo-release.yml | 10 +++------- .github/workflows/python-release.yml | 14 +++++--------- 2 files changed, 8 insertions(+), 16 deletions(-) diff --git a/.github/workflows/monorepo-release.yml b/.github/workflows/monorepo-release.yml index 122cce8..4290e18 100644 --- a/.github/workflows/monorepo-release.yml +++ b/.github/workflows/monorepo-release.yml @@ -69,15 +69,11 @@ jobs: # Create annotated tag git tag -a "$TAG" -m "Monorepo release v${VERSION}" - # Push via Authorization header — token not embedded in URL, never written to .git/config - # Push using explicit URL — token never stored in .git/config. - # WARNING: do not enable ACTIONS_STEP_DEBUG on production runs of - # this workflow; debug logs expose the full push URL including the token. + # Push via inline credential helper — GH_TOKEN used directly so it stays masked by GitHub Actions; + # token is never written to .git/config and never embedded in a URL. 
REMOTE_HOST="${GITHUB_SERVER_URL#https://}" - ENCODED_TOKEN=$(printf '%s' "x-access-token:${GH_TOKEN}" | base64 -w0) - git -c "http.extraHeader=Authorization: basic ${ENCODED_TOKEN}" \ + git -c "credential.helper=!f() { printf 'username=x-access-token\npassword=%s\n' \"${GH_TOKEN}\"; }; f" \ push "https://${REMOTE_HOST}/${GH_REPOSITORY}.git" "$TAG" - unset ENCODED_TOKEN # Generate release notes from merged PRs since previous monorepo tag PREV_TAG=$(git tag --list "v*" --sort=-v:refname \ diff --git a/.github/workflows/python-release.yml b/.github/workflows/python-release.yml index 52979a5..695bf81 100644 --- a/.github/workflows/python-release.yml +++ b/.github/workflows/python-release.yml @@ -46,14 +46,10 @@ jobs: git config user.name "github-actions[bot]" git config user.email "github-actions[bot]@users.noreply.github.com" - # Authenticate via Authorization header — token not embedded in URL, never written to .git/config - # Build authenticated URL — token never stored in .git/config. - # WARNING: do not enable ACTIONS_STEP_DEBUG on production runs of - # this workflow; debug logs expose the full push URL including the token. + # Authenticate via inline credential helper — GH_TOKEN used directly so it stays masked by GitHub Actions; + # token is never written to .git/config and never embedded in a URL. 
REMOTE_HOST="${GITHUB_SERVER_URL#https://}" - ENCODED_TOKEN=$(printf '%s' "x-access-token:${GH_TOKEN}" | base64 -w0) - PUSH_CMD=(git -c "http.extraHeader=Authorization: basic ${ENCODED_TOKEN}" - push "https://${REMOTE_HOST}/${GH_REPOSITORY}.git") + PUSH_URL="https://${REMOTE_HOST}/${GH_REPOSITORY}.git" for WHL in "${WHEELS[@]}"; do # Wheel filename: {name}-{version}-{python}-{abi}-{platform}.whl @@ -72,7 +68,8 @@ jobs: # Create annotated tag git tag -a "$TAG" -m "Release ${NAME} v${VERSION}" - "${PUSH_CMD[@]}" "$TAG" + git -c "credential.helper=!f() { printf 'username=x-access-token\npassword=%s\n' \"${GH_TOKEN}\"; }; f" \ + push "$PUSH_URL" "$TAG" # Generate changelog from merged PRs since previous tag for this agent PREV_TAG=$(git tag --list "${NAME}-v*" --sort=-v:refname \ @@ -108,7 +105,6 @@ jobs: echo "::endgroup::" done - unset ENCODED_TOKEN PUSH_CMD # ── Publish to package registry ───────────────────────────────── # Uncomment ONE of the blocks below and configure the matching From 3f70fab72b9dc6b8aa12d141dd19c80b351baef2 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Feb 2026 14:18:10 +0000 Subject: [PATCH 50/90] Initial plan From 1ff1d5e59869a038da0571a4c9b490bb218b36fd Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Feb 2026 14:19:36 +0000 Subject: [PATCH 51/90] Initial plan From 86fdc0bfaaf26767282a855142b17a58fd36a0f1 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Feb 2026 14:19:56 +0000 Subject: [PATCH 52/90] Initial plan From 9e76438cf674d84d35b8c2da7aa0ece4b5df0c46 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Feb 2026 14:20:21 +0000 Subject: [PATCH 53/90] Initial plan From c287c590637dbd5c38bca04fd84f75d0d3471686 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" 
<198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Feb 2026 14:20:45 +0000 Subject: [PATCH 54/90] Remove packages:write permission from python-release workflow Co-authored-by: pmalarme <686568+pmalarme@users.noreply.github.com> --- .github/workflows/python-release.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/python-release.yml b/.github/workflows/python-release.yml index 695bf81..17e9c14 100644 --- a/.github/workflows/python-release.yml +++ b/.github/workflows/python-release.yml @@ -10,7 +10,6 @@ on: permissions: contents: write # create tags, releases, and upload assets - packages: write jobs: release: From 5dab66f241dfdb21e9784c432b740fc398c73aea Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Feb 2026 14:21:02 +0000 Subject: [PATCH 55/90] Fix smoke test to not swallow container exit codes unconditionally Co-authored-by: pmalarme <686568+pmalarme@users.noreply.github.com> --- .github/workflows/python-docker-build.yml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.github/workflows/python-docker-build.yml b/.github/workflows/python-docker-build.yml index 7c7c650..dd9b5b9 100644 --- a/.github/workflows/python-docker-build.yml +++ b/.github/workflows/python-docker-build.yml @@ -82,4 +82,7 @@ jobs: - name: Smoke test env: AGENT: ${{ matrix.agent }} - run: docker run --rm "${AGENT}:ci" --help || true + run: | + docker run --rm "${AGENT}:ci" --help 2>&1 || \ + docker run --rm "${AGENT}:ci" --version 2>&1 || \ + echo "::warning::Smoke test could not verify entrypoint for ${AGENT}" From c1d98d353bf535e14af9e7f63fe36dcaa63bcf5f Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Feb 2026 14:21:17 +0000 Subject: [PATCH 56/90] feat: add defensive guard to validate extra-args tokens in setup-python-env action Co-authored-by: pmalarme <686568+pmalarme@users.noreply.github.com> --- 
.github/actions/setup-python-env/README.md | 14 +++++++++----- .github/actions/setup-python-env/action.yml | 8 ++++++++ 2 files changed, 17 insertions(+), 5 deletions(-) diff --git a/.github/actions/setup-python-env/README.md b/.github/actions/setup-python-env/README.md index 2694938..695228b 100644 --- a/.github/actions/setup-python-env/README.md +++ b/.github/actions/setup-python-env/README.md @@ -18,11 +18,15 @@ and `extra-args` extend it. `extra-args` is passed to `uv sync` via an environment variable and intentionally word-split so that callers can supply multiple flags (e.g. -`--all-packages --prerelease=if-necessary-or-explicit`). Because of this word -splitting, **only hardcoded, static strings should be used**. Never pass -dynamic values sourced from issue bodies, PR descriptions, user-controlled -inputs, or any other external source, as those could introduce unexpected `uv -sync` flags and alter environment resolution behaviour. +`--all-packages --prerelease=if-necessary-or-explicit`). A defensive guard +validates each token before execution: every token must start with `-` and +contain only alphanumeric characters and safe flag characters (`=`, `.`, `:`, +`/`, `@`, `+`, `-`). Tokens that do not match this pattern cause the action to +fail immediately with an error. Despite this guard, **only hardcoded, static +strings should be used**. Never pass dynamic values sourced from issue bodies, +PR descriptions, user-controlled inputs, or any other external source, as those +could introduce unexpected `uv sync` flags and alter environment resolution +behaviour. 
## Usage diff --git a/.github/actions/setup-python-env/action.yml b/.github/actions/setup-python-env/action.yml index f88bd52..8e764b5 100644 --- a/.github/actions/setup-python-env/action.yml +++ b/.github/actions/setup-python-env/action.yml @@ -34,5 +34,13 @@ runs: if [[ "$INCLUDE_DOCS" == "true" ]]; then args="$args --group docs" fi + if [[ -n "$EXTRA_ARGS" ]]; then + for arg in $EXTRA_ARGS; do + if [[ ! "$arg" =~ ^-[a-zA-Z0-9=._:/@+-]+$ ]]; then + echo "::error::Unsafe extra-args token: '$arg'. Each token must start with '-' and contain only safe flag characters." >&2 + exit 1 + fi + done + fi # shellcheck disable=SC2086 # intentional word-splitting for uv flags uv sync $args $EXTRA_ARGS From 010950bea106067f3fe3f1a5d5d61bb561476630 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Feb 2026 14:22:44 +0000 Subject: [PATCH 57/90] Remove packages: write permission from python-release workflow Co-authored-by: pmalarme <686568+pmalarme@users.noreply.github.com> --- .github/workflows/python-release.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/python-release.yml b/.github/workflows/python-release.yml index 695bf81..17e9c14 100644 --- a/.github/workflows/python-release.yml +++ b/.github/workflows/python-release.yml @@ -10,7 +10,6 @@ on: permissions: contents: write # create tags, releases, and upload assets - packages: write jobs: release: From 10799dc71c85c036115a3ed8b4fd4ab5ae091442 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Feb 2026 14:53:25 +0000 Subject: [PATCH 58/90] Initial plan From c1732f35b59d770f9b60aa74f91edcce00567015 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Feb 2026 14:54:01 +0000 Subject: [PATCH 59/90] Initial plan From 8fa7b5519c8269d5458e87fac5e21903c8b41bba Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" 
<198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Feb 2026 14:54:15 +0000 Subject: [PATCH 60/90] Initial plan From 3bc5d88f1878a68e8fa35a56f5ce9524d1bef150 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Feb 2026 14:54:39 +0000 Subject: [PATCH 61/90] Add agent name allowlist validation in docker build workflow Co-authored-by: pmalarme <686568+pmalarme@users.noreply.github.com> --- .github/workflows/python-docker-build.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/python-docker-build.yml b/.github/workflows/python-docker-build.yml index dd9b5b9..8210313 100644 --- a/.github/workflows/python-docker-build.yml +++ b/.github/workflows/python-docker-build.yml @@ -46,6 +46,10 @@ jobs: | cut -d/ -f2 \ | sort -u \ | while read -r agent; do + if [[ ! "$agent" =~ ^[a-z0-9][a-z0-9_-]*$ ]]; then + echo "::warning::Skipping agent with invalid name: '$agent'" >&2 + continue + fi if [[ -f "agents/$agent/Dockerfile" ]]; then echo "$agent" fi From 2bfcc2f08df3fd7146c1da421cef9aa076c47ab7 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Feb 2026 14:54:48 +0000 Subject: [PATCH 62/90] fix: remove -U and --prerelease flags from docs workflow extra-args Co-authored-by: pmalarme <686568+pmalarme@users.noreply.github.com> --- .github/workflows/python-docs.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/python-docs.yml b/.github/workflows/python-docs.yml index ba23873..7fdd0c4 100644 --- a/.github/workflows/python-docs.yml +++ b/.github/workflows/python-docs.yml @@ -35,7 +35,7 @@ jobs: uses: ./.github/actions/setup-python-env with: include-docs: "true" - extra-args: "--all-packages -U --prerelease=if-necessary-or-explicit" + extra-args: "--all-packages" - name: Build documentation run: uv run python scripts/generate_docs.py From 5379c4d1e0b5abf36861c88f2700872dd72f3386 Mon Sep 17 
00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Feb 2026 14:55:17 +0000 Subject: [PATCH 63/90] Add security comments to extra-args validation in setup-python-env action Co-authored-by: pmalarme <686568+pmalarme@users.noreply.github.com> --- .github/actions/setup-python-env/action.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/actions/setup-python-env/action.yml b/.github/actions/setup-python-env/action.yml index 8e764b5..f4ea0e5 100644 --- a/.github/actions/setup-python-env/action.yml +++ b/.github/actions/setup-python-env/action.yml @@ -35,6 +35,8 @@ runs: args="$args --group docs" fi if [[ -n "$EXTRA_ARGS" ]]; then + # Validate each whitespace-split token: must start with '-' and contain only safe flag characters. + # NOTE: '--' (end-of-options marker) is intentionally allowed; positional args would fail validation. for arg in $EXTRA_ARGS; do if [[ ! "$arg" =~ ^-[a-zA-Z0-9=._:/@+-]+$ ]]; then echo "::error::Unsafe extra-args token: '$arg'. Each token must start with '-' and contain only safe flag characters." 
>&2 @@ -42,5 +44,5 @@ runs: fi done fi - # shellcheck disable=SC2086 # intentional word-splitting for uv flags + # shellcheck disable=SC2086 # intentional word-splitting for uv flags (all tokens validated above) uv sync $args $EXTRA_ARGS From d222fccf5e7394c762be52404f3fe5c7a8c60430 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Feb 2026 15:23:48 +0000 Subject: [PATCH 64/90] Initial plan From 325ffd7bb83e51d46e083cfa0ea5e3bfeec07f54 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Feb 2026 15:24:33 +0000 Subject: [PATCH 65/90] Initial plan From 17faad8e1c522e21b1efdf8b71055eae50086106 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Feb 2026 15:25:18 +0000 Subject: [PATCH 66/90] Add denylist for dangerous uv sync registry-override flags in setup-python-env action Co-authored-by: pmalarme <686568+pmalarme@users.noreply.github.com> --- .github/actions/setup-python-env/action.yml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.github/actions/setup-python-env/action.yml b/.github/actions/setup-python-env/action.yml index f4ea0e5..9455985 100644 --- a/.github/actions/setup-python-env/action.yml +++ b/.github/actions/setup-python-env/action.yml @@ -38,6 +38,11 @@ runs: # Validate each whitespace-split token: must start with '-' and contain only safe flag characters. # NOTE: '--' (end-of-options marker) is intentionally allowed; positional args would fail validation. for arg in $EXTRA_ARGS; do + # Denylist: block flags that could redirect dependency resolution to an attacker-controlled index. + if [[ "$arg" =~ ^--(index-url|extra-index-url|trusted-host|find-links)(=|$) ]]; then + echo "::error::Blocked dangerous extra-args token: '$arg'. Registry overrides are not permitted." >&2 + exit 1 + fi if [[ ! 
"$arg" =~ ^-[a-zA-Z0-9=._:/@+-]+$ ]]; then echo "::error::Unsafe extra-args token: '$arg'. Each token must start with '-' and contain only safe flag characters." >&2 exit 1 From 3f3f6e8faaee11970a34aee251ec7e203bcba1fe Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Feb 2026 15:25:59 +0000 Subject: [PATCH 67/90] Add NAME/VERSION format validation in python-release.yml Co-authored-by: pmalarme <686568+pmalarme@users.noreply.github.com> --- .github/workflows/python-release.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.github/workflows/python-release.yml b/.github/workflows/python-release.yml index 17e9c14..345c3b0 100644 --- a/.github/workflows/python-release.yml +++ b/.github/workflows/python-release.yml @@ -57,6 +57,12 @@ jobs: VERSION=$(echo "$BASENAME" | cut -d- -f2) TAG="${NAME}-v${VERSION}" + # Validate NAME and VERSION to prevent unexpected values from flowing into git commands + if [[ ! "$NAME" =~ ^[a-zA-Z0-9_-]+$ ]] || [[ ! "$VERSION" =~ ^[0-9a-zA-Z._-]+$ ]]; then + echo "::error::Unexpected wheel name/version format in '${BASENAME}' — skipping." >&2 + continue + fi + # Skip if tag already exists if git rev-parse "refs/tags/${TAG}" >/dev/null 2>&1; then echo "::notice::Tag ${TAG} already exists — skipping." From 3f5304647acdacb38bf7eb25271e77addf4eb585 Mon Sep 17 00:00:00 2001 From: pmalarme Date: Thu, 19 Feb 2026 18:24:22 +0100 Subject: [PATCH 68/90] feat: refactor security review workflows and documentation - Updated security review workflow to use `workflow_call` instead of `pull_request` triggers. - Removed `add-reviewer` safe-output and adjusted review submission logic to use `APPROVE` for medium/low issues. - Enhanced cache memory handling for pull request review context and recurring security patterns. - Improved documentation for security review agent and workflows, clarifying memory usage and review processes. 
- Updated README to reflect changes in CI workflows and agentic workflows. - Adjusted artifact upload and download steps for cache memory. --- .github/workflows/codeql-analysis.yml | 2 +- .github/workflows/copilot-review.lock.yml | 1065 +++++++++++++++ .github/workflows/copilot-review.md | 56 + .github/workflows/pr-orchestrator.yml | 62 + .../pr-review-comment-handler.lock.yml | 1154 +++++++++++++++++ .../workflows/pr-review-comment-handler.md | 148 +++ .github/workflows/python-code-quality.yml | 3 +- .github/workflows/python-docker-build.yml | 7 +- .github/workflows/python-package-build.yml | 6 +- .github/workflows/python-tests.yml | 3 +- .github/workflows/security-review.lock.yml | 166 ++- .github/workflows/security-review.md | 49 +- README.md | 124 +- 13 files changed, 2673 insertions(+), 172 deletions(-) create mode 100644 .github/workflows/copilot-review.lock.yml create mode 100644 .github/workflows/copilot-review.md create mode 100644 .github/workflows/pr-orchestrator.yml create mode 100644 .github/workflows/pr-review-comment-handler.lock.yml create mode 100644 .github/workflows/pr-review-comment-handler.md diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index eab7aef..a16188a 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -2,7 +2,7 @@ name: "CodeQL Analysis" on: workflow_dispatch: - pull_request: + workflow_call: push: branches: [ "main" ] schedule: diff --git a/.github/workflows/copilot-review.lock.yml b/.github/workflows/copilot-review.lock.yml new file mode 100644 index 0000000..86c529d --- /dev/null +++ b/.github/workflows/copilot-review.lock.yml @@ -0,0 +1,1065 @@ +# +# ___ _ _ +# / _ \ | | (_) +# | |_| | __ _ ___ _ __ | |_ _ ___ +# | _ |/ _` |/ _ \ '_ \| __| |/ __| +# | | | | (_| | __/ | | | |_| | (__ +# \_| |_/\__, |\___|_| |_|\__|_|\___| +# __/ | +# _ _ |___/ +# | | | | / _| | +# | | | | ___ _ __ _ __| |_| | _____ ____ +# | |/\| |/ _ \ '__| |/ /| _| |/ _ \ \ 
/\ / / ___| +# \ /\ / (_) | | | | ( | | | | (_) \ V V /\__ \ +# \/ \/ \___/|_| |_|\_\|_| |_|\___/ \_/\_/ |___/ +# +# This file was automatically generated by gh-aw (v0.46.3). DO NOT EDIT. +# +# To update this file, edit the corresponding .md file and run: +# gh aw compile +# Not all edits will cause changes to this file. +# +# For more information: https://github.github.com/gh-aw/introduction/overview/ +# +# Adds Copilot as a reviewer on a pull request after the security review agent approves it. Triggered when any review is submitted; only acts when the review is an approval from the security reviewer. +# +# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"dfaf0658c2e150cf04605979aab0d8c0565c077861dfe5118f012afa69870313","compiler_version":"v0.46.3"} + +name: "Add Copilot Reviewer After Security Approval" +"on": + pull_request_review: + types: + - submitted + +permissions: {} + +concurrency: + group: "gh-aw-${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}" + cancel-in-progress: true + +run-name: "Add Copilot Reviewer After Security Approval" + +jobs: + activation: + needs: pre_activation + if: needs.pre_activation.outputs.activated == 'true' + runs-on: ubuntu-slim + permissions: + contents: read + outputs: + body: ${{ steps.sanitized.outputs.body }} + comment_id: "" + comment_repo: "" + text: ${{ steps.sanitized.outputs.text }} + title: ${{ steps.sanitized.outputs.title }} + steps: + - name: Setup Scripts + uses: github/gh-aw/actions/setup@a70c5eada06553e3510ac27f2c3bda9d3705bccb # v0.46.3 + with: + destination: /opt/gh-aw/actions + - name: Validate context variables + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/validate_context_variables.cjs'); + await main(); + - name: Checkout .github and .agents folders + 
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + sparse-checkout: | + .github + .agents + fetch-depth: 1 + persist-credentials: false + - name: Check workflow file timestamps + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_WORKFLOW_FILE: "copilot-review.lock.yml" + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/check_workflow_timestamp_api.cjs'); + await main(); + - name: Compute current body text + id: sanitized + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/compute_text.cjs'); + await main(); + - name: Create prompt with built-in context + env: + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }} + GH_AW_GITHUB_ACTOR: ${{ github.actor }} + GH_AW_GITHUB_EVENT_COMMENT_ID: ${{ github.event.comment.id }} + GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER: ${{ github.event.discussion.number }} + GH_AW_GITHUB_EVENT_ISSUE_NUMBER: ${{ github.event.issue.number }} + GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER: ${{ github.event.pull_request.number }} + GH_AW_GITHUB_EVENT_REVIEW_ID: ${{ github.event.review.id }} + GH_AW_GITHUB_REPOSITORY: ${{ github.repository }} + GH_AW_GITHUB_RUN_ID: ${{ github.run_id }} + GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }} + GH_AW_IS_PR_COMMENT: ${{ github.event.issue.pull_request && 'true' || '' }} + run: | + bash /opt/gh-aw/actions/create_prompt_first.sh + cat << 'GH_AW_PROMPT_EOF' > "$GH_AW_PROMPT" + + GH_AW_PROMPT_EOF + cat "/opt/gh-aw/prompts/xpia.md" >> "$GH_AW_PROMPT" + cat "/opt/gh-aw/prompts/temp_folder_prompt.md" >> "$GH_AW_PROMPT" + cat 
"/opt/gh-aw/prompts/markdown.md" >> "$GH_AW_PROMPT" + cat << 'GH_AW_PROMPT_EOF' >> "$GH_AW_PROMPT" + + GitHub API Access Instructions + + The gh CLI is NOT authenticated. Do NOT use gh commands for GitHub operations. + + + To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls. + + Temporary IDs: Some safe output tools support a temporary ID field (usually named temporary_id) so you can reference newly-created items elsewhere in the SAME agent output (for example, using #aw_abc1 in a later body). + + **IMPORTANT - temporary_id format rules:** + - If you DON'T need to reference the item later, OMIT the temporary_id field entirely (it will be auto-generated if needed) + - If you DO need cross-references/chaining, you MUST match this EXACT validation regex: /^aw_[A-Za-z0-9]{3,8}$/i + - Format: aw_ prefix followed by 3 to 8 alphanumeric characters (A-Z, a-z, 0-9, case-insensitive) + - Valid alphanumeric characters: ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789 + - INVALID examples: aw_ab (too short), aw_123456789 (too long), aw_test-id (contains hyphen), aw_id_123 (contains underscore) + - VALID examples: aw_abc, aw_abc1, aw_Test123, aw_A1B2C3D4, aw_12345678 + - To generate valid IDs: use 3-8 random alphanumeric characters or omit the field to let the system auto-generate + + Do NOT invent other aw_* formats — downstream steps will reject them with validation errors matching against /^aw_[A-Za-z0-9]{3,8}$/i. + + Discover available tools from the safeoutputs MCP server. + + **Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped. + + **Note**: If you made no other safe output tool calls during this workflow execution, call the "noop" tool to provide a status message indicating completion or that no actions were needed. 
+ + + + The following GitHub context information is available for this workflow: + {{#if __GH_AW_GITHUB_ACTOR__ }} + - **actor**: __GH_AW_GITHUB_ACTOR__ + {{/if}} + {{#if __GH_AW_GITHUB_REPOSITORY__ }} + - **repository**: __GH_AW_GITHUB_REPOSITORY__ + {{/if}} + {{#if __GH_AW_GITHUB_WORKSPACE__ }} + - **workspace**: __GH_AW_GITHUB_WORKSPACE__ + {{/if}} + {{#if __GH_AW_GITHUB_EVENT_ISSUE_NUMBER__ }} + - **issue-number**: #__GH_AW_GITHUB_EVENT_ISSUE_NUMBER__ + {{/if}} + {{#if __GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER__ }} + - **discussion-number**: #__GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER__ + {{/if}} + {{#if __GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER__ }} + - **pull-request-number**: #__GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER__ + {{/if}} + {{#if __GH_AW_GITHUB_EVENT_COMMENT_ID__ }} + - **comment-id**: __GH_AW_GITHUB_EVENT_COMMENT_ID__ + {{/if}} + {{#if __GH_AW_GITHUB_RUN_ID__ }} + - **workflow-run-id**: __GH_AW_GITHUB_RUN_ID__ + {{/if}} + + + GH_AW_PROMPT_EOF + if [ "$GITHUB_EVENT_NAME" = "issue_comment" ] && [ -n "$GH_AW_IS_PR_COMMENT" ] || [ "$GITHUB_EVENT_NAME" = "pull_request_review_comment" ] || [ "$GITHUB_EVENT_NAME" = "pull_request_review" ]; then + cat "/opt/gh-aw/prompts/pr_context_prompt.md" >> "$GH_AW_PROMPT" + fi + cat << 'GH_AW_PROMPT_EOF' >> "$GH_AW_PROMPT" + + GH_AW_PROMPT_EOF + cat << 'GH_AW_PROMPT_EOF' >> "$GH_AW_PROMPT" + {{#runtime-import .github/workflows/copilot-review.md}} + GH_AW_PROMPT_EOF + - name: Interpolate variables and render templates + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER: ${{ github.event.pull_request.number }} + GH_AW_GITHUB_EVENT_REVIEW_ID: ${{ github.event.review.id }} + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/interpolate_prompt.cjs'); + await main(); + - name: 
Substitute placeholders + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + GH_AW_GITHUB_ACTOR: ${{ github.actor }} + GH_AW_GITHUB_EVENT_COMMENT_ID: ${{ github.event.comment.id }} + GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER: ${{ github.event.discussion.number }} + GH_AW_GITHUB_EVENT_ISSUE_NUMBER: ${{ github.event.issue.number }} + GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER: ${{ github.event.pull_request.number }} + GH_AW_GITHUB_EVENT_REVIEW_ID: ${{ github.event.review.id }} + GH_AW_GITHUB_REPOSITORY: ${{ github.repository }} + GH_AW_GITHUB_RUN_ID: ${{ github.run_id }} + GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }} + GH_AW_IS_PR_COMMENT: ${{ github.event.issue.pull_request && 'true' || '' }} + GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_ACTIVATED: ${{ needs.pre_activation.outputs.activated }} + GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_MATCHED_COMMAND: ${{ needs.pre_activation.outputs.matched_command }} + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + + const substitutePlaceholders = require('/opt/gh-aw/actions/substitute_placeholders.cjs'); + + // Call the substitution function + return await substitutePlaceholders({ + file: process.env.GH_AW_PROMPT, + substitutions: { + GH_AW_GITHUB_ACTOR: process.env.GH_AW_GITHUB_ACTOR, + GH_AW_GITHUB_EVENT_COMMENT_ID: process.env.GH_AW_GITHUB_EVENT_COMMENT_ID, + GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER: process.env.GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER, + GH_AW_GITHUB_EVENT_ISSUE_NUMBER: process.env.GH_AW_GITHUB_EVENT_ISSUE_NUMBER, + GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER: process.env.GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER, + GH_AW_GITHUB_EVENT_REVIEW_ID: process.env.GH_AW_GITHUB_EVENT_REVIEW_ID, + GH_AW_GITHUB_REPOSITORY: process.env.GH_AW_GITHUB_REPOSITORY, + GH_AW_GITHUB_RUN_ID: process.env.GH_AW_GITHUB_RUN_ID, + GH_AW_GITHUB_WORKSPACE: process.env.GH_AW_GITHUB_WORKSPACE, + 
GH_AW_IS_PR_COMMENT: process.env.GH_AW_IS_PR_COMMENT, + GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_ACTIVATED: process.env.GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_ACTIVATED, + GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_MATCHED_COMMAND: process.env.GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_MATCHED_COMMAND + } + }); + - name: Validate prompt placeholders + env: + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + run: bash /opt/gh-aw/actions/validate_prompt_placeholders.sh + - name: Print prompt + env: + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + run: bash /opt/gh-aw/actions/print_prompt_summary.sh + - name: Upload prompt artifact + if: success() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + with: + name: prompt + path: /tmp/gh-aw/aw-prompts/prompt.txt + retention-days: 1 + + agent: + needs: activation + runs-on: ubuntu-latest + permissions: + contents: read + pull-requests: read + env: + DEFAULT_BRANCH: ${{ github.event.repository.default_branch }} + GH_AW_ASSETS_ALLOWED_EXTS: "" + GH_AW_ASSETS_BRANCH: "" + GH_AW_ASSETS_MAX_SIZE_KB: 0 + GH_AW_MCP_LOG_DIR: /tmp/gh-aw/mcp-logs/safeoutputs + GH_AW_SAFE_OUTPUTS: /opt/gh-aw/safeoutputs/outputs.jsonl + GH_AW_SAFE_OUTPUTS_CONFIG_PATH: /opt/gh-aw/safeoutputs/config.json + GH_AW_SAFE_OUTPUTS_TOOLS_PATH: /opt/gh-aw/safeoutputs/tools.json + GH_AW_WORKFLOW_ID_SANITIZED: copilotreview + outputs: + checkout_pr_success: ${{ steps.checkout-pr.outputs.checkout_pr_success || 'true' }} + has_patch: ${{ steps.collect_output.outputs.has_patch }} + model: ${{ steps.generate_aw_info.outputs.model }} + output: ${{ steps.collect_output.outputs.output }} + output_types: ${{ steps.collect_output.outputs.output_types }} + secret_verification_result: ${{ steps.validate-secret.outputs.verification_result }} + steps: + - name: Setup Scripts + uses: github/gh-aw/actions/setup@a70c5eada06553e3510ac27f2c3bda9d3705bccb # v0.46.3 + with: + destination: /opt/gh-aw/actions + - name: Checkout repository + uses: 
actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + persist-credentials: false + - name: Create gh-aw temp directory + run: bash /opt/gh-aw/actions/create_gh_aw_tmp_dir.sh + - name: Configure Git credentials + env: + REPO_NAME: ${{ github.repository }} + SERVER_URL: ${{ github.server_url }} + run: | + git config --global user.email "github-actions[bot]@users.noreply.github.com" + git config --global user.name "github-actions[bot]" + # Re-authenticate git with GitHub token + SERVER_URL_STRIPPED="${SERVER_URL#https://}" + git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git" + echo "Git configured with standard GitHub Actions identity" + - name: Checkout PR branch + id: checkout-pr + if: | + github.event.pull_request + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + with: + github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/checkout_pr_branch.cjs'); + await main(); + - name: Generate agentic run info + id: generate_aw_info + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + script: | + const fs = require('fs'); + + const awInfo = { + engine_id: "copilot", + engine_name: "GitHub Copilot CLI", + model: process.env.GH_AW_MODEL_AGENT_COPILOT || "", + version: "", + agent_version: "0.0.411", + cli_version: "v0.46.3", + workflow_name: "Add Copilot Reviewer After Security Approval", + experimental: false, + supports_tools_allowlist: true, + run_id: context.runId, + run_number: context.runNumber, + run_attempt: process.env.GITHUB_RUN_ATTEMPT, + repository: context.repo.owner + 
'/' + context.repo.repo, + ref: context.ref, + sha: context.sha, + actor: context.actor, + event_name: context.eventName, + staged: false, + allowed_domains: ["defaults"], + firewall_enabled: true, + awf_version: "v0.20.0", + awmg_version: "v0.1.4", + steps: { + firewall: "squid" + }, + created_at: new Date().toISOString() + }; + + // Write to /tmp/gh-aw directory to avoid inclusion in PR + const tmpPath = '/tmp/gh-aw/aw_info.json'; + fs.writeFileSync(tmpPath, JSON.stringify(awInfo, null, 2)); + console.log('Generated aw_info.json at:', tmpPath); + console.log(JSON.stringify(awInfo, null, 2)); + + // Set model as output for reuse in other steps/jobs + core.setOutput('model', awInfo.model); + - name: Validate COPILOT_GITHUB_TOKEN secret + id: validate-secret + run: /opt/gh-aw/actions/validate_multi_secret.sh COPILOT_GITHUB_TOKEN 'GitHub Copilot CLI' https://github.github.com/gh-aw/reference/engines/#github-copilot-default + env: + COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }} + - name: Install GitHub Copilot CLI + run: /opt/gh-aw/actions/install_copilot_cli.sh 0.0.411 + - name: Install awf binary + run: bash /opt/gh-aw/actions/install_awf_binary.sh v0.20.0 + - name: Determine automatic lockdown mode for GitHub MCP Server + id: determine-automatic-lockdown + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_GITHUB_TOKEN: ${{ secrets.GH_AW_GITHUB_TOKEN }} + GH_AW_GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN }} + with: + script: | + const determineAutomaticLockdown = require('/opt/gh-aw/actions/determine_automatic_lockdown.cjs'); + await determineAutomaticLockdown(github, context, core); + - name: Download container images + run: bash /opt/gh-aw/actions/download_docker_images.sh ghcr.io/github/gh-aw-firewall/agent:0.20.0 ghcr.io/github/gh-aw-firewall/api-proxy:0.20.0 ghcr.io/github/gh-aw-firewall/squid:0.20.0 ghcr.io/github/gh-aw-mcpg:v0.1.4 ghcr.io/github/github-mcp-server:v0.30.3 node:lts-alpine 
+ - name: Write Safe Outputs Config + run: | + mkdir -p /opt/gh-aw/safeoutputs + mkdir -p /tmp/gh-aw/safeoutputs + mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs + cat > /opt/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_EOF' + {"add_reviewer":{"max":1,"reviewers":["copilot"]},"missing_data":{},"missing_tool":{},"noop":{"max":1}} + GH_AW_SAFE_OUTPUTS_CONFIG_EOF + cat > /opt/gh-aw/safeoutputs/tools.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_EOF' + [ + { + "description": "Add reviewers to a GitHub pull request. Reviewers receive notifications and can approve or request changes. Use 'copilot' as a reviewer name to request the Copilot PR review bot. CONSTRAINTS: Maximum 1 reviewer(s) can be added.", + "inputSchema": { + "additionalProperties": false, + "properties": { + "pull_request_number": { + "description": "Pull request number to add reviewers to. This is the numeric ID from the GitHub URL (e.g., 876 in github.com/owner/repo/pull/876). If omitted, adds reviewers to the PR that triggered this workflow.", + "type": [ + "number", + "string" + ] + }, + "reviewers": { + "description": "GitHub usernames to add as reviewers (e.g., ['octocat', 'copilot']). Users must have access to the repository.", + "items": { + "type": "string" + }, + "type": "array" + } + }, + "required": [ + "reviewers" + ], + "type": "object" + }, + "name": "add_reviewer" + }, + { + "description": "Report that a tool or capability needed to complete the task is not available, or share any information you deem important about missing functionality or limitations. 
Use this when you cannot accomplish what was requested because the required functionality is missing or access is restricted.", + "inputSchema": { + "additionalProperties": false, + "properties": { + "alternatives": { + "description": "Any workarounds, manual steps, or alternative approaches the user could take (max 256 characters).", + "type": "string" + }, + "reason": { + "description": "Explanation of why this tool is needed or what information you want to share about the limitation (max 256 characters).", + "type": "string" + }, + "tool": { + "description": "Optional: Name or description of the missing tool or capability (max 128 characters). Be specific about what functionality is needed.", + "type": "string" + } + }, + "required": [ + "reason" + ], + "type": "object" + }, + "name": "missing_tool" + }, + { + "description": "Log a transparency message when no significant actions are needed. Use this to confirm workflow completion and provide visibility when analysis is complete but no changes or outputs are required (e.g., 'No issues found', 'All checks passed'). This ensures the workflow produces human-visible output even when no other actions are taken.", + "inputSchema": { + "additionalProperties": false, + "properties": { + "message": { + "description": "Status or completion message to log. Should explain what was analyzed and the outcome (e.g., 'Code review complete - no issues found', 'Analysis complete - all tests passing').", + "type": "string" + } + }, + "required": [ + "message" + ], + "type": "object" + }, + "name": "noop" + }, + { + "description": "Report that data or information needed to complete the task is not available. 
Use this when you cannot accomplish what was requested because required data, context, or information is missing.", + "inputSchema": { + "additionalProperties": false, + "properties": { + "alternatives": { + "description": "Any workarounds, manual steps, or alternative approaches the user could take (max 256 characters).", + "type": "string" + }, + "context": { + "description": "Additional context about the missing data or where it should come from (max 256 characters).", + "type": "string" + }, + "data_type": { + "description": "Type or description of the missing data or information (max 128 characters). Be specific about what data is needed.", + "type": "string" + }, + "reason": { + "description": "Explanation of why this data is needed to complete the task (max 256 characters).", + "type": "string" + } + }, + "required": [], + "type": "object" + }, + "name": "missing_data" + } + ] + GH_AW_SAFE_OUTPUTS_TOOLS_EOF + cat > /opt/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_EOF' + { + "add_reviewer": { + "defaultMax": 3, + "fields": { + "pull_request_number": { + "issueOrPRNumber": true + }, + "reviewers": { + "required": true, + "type": "array", + "itemType": "string", + "itemSanitize": true, + "itemMaxLength": 39 + } + } + }, + "missing_tool": { + "defaultMax": 20, + "fields": { + "alternatives": { + "type": "string", + "sanitize": true, + "maxLength": 512 + }, + "reason": { + "required": true, + "type": "string", + "sanitize": true, + "maxLength": 256 + }, + "tool": { + "type": "string", + "sanitize": true, + "maxLength": 128 + } + } + }, + "noop": { + "defaultMax": 1, + "fields": { + "message": { + "required": true, + "type": "string", + "sanitize": true, + "maxLength": 65000 + } + } + } + } + GH_AW_SAFE_OUTPUTS_VALIDATION_EOF + - name: Generate Safe Outputs MCP Server Config + id: safe-outputs-config + run: | + # Generate a secure random API key (360 bits of entropy, 40+ chars) + # Mask immediately to prevent timing vulnerabilities + 
API_KEY=$(openssl rand -base64 45 | tr -d '/+=') + echo "::add-mask::${API_KEY}" + + PORT=3001 + + # Set outputs for next steps + { + echo "safe_outputs_api_key=${API_KEY}" + echo "safe_outputs_port=${PORT}" + } >> "$GITHUB_OUTPUT" + + echo "Safe Outputs MCP server will run on port ${PORT}" + + - name: Start Safe Outputs MCP HTTP Server + id: safe-outputs-start + env: + DEBUG: '*' + GH_AW_SAFE_OUTPUTS_PORT: ${{ steps.safe-outputs-config.outputs.safe_outputs_port }} + GH_AW_SAFE_OUTPUTS_API_KEY: ${{ steps.safe-outputs-config.outputs.safe_outputs_api_key }} + GH_AW_SAFE_OUTPUTS_TOOLS_PATH: /opt/gh-aw/safeoutputs/tools.json + GH_AW_SAFE_OUTPUTS_CONFIG_PATH: /opt/gh-aw/safeoutputs/config.json + GH_AW_MCP_LOG_DIR: /tmp/gh-aw/mcp-logs/safeoutputs + run: | + # Environment variables are set above to prevent template injection + export DEBUG + export GH_AW_SAFE_OUTPUTS_PORT + export GH_AW_SAFE_OUTPUTS_API_KEY + export GH_AW_SAFE_OUTPUTS_TOOLS_PATH + export GH_AW_SAFE_OUTPUTS_CONFIG_PATH + export GH_AW_MCP_LOG_DIR + + bash /opt/gh-aw/actions/start_safe_outputs_server.sh + + - name: Start MCP Gateway + id: start-mcp-gateway + env: + GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }} + GH_AW_SAFE_OUTPUTS_API_KEY: ${{ steps.safe-outputs-start.outputs.api_key }} + GH_AW_SAFE_OUTPUTS_PORT: ${{ steps.safe-outputs-start.outputs.port }} + GITHUB_MCP_LOCKDOWN: ${{ steps.determine-automatic-lockdown.outputs.lockdown == 'true' && '1' || '0' }} + GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + run: | + set -eo pipefail + mkdir -p /tmp/gh-aw/mcp-config + + # Export gateway environment variables for MCP config and gateway script + export MCP_GATEWAY_PORT="80" + export MCP_GATEWAY_DOMAIN="host.docker.internal" + MCP_GATEWAY_API_KEY=$(openssl rand -base64 45 | tr -d '/+=') + echo "::add-mask::${MCP_GATEWAY_API_KEY}" + export MCP_GATEWAY_API_KEY + export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" + mkdir -p 
"${MCP_GATEWAY_PAYLOAD_DIR}" + export DEBUG="*" + + export GH_AW_ENGINE="copilot" + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.4' + + mkdir -p /home/runner/.copilot + cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh + { + "mcpServers": { + "github": { + "type": "stdio", + "container": "ghcr.io/github/github-mcp-server:v0.30.3", + "env": { + "GITHUB_LOCKDOWN_MODE": "$GITHUB_MCP_LOCKDOWN", + "GITHUB_PERSONAL_ACCESS_TOKEN": "\${GITHUB_MCP_SERVER_TOKEN}", + "GITHUB_READ_ONLY": "1", + "GITHUB_TOOLSETS": "repos,pull_requests" + } + }, + "safeoutputs": { + "type": "http", + "url": "http://host.docker.internal:$GH_AW_SAFE_OUTPUTS_PORT", + "headers": { + "Authorization": "\${GH_AW_SAFE_OUTPUTS_API_KEY}" + } + } + }, + "gateway": { + "port": $MCP_GATEWAY_PORT, + "domain": "${MCP_GATEWAY_DOMAIN}", + "apiKey": "${MCP_GATEWAY_API_KEY}", + 
"payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + } + } + GH_AW_MCP_CONFIG_EOF + - name: Generate workflow overview + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + script: | + const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); + await generateWorkflowOverview(core); + - name: Download prompt artifact + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + with: + name: prompt + path: /tmp/gh-aw/aw-prompts + - name: Clean git credentials + run: bash /opt/gh-aw/actions/clean_git_credentials.sh + - name: Execute GitHub Copilot CLI + id: agentic_execution + # Copilot CLI tool arguments (sorted): + timeout-minutes: 20 + run: | + set -o pipefail + sudo -E awf --env-all --container-workdir "${GITHUB_WORKSPACE}" --allow-domains api.business.githubcopilot.com,api.enterprise.githubcopilot.com,api.github.com,api.githubcopilot.com,api.individual.githubcopilot.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,github.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,ppa.launchpad.net,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,telemetry.enterprise.githubcopilot.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --enable-host-access --image-tag 0.20.0 --skip-pull --enable-api-proxy \ + -- /bin/bash -c '/usr/local/bin/copilot --add-dir /tmp/gh-aw/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --add-dir "${GITHUB_WORKSPACE}" 
--disable-builtin-mcps --allow-all-tools --allow-all-paths --share /tmp/gh-aw/sandbox/agent/logs/conversation.md --prompt "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"${GH_AW_MODEL_AGENT_COPILOT:+ --model "$GH_AW_MODEL_AGENT_COPILOT"}' 2>&1 | tee -a /tmp/gh-aw/agent-stdio.log + env: + COPILOT_AGENT_RUNNER_TYPE: STANDALONE + COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }} + GH_AW_MCP_CONFIG: /home/runner/.copilot/mcp-config.json + GH_AW_MODEL_AGENT_COPILOT: ${{ vars.GH_AW_MODEL_AGENT_COPILOT || '' }} + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }} + GITHUB_HEAD_REF: ${{ github.head_ref }} + GITHUB_REF_NAME: ${{ github.ref_name }} + GITHUB_STEP_SUMMARY: ${{ env.GITHUB_STEP_SUMMARY }} + GITHUB_WORKSPACE: ${{ github.workspace }} + XDG_CONFIG_HOME: /home/runner + - name: Configure Git credentials + env: + REPO_NAME: ${{ github.repository }} + SERVER_URL: ${{ github.server_url }} + run: | + git config --global user.email "github-actions[bot]@users.noreply.github.com" + git config --global user.name "github-actions[bot]" + # Re-authenticate git with GitHub token + SERVER_URL_STRIPPED="${SERVER_URL#https://}" + git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git" + echo "Git configured with standard GitHub Actions identity" + - name: Copy Copilot session state files to logs + if: always() + continue-on-error: true + run: | + # Copy Copilot session state files to logs folder for artifact collection + # This ensures they are in /tmp/gh-aw/ where secret redaction can scan them + SESSION_STATE_DIR="$HOME/.copilot/session-state" + LOGS_DIR="/tmp/gh-aw/sandbox/agent/logs" + + if [ -d "$SESSION_STATE_DIR" ]; then + echo "Copying Copilot session state files from $SESSION_STATE_DIR to $LOGS_DIR" + mkdir -p "$LOGS_DIR" + cp -v "$SESSION_STATE_DIR"/*.jsonl "$LOGS_DIR/" 2>/dev/null || true + echo "Session state files copied successfully" + else + echo "No session-state 
directory found at $SESSION_STATE_DIR" + fi + - name: Stop MCP Gateway + if: always() + continue-on-error: true + env: + MCP_GATEWAY_PORT: ${{ steps.start-mcp-gateway.outputs.gateway-port }} + MCP_GATEWAY_API_KEY: ${{ steps.start-mcp-gateway.outputs.gateway-api-key }} + GATEWAY_PID: ${{ steps.start-mcp-gateway.outputs.gateway-pid }} + run: | + bash /opt/gh-aw/actions/stop_mcp_gateway.sh "$GATEWAY_PID" + - name: Redact secrets in logs + if: always() + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/redact_secrets.cjs'); + await main(); + env: + GH_AW_SECRET_NAMES: 'COPILOT_GITHUB_TOKEN,GH_AW_GITHUB_MCP_SERVER_TOKEN,GH_AW_GITHUB_TOKEN,GITHUB_TOKEN' + SECRET_COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }} + SECRET_GH_AW_GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN }} + SECRET_GH_AW_GITHUB_TOKEN: ${{ secrets.GH_AW_GITHUB_TOKEN }} + SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Upload Safe Outputs + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + with: + name: safe-output + path: ${{ env.GH_AW_SAFE_OUTPUTS }} + if-no-files-found: warn + - name: Ingest agent output + id: collect_output + if: always() + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }} + GH_AW_ALLOWED_DOMAINS: 
"api.business.githubcopilot.com,api.enterprise.githubcopilot.com,api.github.com,api.githubcopilot.com,api.individual.githubcopilot.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,github.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,ppa.launchpad.net,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,telemetry.enterprise.githubcopilot.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com" + GITHUB_SERVER_URL: ${{ github.server_url }} + GITHUB_API_URL: ${{ github.api_url }} + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/collect_ndjson_output.cjs'); + await main(); + - name: Upload sanitized agent output + if: always() && env.GH_AW_AGENT_OUTPUT + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + with: + name: agent-output + path: ${{ env.GH_AW_AGENT_OUTPUT }} + if-no-files-found: warn + - name: Upload engine output files + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + with: + name: agent_outputs + path: | + /tmp/gh-aw/sandbox/agent/logs/ + /tmp/gh-aw/redacted-urls.log + if-no-files-found: ignore + - name: Parse agent logs for step summary + if: always() + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_AGENT_OUTPUT: /tmp/gh-aw/sandbox/agent/logs/ + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + 
setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/parse_copilot_log.cjs'); + await main(); + - name: Parse MCP Gateway logs for step summary + if: always() + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/parse_mcp_gateway_log.cjs'); + await main(); + - name: Print firewall logs + if: always() + continue-on-error: true + env: + AWF_LOGS_DIR: /tmp/gh-aw/sandbox/firewall/logs + run: | + # Fix permissions on firewall logs so they can be uploaded as artifacts + # AWF runs with sudo, creating files owned by root + sudo chmod -R a+r /tmp/gh-aw/sandbox/firewall/logs 2>/dev/null || true + # Only run awf logs summary if awf command exists (it may not be installed if workflow failed before install step) + if command -v awf &> /dev/null; then + awf logs summary | tee -a "$GITHUB_STEP_SUMMARY" + else + echo 'AWF binary not installed, skipping firewall log summary' + fi + - name: Upload agent artifacts + if: always() + continue-on-error: true + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + with: + name: agent-artifacts + path: | + /tmp/gh-aw/aw-prompts/prompt.txt + /tmp/gh-aw/aw_info.json + /tmp/gh-aw/mcp-logs/ + /tmp/gh-aw/sandbox/firewall/logs/ + /tmp/gh-aw/agent-stdio.log + /tmp/gh-aw/agent/ + if-no-files-found: ignore + + conclusion: + needs: + - activation + - agent + - detection + - safe_outputs + if: (always()) && (needs.agent.result != 'skipped') + runs-on: ubuntu-slim + permissions: + contents: read + pull-requests: write + outputs: + noop_message: ${{ steps.noop.outputs.noop_message }} + tools_reported: ${{ steps.missing_tool.outputs.tools_reported }} + total_count: ${{ steps.missing_tool.outputs.total_count }} + steps: + - name: Setup Scripts + uses: 
github/gh-aw/actions/setup@a70c5eada06553e3510ac27f2c3bda9d3705bccb # v0.46.3 + with: + destination: /opt/gh-aw/actions + - name: Download agent output artifact + continue-on-error: true + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + with: + name: agent-output + path: /tmp/gh-aw/safeoutputs/ + - name: Setup agent output environment variable + run: | + mkdir -p /tmp/gh-aw/safeoutputs/ + find "/tmp/gh-aw/safeoutputs/" -type f -print + echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV" + - name: Process No-Op Messages + id: noop + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }} + GH_AW_NOOP_MAX: 1 + GH_AW_WORKFLOW_NAME: "Add Copilot Reviewer After Security Approval" + with: + github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/noop.cjs'); + await main(); + - name: Record Missing Tool + id: missing_tool + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }} + GH_AW_WORKFLOW_NAME: "Add Copilot Reviewer After Security Approval" + with: + github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/missing_tool.cjs'); + await main(); + - name: Handle Agent Failure + id: handle_agent_failure + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }} + GH_AW_WORKFLOW_NAME: "Add Copilot Reviewer After Security Approval" + GH_AW_RUN_URL: ${{ github.server_url }}/${{ 
github.repository }}/actions/runs/${{ github.run_id }} + GH_AW_AGENT_CONCLUSION: ${{ needs.agent.result }} + GH_AW_WORKFLOW_ID: "copilot-review" + GH_AW_SECRET_VERIFICATION_RESULT: ${{ needs.agent.outputs.secret_verification_result }} + GH_AW_CHECKOUT_PR_SUCCESS: ${{ needs.agent.outputs.checkout_pr_success }} + GH_AW_GROUP_REPORTS: "false" + with: + github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/handle_agent_failure.cjs'); + await main(); + - name: Handle No-Op Message + id: handle_noop_message + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }} + GH_AW_WORKFLOW_NAME: "Add Copilot Reviewer After Security Approval" + GH_AW_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + GH_AW_AGENT_CONCLUSION: ${{ needs.agent.result }} + GH_AW_NOOP_MESSAGE: ${{ steps.noop.outputs.noop_message }} + GH_AW_NOOP_REPORT_AS_ISSUE: "true" + with: + github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/handle_noop_message.cjs'); + await main(); + + detection: + needs: agent + if: needs.agent.outputs.output_types != '' || needs.agent.outputs.has_patch == 'true' + runs-on: ubuntu-latest + permissions: {} + timeout-minutes: 10 + outputs: + success: ${{ steps.parse_results.outputs.success }} + steps: + - name: Setup Scripts + uses: github/gh-aw/actions/setup@a70c5eada06553e3510ac27f2c3bda9d3705bccb # v0.46.3 + with: + destination: /opt/gh-aw/actions + - name: Download agent artifacts + continue-on-error: true + uses: 
actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + with: + name: agent-artifacts + path: /tmp/gh-aw/threat-detection/ + - name: Download agent output artifact + continue-on-error: true + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + with: + name: agent-output + path: /tmp/gh-aw/threat-detection/ + - name: Echo agent output types + env: + AGENT_OUTPUT_TYPES: ${{ needs.agent.outputs.output_types }} + run: | + echo "Agent output-types: $AGENT_OUTPUT_TYPES" + - name: Setup threat detection + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + WORKFLOW_NAME: "Add Copilot Reviewer After Security Approval" + WORKFLOW_DESCRIPTION: "Adds Copilot as a reviewer on a pull request after the security review agent approves it. Triggered when any review is submitted; only acts when the review is an approval from the security reviewer." + HAS_PATCH: ${{ needs.agent.outputs.has_patch }} + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/setup_threat_detection.cjs'); + await main(); + - name: Ensure threat-detection directory and log + run: | + mkdir -p /tmp/gh-aw/threat-detection + touch /tmp/gh-aw/threat-detection/detection.log + - name: Validate COPILOT_GITHUB_TOKEN secret + id: validate-secret + run: /opt/gh-aw/actions/validate_multi_secret.sh COPILOT_GITHUB_TOKEN 'GitHub Copilot CLI' https://github.github.com/gh-aw/reference/engines/#github-copilot-default + env: + COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }} + - name: Install GitHub Copilot CLI + run: /opt/gh-aw/actions/install_copilot_cli.sh 0.0.411 + - name: Execute GitHub Copilot CLI + id: agentic_execution + # Copilot CLI tool arguments (sorted): + # --allow-tool shell(cat) + # --allow-tool shell(grep) + # --allow-tool shell(head) + # --allow-tool shell(jq) + # --allow-tool shell(ls) + 
# --allow-tool shell(tail) + # --allow-tool shell(wc) + timeout-minutes: 20 + run: | + set -o pipefail + COPILOT_CLI_INSTRUCTION="$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" + mkdir -p /tmp/ + mkdir -p /tmp/gh-aw/ + mkdir -p /tmp/gh-aw/agent/ + mkdir -p /tmp/gh-aw/sandbox/agent/logs/ + copilot --add-dir /tmp/ --add-dir /tmp/gh-aw/ --add-dir /tmp/gh-aw/agent/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --disable-builtin-mcps --allow-tool 'shell(cat)' --allow-tool 'shell(grep)' --allow-tool 'shell(head)' --allow-tool 'shell(jq)' --allow-tool 'shell(ls)' --allow-tool 'shell(tail)' --allow-tool 'shell(wc)' --share /tmp/gh-aw/sandbox/agent/logs/conversation.md --prompt "$COPILOT_CLI_INSTRUCTION"${GH_AW_MODEL_DETECTION_COPILOT:+ --model "$GH_AW_MODEL_DETECTION_COPILOT"} 2>&1 | tee /tmp/gh-aw/threat-detection/detection.log + env: + COPILOT_AGENT_RUNNER_TYPE: STANDALONE + COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }} + GH_AW_MODEL_DETECTION_COPILOT: ${{ vars.GH_AW_MODEL_DETECTION_COPILOT || '' }} + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + GITHUB_HEAD_REF: ${{ github.head_ref }} + GITHUB_REF_NAME: ${{ github.ref_name }} + GITHUB_STEP_SUMMARY: ${{ env.GITHUB_STEP_SUMMARY }} + GITHUB_WORKSPACE: ${{ github.workspace }} + XDG_CONFIG_HOME: /home/runner + - name: Parse threat detection results + id: parse_results + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/parse_threat_detection_results.cjs'); + await main(); + - name: Upload threat detection log + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + with: + name: threat-detection.log + path: /tmp/gh-aw/threat-detection/detection.log + if-no-files-found: ignore + + pre_activation: + runs-on: ubuntu-slim + outputs: + activated: ${{ 
steps.check_membership.outputs.is_team_member == 'true' }} + steps: + - name: Setup Scripts + uses: github/gh-aw/actions/setup@a70c5eada06553e3510ac27f2c3bda9d3705bccb # v0.46.3 + with: + destination: /opt/gh-aw/actions + - name: Check team membership for workflow + id: check_membership + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_REQUIRED_ROLES: admin,maintainer,write + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/check_membership.cjs'); + await main(); + + safe_outputs: + needs: + - agent + - detection + if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (needs.detection.outputs.success == 'true') + runs-on: ubuntu-slim + permissions: + contents: read + pull-requests: write + timeout-minutes: 15 + env: + GH_AW_ENGINE_ID: "copilot" + GH_AW_WORKFLOW_ID: "copilot-review" + GH_AW_WORKFLOW_NAME: "Add Copilot Reviewer After Security Approval" + outputs: + add_reviewer_reviewers_added: ${{ steps.process_safe_outputs.outputs.reviewers_added }} + create_discussion_error_count: ${{ steps.process_safe_outputs.outputs.create_discussion_error_count }} + create_discussion_errors: ${{ steps.process_safe_outputs.outputs.create_discussion_errors }} + process_safe_outputs_processed_count: ${{ steps.process_safe_outputs.outputs.processed_count }} + process_safe_outputs_temporary_id_map: ${{ steps.process_safe_outputs.outputs.temporary_id_map }} + steps: + - name: Setup Scripts + uses: github/gh-aw/actions/setup@a70c5eada06553e3510ac27f2c3bda9d3705bccb # v0.46.3 + with: + destination: /opt/gh-aw/actions + - name: Download agent output artifact + continue-on-error: true + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + with: + name: agent-output + path: /tmp/gh-aw/safeoutputs/ + - name: Setup agent output 
environment variable + run: | + mkdir -p /tmp/gh-aw/safeoutputs/ + find "/tmp/gh-aw/safeoutputs/" -type f -print + echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV" + - name: Process Safe Outputs + id: process_safe_outputs + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }} + GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG: "{\"add_reviewer\":{\"allowed\":[\"copilot\"],\"max\":1,\"target\":\"triggering\"},\"missing_data\":{},\"missing_tool\":{}}" + with: + github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/safe_output_handler_manager.cjs'); + await main(); diff --git a/.github/workflows/copilot-review.md b/.github/workflows/copilot-review.md new file mode 100644 index 0000000..da058a7 --- /dev/null +++ b/.github/workflows/copilot-review.md @@ -0,0 +1,56 @@ +--- +description: Adds Copilot as a reviewer on a pull request after the security review + agent approves it. Triggered when any review is submitted; only acts when the + review is an approval from the security reviewer. + +on: + pull_request_review: + types: [submitted] + +permissions: + contents: read + pull-requests: read + +tools: + github: + toolsets: [repos, pull_requests] + +safe-outputs: + add-reviewer: + reviewers: [copilot] + max: 1 + target: "triggering" + github-token: ${{ secrets.GH_AW_AGENT_TOKEN }} + noop: + max: 1 +--- + +# Add Copilot Reviewer After Security Approval + +A review was just submitted on pull request +#${{ github.event.pull_request.number }}. + +## Instructions + +1. 
**Fetch the review that was just submitted.** Use the GitHub API to get the + details of the review that triggered this workflow (review ID: + ${{ github.event.review.id }}) on PR + #${{ github.event.pull_request.number }}. + +2. **Check if this is a security review approval.** Determine whether: + - The review was submitted by the security review bot (look for a user + whose login contains "github-actions" or whose review body references + security review categories / security posture analysis). + - The review state is `APPROVED`. + - If **both** conditions are met, proceed to step 3. + - If **either** condition is not met, use `noop` — this review is not + relevant. + +3. **Check if Copilot is already a reviewer.** Fetch the list of requested + reviewers for PR #${{ github.event.pull_request.number }}. If Copilot + (`copilot`) is already in the reviewer list, use `noop` — no action + needed. + +4. **Add Copilot as a reviewer.** If the security review approved the PR and + Copilot is not yet a reviewer, add Copilot as a reviewer on the pull + request. 
diff --git a/.github/workflows/pr-orchestrator.yml b/.github/workflows/pr-orchestrator.yml new file mode 100644 index 0000000..b6a772c --- /dev/null +++ b/.github/workflows/pr-orchestrator.yml @@ -0,0 +1,62 @@ +name: "PR: orchestrator" + +on: + pull_request: + branches: ["main", "feature*", "fix*"] + +permissions: + contents: read + actions: read + pull-requests: read + security-events: write + +jobs: + # ── Stage 1: Code Quality ──────────────────────────────────────────── + code-quality: + name: code quality + uses: ./.github/workflows/python-code-quality.yml + permissions: + contents: read + + # ── Stage 2: Tests + Build Validation (parallel, after quality) ───── + tests: + name: tests + needs: code-quality + uses: ./.github/workflows/python-tests.yml + permissions: + contents: read + + package-build: + name: package build + needs: code-quality + uses: ./.github/workflows/python-package-build.yml + permissions: + contents: read + + docker-build: + name: docker build + needs: code-quality + uses: ./.github/workflows/python-docker-build.yml + permissions: + contents: read + + # ── Stage 3: CodeQL (after tests + builds pass) ───────────────────── + codeql: + name: codeql + needs: [tests, package-build, docker-build] + uses: ./.github/workflows/codeql-analysis.yml + permissions: + actions: read + contents: read + security-events: write + + # ── Stage 4: Security Review (after CodeQL) ───────────────────────── + security-review: + name: security review + needs: codeql + uses: ./.github/workflows/security-review.lock.yml + permissions: + contents: read + pull-requests: read + with: + pr_number: ${{ github.event.pull_request.number }} diff --git a/.github/workflows/pr-review-comment-handler.lock.yml b/.github/workflows/pr-review-comment-handler.lock.yml new file mode 100644 index 0000000..f8eb026 --- /dev/null +++ b/.github/workflows/pr-review-comment-handler.lock.yml @@ -0,0 +1,1154 @@ +# +# ___ _ _ +# / _ \ | | (_) +# | |_| | __ _ ___ _ __ | |_ _ ___ +# | _ |/ _` |/ 
_ \ '_ \| __| |/ __| +# | | | | (_| | __/ | | | |_| | (__ +# \_| |_/\__, |\___|_| |_|\__|_|\___| +# __/ | +# _ _ |___/ +# | | | | / _| | +# | | | | ___ _ __ _ __| |_| | _____ ____ +# | |/\| |/ _ \ '__| |/ /| _| |/ _ \ \ /\ / / ___| +# \ /\ / (_) | | | | ( | | | | (_) \ V V /\__ \ +# \/ \/ \___/|_| |_|\_\|_| |_|\___/ \_/\_/ |___/ +# +# This file was automatically generated by gh-aw (v0.46.3). DO NOT EDIT. +# +# To update this file, edit the corresponding .md file and run: +# gh aw compile +# Not all edits will cause changes to this file. +# +# For more information: https://github.github.com/gh-aw/introduction/overview/ +# +# Triages PR review comments. If the comment raises an issue that needs fixing, replies tagging @copilot to fix it directly on the PR. If low priority, creates an issue for later. If not relevant, resolves with a reply. +# +# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"0b44b9f4cc4d0349fe23357a62776d7483607c761560b73dffce75e24c0219c3","compiler_version":"v0.46.3"} + +name: "PR Review Comment Handler" +"on": + pull_request_review_comment: + types: + - created + +permissions: {} + +concurrency: + group: "gh-aw-${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}" + cancel-in-progress: true + +run-name: "PR Review Comment Handler" + +jobs: + activation: + needs: pre_activation + if: needs.pre_activation.outputs.activated == 'true' + runs-on: ubuntu-slim + permissions: + contents: read + outputs: + body: ${{ steps.sanitized.outputs.body }} + comment_id: "" + comment_repo: "" + text: ${{ steps.sanitized.outputs.text }} + title: ${{ steps.sanitized.outputs.title }} + steps: + - name: Setup Scripts + uses: github/gh-aw/actions/setup@a70c5eada06553e3510ac27f2c3bda9d3705bccb # v0.46.3 + with: + destination: /opt/gh-aw/actions + - name: Validate context variables + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + script: | + const { setupGlobals } = 
require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/validate_context_variables.cjs'); + await main(); + - name: Checkout .github and .agents folders + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + sparse-checkout: | + .github + .agents + fetch-depth: 1 + persist-credentials: false + - name: Check workflow file timestamps + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_WORKFLOW_FILE: "pr-review-comment-handler.lock.yml" + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/check_workflow_timestamp_api.cjs'); + await main(); + - name: Compute current body text + id: sanitized + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/compute_text.cjs'); + await main(); + - name: Create prompt with built-in context + env: + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }} + GH_AW_GITHUB_ACTOR: ${{ github.actor }} + GH_AW_GITHUB_EVENT_COMMENT_ID: ${{ github.event.comment.id }} + GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER: ${{ github.event.discussion.number }} + GH_AW_GITHUB_EVENT_ISSUE_NUMBER: ${{ github.event.issue.number }} + GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER: ${{ github.event.pull_request.number }} + GH_AW_GITHUB_REPOSITORY: ${{ github.repository }} + GH_AW_GITHUB_RUN_ID: ${{ github.run_id }} + GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }} + GH_AW_IS_PR_COMMENT: ${{ github.event.issue.pull_request && 'true' || '' }} + run: | + bash /opt/gh-aw/actions/create_prompt_first.sh + cat << 'GH_AW_PROMPT_EOF' > 
"$GH_AW_PROMPT" + + GH_AW_PROMPT_EOF + cat "/opt/gh-aw/prompts/xpia.md" >> "$GH_AW_PROMPT" + cat "/opt/gh-aw/prompts/temp_folder_prompt.md" >> "$GH_AW_PROMPT" + cat "/opt/gh-aw/prompts/markdown.md" >> "$GH_AW_PROMPT" + cat << 'GH_AW_PROMPT_EOF' >> "$GH_AW_PROMPT" + + GitHub API Access Instructions + + The gh CLI is NOT authenticated. Do NOT use gh commands for GitHub operations. + + + To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls. + + Temporary IDs: Some safe output tools support a temporary ID field (usually named temporary_id) so you can reference newly-created items elsewhere in the SAME agent output (for example, using #aw_abc1 in a later body). + + **IMPORTANT - temporary_id format rules:** + - If you DON'T need to reference the item later, OMIT the temporary_id field entirely (it will be auto-generated if needed) + - If you DO need cross-references/chaining, you MUST match this EXACT validation regex: /^aw_[A-Za-z0-9]{3,8}$/i + - Format: aw_ prefix followed by 3 to 8 alphanumeric characters (A-Z, a-z, 0-9, case-insensitive) + - Valid alphanumeric characters: ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789 + - INVALID examples: aw_ab (too short), aw_123456789 (too long), aw_test-id (contains hyphen), aw_id_123 (contains underscore) + - VALID examples: aw_abc, aw_abc1, aw_Test123, aw_A1B2C3D4, aw_12345678 + - To generate valid IDs: use 3-8 random alphanumeric characters or omit the field to let the system auto-generate + + Do NOT invent other aw_* formats — downstream steps will reject them with validation errors matching against /^aw_[A-Za-z0-9]{3,8}$/i. + + Discover available tools from the safeoutputs MCP server. + + **Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped. 
+ + **Note**: If you made no other safe output tool calls during this workflow execution, call the "noop" tool to provide a status message indicating completion or that no actions were needed. + + + + The following GitHub context information is available for this workflow: + {{#if __GH_AW_GITHUB_ACTOR__ }} + - **actor**: __GH_AW_GITHUB_ACTOR__ + {{/if}} + {{#if __GH_AW_GITHUB_REPOSITORY__ }} + - **repository**: __GH_AW_GITHUB_REPOSITORY__ + {{/if}} + {{#if __GH_AW_GITHUB_WORKSPACE__ }} + - **workspace**: __GH_AW_GITHUB_WORKSPACE__ + {{/if}} + {{#if __GH_AW_GITHUB_EVENT_ISSUE_NUMBER__ }} + - **issue-number**: #__GH_AW_GITHUB_EVENT_ISSUE_NUMBER__ + {{/if}} + {{#if __GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER__ }} + - **discussion-number**: #__GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER__ + {{/if}} + {{#if __GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER__ }} + - **pull-request-number**: #__GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER__ + {{/if}} + {{#if __GH_AW_GITHUB_EVENT_COMMENT_ID__ }} + - **comment-id**: __GH_AW_GITHUB_EVENT_COMMENT_ID__ + {{/if}} + {{#if __GH_AW_GITHUB_RUN_ID__ }} + - **workflow-run-id**: __GH_AW_GITHUB_RUN_ID__ + {{/if}} + + + GH_AW_PROMPT_EOF + if [ "$GITHUB_EVENT_NAME" = "issue_comment" ] && [ -n "$GH_AW_IS_PR_COMMENT" ] || [ "$GITHUB_EVENT_NAME" = "pull_request_review_comment" ] || [ "$GITHUB_EVENT_NAME" = "pull_request_review" ]; then + cat "/opt/gh-aw/prompts/pr_context_prompt.md" >> "$GH_AW_PROMPT" + fi + cat << 'GH_AW_PROMPT_EOF' >> "$GH_AW_PROMPT" + + GH_AW_PROMPT_EOF + cat << 'GH_AW_PROMPT_EOF' >> "$GH_AW_PROMPT" + {{#runtime-import .github/workflows/pr-review-comment-handler.md}} + GH_AW_PROMPT_EOF + - name: Interpolate variables and render templates + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + GH_AW_GITHUB_EVENT_COMMENT_ID: ${{ github.event.comment.id }} + GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER: ${{ github.event.pull_request.number }} + with: + script: | + const { 
setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/interpolate_prompt.cjs'); + await main(); + - name: Substitute placeholders + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + GH_AW_GITHUB_ACTOR: ${{ github.actor }} + GH_AW_GITHUB_EVENT_COMMENT_ID: ${{ github.event.comment.id }} + GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER: ${{ github.event.discussion.number }} + GH_AW_GITHUB_EVENT_ISSUE_NUMBER: ${{ github.event.issue.number }} + GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER: ${{ github.event.pull_request.number }} + GH_AW_GITHUB_REPOSITORY: ${{ github.repository }} + GH_AW_GITHUB_RUN_ID: ${{ github.run_id }} + GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }} + GH_AW_IS_PR_COMMENT: ${{ github.event.issue.pull_request && 'true' || '' }} + GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_ACTIVATED: ${{ needs.pre_activation.outputs.activated }} + GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_MATCHED_COMMAND: ${{ needs.pre_activation.outputs.matched_command }} + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + + const substitutePlaceholders = require('/opt/gh-aw/actions/substitute_placeholders.cjs'); + + // Call the substitution function + return await substitutePlaceholders({ + file: process.env.GH_AW_PROMPT, + substitutions: { + GH_AW_GITHUB_ACTOR: process.env.GH_AW_GITHUB_ACTOR, + GH_AW_GITHUB_EVENT_COMMENT_ID: process.env.GH_AW_GITHUB_EVENT_COMMENT_ID, + GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER: process.env.GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER, + GH_AW_GITHUB_EVENT_ISSUE_NUMBER: process.env.GH_AW_GITHUB_EVENT_ISSUE_NUMBER, + GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER: process.env.GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER, + GH_AW_GITHUB_REPOSITORY: process.env.GH_AW_GITHUB_REPOSITORY, + GH_AW_GITHUB_RUN_ID: 
process.env.GH_AW_GITHUB_RUN_ID, + GH_AW_GITHUB_WORKSPACE: process.env.GH_AW_GITHUB_WORKSPACE, + GH_AW_IS_PR_COMMENT: process.env.GH_AW_IS_PR_COMMENT, + GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_ACTIVATED: process.env.GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_ACTIVATED, + GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_MATCHED_COMMAND: process.env.GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_MATCHED_COMMAND + } + }); + - name: Validate prompt placeholders + env: + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + run: bash /opt/gh-aw/actions/validate_prompt_placeholders.sh + - name: Print prompt + env: + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + run: bash /opt/gh-aw/actions/print_prompt_summary.sh + - name: Upload prompt artifact + if: success() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + with: + name: prompt + path: /tmp/gh-aw/aw-prompts/prompt.txt + retention-days: 1 + + agent: + needs: activation + runs-on: ubuntu-latest + permissions: + contents: read + issues: read + pull-requests: read + env: + DEFAULT_BRANCH: ${{ github.event.repository.default_branch }} + GH_AW_ASSETS_ALLOWED_EXTS: "" + GH_AW_ASSETS_BRANCH: "" + GH_AW_ASSETS_MAX_SIZE_KB: 0 + GH_AW_MCP_LOG_DIR: /tmp/gh-aw/mcp-logs/safeoutputs + GH_AW_SAFE_OUTPUTS: /opt/gh-aw/safeoutputs/outputs.jsonl + GH_AW_SAFE_OUTPUTS_CONFIG_PATH: /opt/gh-aw/safeoutputs/config.json + GH_AW_SAFE_OUTPUTS_TOOLS_PATH: /opt/gh-aw/safeoutputs/tools.json + GH_AW_WORKFLOW_ID_SANITIZED: prreviewcommenthandler + outputs: + checkout_pr_success: ${{ steps.checkout-pr.outputs.checkout_pr_success || 'true' }} + has_patch: ${{ steps.collect_output.outputs.has_patch }} + model: ${{ steps.generate_aw_info.outputs.model }} + output: ${{ steps.collect_output.outputs.output }} + output_types: ${{ steps.collect_output.outputs.output_types }} + secret_verification_result: ${{ steps.validate-secret.outputs.verification_result }} + steps: + - name: Setup Scripts + uses: github/gh-aw/actions/setup@a70c5eada06553e3510ac27f2c3bda9d3705bccb # 
v0.46.3 + with: + destination: /opt/gh-aw/actions + - name: Checkout repository + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + persist-credentials: false + - name: Create gh-aw temp directory + run: bash /opt/gh-aw/actions/create_gh_aw_tmp_dir.sh + - name: Configure Git credentials + env: + REPO_NAME: ${{ github.repository }} + SERVER_URL: ${{ github.server_url }} + run: | + git config --global user.email "github-actions[bot]@users.noreply.github.com" + git config --global user.name "github-actions[bot]" + # Re-authenticate git with GitHub token + SERVER_URL_STRIPPED="${SERVER_URL#https://}" + git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git" + echo "Git configured with standard GitHub Actions identity" + - name: Checkout PR branch + id: checkout-pr + if: | + github.event.pull_request + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + with: + github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/checkout_pr_branch.cjs'); + await main(); + - name: Generate agentic run info + id: generate_aw_info + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + script: | + const fs = require('fs'); + + const awInfo = { + engine_id: "copilot", + engine_name: "GitHub Copilot CLI", + model: process.env.GH_AW_MODEL_AGENT_COPILOT || "", + version: "", + agent_version: "0.0.411", + cli_version: "v0.46.3", + workflow_name: "PR Review Comment Handler", + experimental: false, + supports_tools_allowlist: true, + run_id: context.runId, + run_number: context.runNumber, + 
run_attempt: process.env.GITHUB_RUN_ATTEMPT, + repository: context.repo.owner + '/' + context.repo.repo, + ref: context.ref, + sha: context.sha, + actor: context.actor, + event_name: context.eventName, + staged: false, + allowed_domains: ["defaults"], + firewall_enabled: true, + awf_version: "v0.20.0", + awmg_version: "v0.1.4", + steps: { + firewall: "squid" + }, + created_at: new Date().toISOString() + }; + + // Write to /tmp/gh-aw directory to avoid inclusion in PR + const tmpPath = '/tmp/gh-aw/aw_info.json'; + fs.writeFileSync(tmpPath, JSON.stringify(awInfo, null, 2)); + console.log('Generated aw_info.json at:', tmpPath); + console.log(JSON.stringify(awInfo, null, 2)); + + // Set model as output for reuse in other steps/jobs + core.setOutput('model', awInfo.model); + - name: Validate COPILOT_GITHUB_TOKEN secret + id: validate-secret + run: /opt/gh-aw/actions/validate_multi_secret.sh COPILOT_GITHUB_TOKEN 'GitHub Copilot CLI' https://github.github.com/gh-aw/reference/engines/#github-copilot-default + env: + COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }} + - name: Install GitHub Copilot CLI + run: /opt/gh-aw/actions/install_copilot_cli.sh 0.0.411 + - name: Install awf binary + run: bash /opt/gh-aw/actions/install_awf_binary.sh v0.20.0 + - name: Determine automatic lockdown mode for GitHub MCP Server + id: determine-automatic-lockdown + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_GITHUB_TOKEN: ${{ secrets.GH_AW_GITHUB_TOKEN }} + GH_AW_GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN }} + with: + script: | + const determineAutomaticLockdown = require('/opt/gh-aw/actions/determine_automatic_lockdown.cjs'); + await determineAutomaticLockdown(github, context, core); + - name: Download container images + run: bash /opt/gh-aw/actions/download_docker_images.sh ghcr.io/github/gh-aw-firewall/agent:0.20.0 ghcr.io/github/gh-aw-firewall/api-proxy:0.20.0 ghcr.io/github/gh-aw-firewall/squid:0.20.0 
ghcr.io/github/gh-aw-mcpg:v0.1.4 ghcr.io/github/github-mcp-server:v0.30.3 node:lts-alpine + - name: Write Safe Outputs Config + run: | + mkdir -p /opt/gh-aw/safeoutputs + mkdir -p /tmp/gh-aw/safeoutputs + mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs + cat > /opt/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_EOF' + {"create_issue":{"max":10},"missing_data":{},"missing_tool":{},"noop":{"max":10},"resolve_pull_request_review_thread":{"max":10}} + GH_AW_SAFE_OUTPUTS_CONFIG_EOF + cat > /opt/gh-aw/safeoutputs/tools.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_EOF' + [ + { + "description": "Create a new GitHub issue for tracking bugs, feature requests, or tasks. Use this for actionable work items that need assignment, labeling, and status tracking. For reports, announcements, or status updates that don't require task tracking, use create_discussion instead. CONSTRAINTS: Maximum 10 issue(s) can be created.", + "inputSchema": { + "additionalProperties": false, + "properties": { + "body": { + "description": "Detailed issue description in Markdown. Do NOT repeat the title as a heading since it already appears as the issue's h1. Include context, reproduction steps, or acceptance criteria as appropriate.", + "type": "string" + }, + "labels": { + "description": "Labels to categorize the issue (e.g., 'bug', 'enhancement'). Labels must exist in the repository.", + "items": { + "type": "string" + }, + "type": "array" + }, + "parent": { + "description": "Parent issue number for creating sub-issues. This is the numeric ID from the GitHub URL (e.g., 42 in github.com/owner/repo/issues/42). Can also be a temporary_id (e.g., 'aw_abc123', 'aw_Test123') from a previously created issue in the same workflow run.", + "type": [ + "number", + "string" + ] + }, + "temporary_id": { + "description": "Unique temporary identifier for referencing this issue before it's created. Format: 'aw_' followed by 3 to 8 alphanumeric characters (e.g., 'aw_abc1', 'aw_Test123'). 
Use '#aw_ID' in body text to reference other issues by their temporary_id; these are replaced with actual issue numbers after creation.", + "pattern": "^aw_[A-Za-z0-9]{3,8}$", + "type": "string" + }, + "title": { + "description": "Concise issue title summarizing the bug, feature, or task. The title appears as the main heading, so keep it brief and descriptive.", + "type": "string" + } + }, + "required": [ + "title", + "body" + ], + "type": "object" + }, + "name": "create_issue" + }, + { + "description": "Reply to an existing review comment on a pull request. Use this to respond to feedback, answer questions, or acknowledge review comments. The comment_id must be the numeric ID of an existing review comment. CONSTRAINTS: Maximum 10 reply/replies can be created.", + "inputSchema": { + "additionalProperties": false, + "properties": { + "body": { + "description": "The reply text in Markdown format. Provide a clear response to the review comment.", + "type": "string" + }, + "comment_id": { + "description": "The numeric ID of the review comment to reply to (e.g., 42853901 from the comment URL or API response).", + "type": [ + "number", + "string" + ] + }, + "pull_request_number": { + "description": "Pull request number to reply on. This is the numeric ID from the GitHub URL (e.g., 876 in github.com/owner/repo/pull/876). If omitted, replies on the PR that triggered this workflow.", + "type": [ + "number", + "string" + ] + } + }, + "required": [ + "comment_id", + "body" + ], + "type": "object" + }, + "name": "reply_to_pull_request_review_comment" + }, + { + "description": "Resolve a review thread on a pull request. Use this to mark a review conversation as resolved after addressing the feedback. The thread_id must be the node ID of the review thread (e.g., PRRT_kwDO...). 
CONSTRAINTS: Maximum 10 review thread(s) can be resolved.", + "inputSchema": { + "additionalProperties": false, + "properties": { + "thread_id": { + "description": "The node ID of the review thread to resolve (e.g., 'PRRT_kwDOABCD...'). This is the GraphQL node ID, not a numeric ID.", + "type": "string" + } + }, + "required": [ + "thread_id" + ], + "type": "object" + }, + "name": "resolve_pull_request_review_thread" + }, + { + "description": "Report that a tool or capability needed to complete the task is not available, or share any information you deem important about missing functionality or limitations. Use this when you cannot accomplish what was requested because the required functionality is missing or access is restricted.", + "inputSchema": { + "additionalProperties": false, + "properties": { + "alternatives": { + "description": "Any workarounds, manual steps, or alternative approaches the user could take (max 256 characters).", + "type": "string" + }, + "reason": { + "description": "Explanation of why this tool is needed or what information you want to share about the limitation (max 256 characters).", + "type": "string" + }, + "tool": { + "description": "Optional: Name or description of the missing tool or capability (max 128 characters). Be specific about what functionality is needed.", + "type": "string" + } + }, + "required": [ + "reason" + ], + "type": "object" + }, + "name": "missing_tool" + }, + { + "description": "Log a transparency message when no significant actions are needed. Use this to confirm workflow completion and provide visibility when analysis is complete but no changes or outputs are required (e.g., 'No issues found', 'All checks passed'). This ensures the workflow produces human-visible output even when no other actions are taken.", + "inputSchema": { + "additionalProperties": false, + "properties": { + "message": { + "description": "Status or completion message to log. 
Should explain what was analyzed and the outcome (e.g., 'Code review complete - no issues found', 'Analysis complete - all tests passing').", + "type": "string" + } + }, + "required": [ + "message" + ], + "type": "object" + }, + "name": "noop" + }, + { + "description": "Report that data or information needed to complete the task is not available. Use this when you cannot accomplish what was requested because required data, context, or information is missing.", + "inputSchema": { + "additionalProperties": false, + "properties": { + "alternatives": { + "description": "Any workarounds, manual steps, or alternative approaches the user could take (max 256 characters).", + "type": "string" + }, + "context": { + "description": "Additional context about the missing data or where it should come from (max 256 characters).", + "type": "string" + }, + "data_type": { + "description": "Type or description of the missing data or information (max 128 characters). Be specific about what data is needed.", + "type": "string" + }, + "reason": { + "description": "Explanation of why this data is needed to complete the task (max 256 characters).", + "type": "string" + } + }, + "required": [], + "type": "object" + }, + "name": "missing_data" + } + ] + GH_AW_SAFE_OUTPUTS_TOOLS_EOF + cat > /opt/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_EOF' + { + "create_issue": { + "defaultMax": 1, + "fields": { + "body": { + "required": true, + "type": "string", + "sanitize": true, + "maxLength": 65000 + }, + "labels": { + "type": "array", + "itemType": "string", + "itemSanitize": true, + "itemMaxLength": 128 + }, + "parent": { + "issueOrPRNumber": true + }, + "repo": { + "type": "string", + "maxLength": 256 + }, + "temporary_id": { + "type": "string" + }, + "title": { + "required": true, + "type": "string", + "sanitize": true, + "maxLength": 128 + } + } + }, + "missing_tool": { + "defaultMax": 20, + "fields": { + "alternatives": { + "type": "string", + "sanitize": true, + 
"maxLength": 512 + }, + "reason": { + "required": true, + "type": "string", + "sanitize": true, + "maxLength": 256 + }, + "tool": { + "type": "string", + "sanitize": true, + "maxLength": 128 + } + } + }, + "noop": { + "defaultMax": 1, + "fields": { + "message": { + "required": true, + "type": "string", + "sanitize": true, + "maxLength": 65000 + } + } + }, + "resolve_pull_request_review_thread": { + "defaultMax": 10, + "fields": { + "thread_id": { + "required": true, + "type": "string" + } + } + } + } + GH_AW_SAFE_OUTPUTS_VALIDATION_EOF + - name: Generate Safe Outputs MCP Server Config + id: safe-outputs-config + run: | + # Generate a secure random API key (360 bits of entropy, 40+ chars) + # Mask immediately to prevent timing vulnerabilities + API_KEY=$(openssl rand -base64 45 | tr -d '/+=') + echo "::add-mask::${API_KEY}" + + PORT=3001 + + # Set outputs for next steps + { + echo "safe_outputs_api_key=${API_KEY}" + echo "safe_outputs_port=${PORT}" + } >> "$GITHUB_OUTPUT" + + echo "Safe Outputs MCP server will run on port ${PORT}" + + - name: Start Safe Outputs MCP HTTP Server + id: safe-outputs-start + env: + DEBUG: '*' + GH_AW_SAFE_OUTPUTS_PORT: ${{ steps.safe-outputs-config.outputs.safe_outputs_port }} + GH_AW_SAFE_OUTPUTS_API_KEY: ${{ steps.safe-outputs-config.outputs.safe_outputs_api_key }} + GH_AW_SAFE_OUTPUTS_TOOLS_PATH: /opt/gh-aw/safeoutputs/tools.json + GH_AW_SAFE_OUTPUTS_CONFIG_PATH: /opt/gh-aw/safeoutputs/config.json + GH_AW_MCP_LOG_DIR: /tmp/gh-aw/mcp-logs/safeoutputs + run: | + # Environment variables are set above to prevent template injection + export DEBUG + export GH_AW_SAFE_OUTPUTS_PORT + export GH_AW_SAFE_OUTPUTS_API_KEY + export GH_AW_SAFE_OUTPUTS_TOOLS_PATH + export GH_AW_SAFE_OUTPUTS_CONFIG_PATH + export GH_AW_MCP_LOG_DIR + + bash /opt/gh-aw/actions/start_safe_outputs_server.sh + + - name: Start MCP Gateway + id: start-mcp-gateway + env: + GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }} + GH_AW_SAFE_OUTPUTS_API_KEY: ${{ 
steps.safe-outputs-start.outputs.api_key }} + GH_AW_SAFE_OUTPUTS_PORT: ${{ steps.safe-outputs-start.outputs.port }} + GITHUB_MCP_LOCKDOWN: ${{ steps.determine-automatic-lockdown.outputs.lockdown == 'true' && '1' || '0' }} + GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + run: | + set -eo pipefail + mkdir -p /tmp/gh-aw/mcp-config + + # Export gateway environment variables for MCP config and gateway script + export MCP_GATEWAY_PORT="80" + export MCP_GATEWAY_DOMAIN="host.docker.internal" + MCP_GATEWAY_API_KEY=$(openssl rand -base64 45 | tr -d '/+=') + echo "::add-mask::${MCP_GATEWAY_API_KEY}" + export MCP_GATEWAY_API_KEY + export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" + mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export DEBUG="*" + + export GH_AW_ENGINE="copilot" + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw 
ghcr.io/github/gh-aw-mcpg:v0.1.4' + + mkdir -p /home/runner/.copilot + cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh + { + "mcpServers": { + "github": { + "type": "stdio", + "container": "ghcr.io/github/github-mcp-server:v0.30.3", + "env": { + "GITHUB_LOCKDOWN_MODE": "$GITHUB_MCP_LOCKDOWN", + "GITHUB_PERSONAL_ACCESS_TOKEN": "\${GITHUB_MCP_SERVER_TOKEN}", + "GITHUB_READ_ONLY": "1", + "GITHUB_TOOLSETS": "repos,pull_requests,issues" + } + }, + "safeoutputs": { + "type": "http", + "url": "http://host.docker.internal:$GH_AW_SAFE_OUTPUTS_PORT", + "headers": { + "Authorization": "\${GH_AW_SAFE_OUTPUTS_API_KEY}" + } + } + }, + "gateway": { + "port": $MCP_GATEWAY_PORT, + "domain": "${MCP_GATEWAY_DOMAIN}", + "apiKey": "${MCP_GATEWAY_API_KEY}", + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + } + } + GH_AW_MCP_CONFIG_EOF + - name: Generate workflow overview + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + script: | + const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); + await generateWorkflowOverview(core); + - name: Download prompt artifact + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + with: + name: prompt + path: /tmp/gh-aw/aw-prompts + - name: Clean git credentials + run: bash /opt/gh-aw/actions/clean_git_credentials.sh + - name: Execute GitHub Copilot CLI + id: agentic_execution + # Copilot CLI tool arguments (sorted): + timeout-minutes: 20 + run: | + set -o pipefail + sudo -E awf --env-all --container-workdir "${GITHUB_WORKSPACE}" --allow-domains 
api.business.githubcopilot.com,api.enterprise.githubcopilot.com,api.github.com,api.githubcopilot.com,api.individual.githubcopilot.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,github.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,ppa.launchpad.net,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,telemetry.enterprise.githubcopilot.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --enable-host-access --image-tag 0.20.0 --skip-pull --enable-api-proxy \ + -- /bin/bash -c '/usr/local/bin/copilot --add-dir /tmp/gh-aw/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --add-dir "${GITHUB_WORKSPACE}" --disable-builtin-mcps --allow-all-tools --allow-all-paths --share /tmp/gh-aw/sandbox/agent/logs/conversation.md --prompt "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"${GH_AW_MODEL_AGENT_COPILOT:+ --model "$GH_AW_MODEL_AGENT_COPILOT"}' 2>&1 | tee -a /tmp/gh-aw/agent-stdio.log + env: + COPILOT_AGENT_RUNNER_TYPE: STANDALONE + COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }} + GH_AW_MCP_CONFIG: /home/runner/.copilot/mcp-config.json + GH_AW_MODEL_AGENT_COPILOT: ${{ vars.GH_AW_MODEL_AGENT_COPILOT || '' }} + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }} + GITHUB_HEAD_REF: ${{ github.head_ref }} + GITHUB_REF_NAME: ${{ github.ref_name }} + GITHUB_STEP_SUMMARY: ${{ env.GITHUB_STEP_SUMMARY }} + GITHUB_WORKSPACE: ${{ github.workspace }} + XDG_CONFIG_HOME: /home/runner + - name: 
Configure Git credentials + env: + REPO_NAME: ${{ github.repository }} + SERVER_URL: ${{ github.server_url }} + run: | + git config --global user.email "github-actions[bot]@users.noreply.github.com" + git config --global user.name "github-actions[bot]" + # Re-authenticate git with GitHub token + SERVER_URL_STRIPPED="${SERVER_URL#https://}" + git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git" + echo "Git configured with standard GitHub Actions identity" + - name: Copy Copilot session state files to logs + if: always() + continue-on-error: true + run: | + # Copy Copilot session state files to logs folder for artifact collection + # This ensures they are in /tmp/gh-aw/ where secret redaction can scan them + SESSION_STATE_DIR="$HOME/.copilot/session-state" + LOGS_DIR="/tmp/gh-aw/sandbox/agent/logs" + + if [ -d "$SESSION_STATE_DIR" ]; then + echo "Copying Copilot session state files from $SESSION_STATE_DIR to $LOGS_DIR" + mkdir -p "$LOGS_DIR" + cp -v "$SESSION_STATE_DIR"/*.jsonl "$LOGS_DIR/" 2>/dev/null || true + echo "Session state files copied successfully" + else + echo "No session-state directory found at $SESSION_STATE_DIR" + fi + - name: Stop MCP Gateway + if: always() + continue-on-error: true + env: + MCP_GATEWAY_PORT: ${{ steps.start-mcp-gateway.outputs.gateway-port }} + MCP_GATEWAY_API_KEY: ${{ steps.start-mcp-gateway.outputs.gateway-api-key }} + GATEWAY_PID: ${{ steps.start-mcp-gateway.outputs.gateway-pid }} + run: | + bash /opt/gh-aw/actions/stop_mcp_gateway.sh "$GATEWAY_PID" + - name: Redact secrets in logs + if: always() + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/redact_secrets.cjs'); + await main(); + env: + GH_AW_SECRET_NAMES: 
'COPILOT_GITHUB_TOKEN,GH_AW_GITHUB_MCP_SERVER_TOKEN,GH_AW_GITHUB_TOKEN,GITHUB_TOKEN' + SECRET_COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }} + SECRET_GH_AW_GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN }} + SECRET_GH_AW_GITHUB_TOKEN: ${{ secrets.GH_AW_GITHUB_TOKEN }} + SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Upload Safe Outputs + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + with: + name: safe-output + path: ${{ env.GH_AW_SAFE_OUTPUTS }} + if-no-files-found: warn + - name: Ingest agent output + id: collect_output + if: always() + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }} + GH_AW_ALLOWED_DOMAINS: "api.business.githubcopilot.com,api.enterprise.githubcopilot.com,api.github.com,api.githubcopilot.com,api.individual.githubcopilot.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,github.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,ppa.launchpad.net,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,telemetry.enterprise.githubcopilot.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com" + GITHUB_SERVER_URL: ${{ github.server_url }} + GITHUB_API_URL: ${{ github.api_url }} + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/collect_ndjson_output.cjs'); + await main(); + - name: 
Upload sanitized agent output + if: always() && env.GH_AW_AGENT_OUTPUT + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + with: + name: agent-output + path: ${{ env.GH_AW_AGENT_OUTPUT }} + if-no-files-found: warn + - name: Upload engine output files + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + with: + name: agent_outputs + path: | + /tmp/gh-aw/sandbox/agent/logs/ + /tmp/gh-aw/redacted-urls.log + if-no-files-found: ignore + - name: Parse agent logs for step summary + if: always() + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_AGENT_OUTPUT: /tmp/gh-aw/sandbox/agent/logs/ + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/parse_copilot_log.cjs'); + await main(); + - name: Parse MCP Gateway logs for step summary + if: always() + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/parse_mcp_gateway_log.cjs'); + await main(); + - name: Print firewall logs + if: always() + continue-on-error: true + env: + AWF_LOGS_DIR: /tmp/gh-aw/sandbox/firewall/logs + run: | + # Fix permissions on firewall logs so they can be uploaded as artifacts + # AWF runs with sudo, creating files owned by root + sudo chmod -R a+r /tmp/gh-aw/sandbox/firewall/logs 2>/dev/null || true + # Only run awf logs summary if awf command exists (it may not be installed if workflow failed before install step) + if command -v awf &> /dev/null; then + awf logs summary | tee -a "$GITHUB_STEP_SUMMARY" + else + echo 'AWF binary not installed, skipping firewall log summary' + fi + - name: Upload agent artifacts + if: always() + continue-on-error: true + uses: 
actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + with: + name: agent-artifacts + path: | + /tmp/gh-aw/aw-prompts/prompt.txt + /tmp/gh-aw/aw_info.json + /tmp/gh-aw/mcp-logs/ + /tmp/gh-aw/sandbox/firewall/logs/ + /tmp/gh-aw/agent-stdio.log + /tmp/gh-aw/agent/ + if-no-files-found: ignore + + conclusion: + needs: + - activation + - agent + - detection + - safe_outputs + if: (always()) && (needs.agent.result != 'skipped') + runs-on: ubuntu-slim + permissions: + contents: read + issues: write + pull-requests: write + outputs: + noop_message: ${{ steps.noop.outputs.noop_message }} + tools_reported: ${{ steps.missing_tool.outputs.tools_reported }} + total_count: ${{ steps.missing_tool.outputs.total_count }} + steps: + - name: Setup Scripts + uses: github/gh-aw/actions/setup@a70c5eada06553e3510ac27f2c3bda9d3705bccb # v0.46.3 + with: + destination: /opt/gh-aw/actions + - name: Download agent output artifact + continue-on-error: true + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + with: + name: agent-output + path: /tmp/gh-aw/safeoutputs/ + - name: Setup agent output environment variable + run: | + mkdir -p /tmp/gh-aw/safeoutputs/ + find "/tmp/gh-aw/safeoutputs/" -type f -print + echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV" + - name: Process No-Op Messages + id: noop + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }} + GH_AW_NOOP_MAX: 10 + GH_AW_WORKFLOW_NAME: "PR Review Comment Handler" + with: + github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/noop.cjs'); + await main(); + - name: Record Missing Tool + id: missing_tool + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 
+ env: + GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }} + GH_AW_WORKFLOW_NAME: "PR Review Comment Handler" + with: + github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/missing_tool.cjs'); + await main(); + - name: Handle Agent Failure + id: handle_agent_failure + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }} + GH_AW_WORKFLOW_NAME: "PR Review Comment Handler" + GH_AW_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + GH_AW_AGENT_CONCLUSION: ${{ needs.agent.result }} + GH_AW_WORKFLOW_ID: "pr-review-comment-handler" + GH_AW_SECRET_VERIFICATION_RESULT: ${{ needs.agent.outputs.secret_verification_result }} + GH_AW_CHECKOUT_PR_SUCCESS: ${{ needs.agent.outputs.checkout_pr_success }} + GH_AW_GROUP_REPORTS: "false" + with: + github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/handle_agent_failure.cjs'); + await main(); + - name: Handle No-Op Message + id: handle_noop_message + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }} + GH_AW_WORKFLOW_NAME: "PR Review Comment Handler" + GH_AW_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + GH_AW_AGENT_CONCLUSION: ${{ needs.agent.result }} + GH_AW_NOOP_MESSAGE: ${{ steps.noop.outputs.noop_message }} + GH_AW_NOOP_REPORT_AS_ISSUE: "true" + with: + github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = 
require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/handle_noop_message.cjs'); + await main(); + + detection: + needs: agent + if: needs.agent.outputs.output_types != '' || needs.agent.outputs.has_patch == 'true' + runs-on: ubuntu-latest + permissions: {} + timeout-minutes: 10 + outputs: + success: ${{ steps.parse_results.outputs.success }} + steps: + - name: Setup Scripts + uses: github/gh-aw/actions/setup@a70c5eada06553e3510ac27f2c3bda9d3705bccb # v0.46.3 + with: + destination: /opt/gh-aw/actions + - name: Download agent artifacts + continue-on-error: true + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + with: + name: agent-artifacts + path: /tmp/gh-aw/threat-detection/ + - name: Download agent output artifact + continue-on-error: true + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + with: + name: agent-output + path: /tmp/gh-aw/threat-detection/ + - name: Echo agent output types + env: + AGENT_OUTPUT_TYPES: ${{ needs.agent.outputs.output_types }} + run: | + echo "Agent output-types: $AGENT_OUTPUT_TYPES" + - name: Setup threat detection + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + WORKFLOW_NAME: "PR Review Comment Handler" + WORKFLOW_DESCRIPTION: "Triages PR review comments. If the comment raises an issue that needs fixing, replies tagging @copilot to fix it directly on the PR. If low priority, creates an issue for later. If not relevant, resolves with a reply." 
+ HAS_PATCH: ${{ needs.agent.outputs.has_patch }} + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/setup_threat_detection.cjs'); + await main(); + - name: Ensure threat-detection directory and log + run: | + mkdir -p /tmp/gh-aw/threat-detection + touch /tmp/gh-aw/threat-detection/detection.log + - name: Validate COPILOT_GITHUB_TOKEN secret + id: validate-secret + run: /opt/gh-aw/actions/validate_multi_secret.sh COPILOT_GITHUB_TOKEN 'GitHub Copilot CLI' https://github.github.com/gh-aw/reference/engines/#github-copilot-default + env: + COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }} + - name: Install GitHub Copilot CLI + run: /opt/gh-aw/actions/install_copilot_cli.sh 0.0.411 + - name: Execute GitHub Copilot CLI + id: agentic_execution + # Copilot CLI tool arguments (sorted): + # --allow-tool shell(cat) + # --allow-tool shell(grep) + # --allow-tool shell(head) + # --allow-tool shell(jq) + # --allow-tool shell(ls) + # --allow-tool shell(tail) + # --allow-tool shell(wc) + timeout-minutes: 20 + run: | + set -o pipefail + COPILOT_CLI_INSTRUCTION="$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" + mkdir -p /tmp/ + mkdir -p /tmp/gh-aw/ + mkdir -p /tmp/gh-aw/agent/ + mkdir -p /tmp/gh-aw/sandbox/agent/logs/ + copilot --add-dir /tmp/ --add-dir /tmp/gh-aw/ --add-dir /tmp/gh-aw/agent/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --disable-builtin-mcps --allow-tool 'shell(cat)' --allow-tool 'shell(grep)' --allow-tool 'shell(head)' --allow-tool 'shell(jq)' --allow-tool 'shell(ls)' --allow-tool 'shell(tail)' --allow-tool 'shell(wc)' --share /tmp/gh-aw/sandbox/agent/logs/conversation.md --prompt "$COPILOT_CLI_INSTRUCTION"${GH_AW_MODEL_DETECTION_COPILOT:+ --model "$GH_AW_MODEL_DETECTION_COPILOT"} 2>&1 | tee /tmp/gh-aw/threat-detection/detection.log + env: + COPILOT_AGENT_RUNNER_TYPE: STANDALONE + COPILOT_GITHUB_TOKEN: ${{ 
secrets.COPILOT_GITHUB_TOKEN }} + GH_AW_MODEL_DETECTION_COPILOT: ${{ vars.GH_AW_MODEL_DETECTION_COPILOT || '' }} + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + GITHUB_HEAD_REF: ${{ github.head_ref }} + GITHUB_REF_NAME: ${{ github.ref_name }} + GITHUB_STEP_SUMMARY: ${{ env.GITHUB_STEP_SUMMARY }} + GITHUB_WORKSPACE: ${{ github.workspace }} + XDG_CONFIG_HOME: /home/runner + - name: Parse threat detection results + id: parse_results + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/parse_threat_detection_results.cjs'); + await main(); + - name: Upload threat detection log + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + with: + name: threat-detection.log + path: /tmp/gh-aw/threat-detection/detection.log + if-no-files-found: ignore + + pre_activation: + runs-on: ubuntu-slim + outputs: + activated: ${{ steps.check_membership.outputs.is_team_member == 'true' }} + steps: + - name: Setup Scripts + uses: github/gh-aw/actions/setup@a70c5eada06553e3510ac27f2c3bda9d3705bccb # v0.46.3 + with: + destination: /opt/gh-aw/actions + - name: Check team membership for workflow + id: check_membership + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_REQUIRED_ROLES: admin,maintainer,write + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/check_membership.cjs'); + await main(); + + safe_outputs: + needs: + - agent + - detection + if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (needs.detection.outputs.success == 'true') + runs-on: ubuntu-slim + permissions: + contents: read + issues: write 
+ pull-requests: write + timeout-minutes: 15 + env: + GH_AW_ENGINE_ID: "copilot" + GH_AW_WORKFLOW_ID: "pr-review-comment-handler" + GH_AW_WORKFLOW_NAME: "PR Review Comment Handler" + outputs: + create_discussion_error_count: ${{ steps.process_safe_outputs.outputs.create_discussion_error_count }} + create_discussion_errors: ${{ steps.process_safe_outputs.outputs.create_discussion_errors }} + process_safe_outputs_processed_count: ${{ steps.process_safe_outputs.outputs.processed_count }} + process_safe_outputs_temporary_id_map: ${{ steps.process_safe_outputs.outputs.temporary_id_map }} + steps: + - name: Setup Scripts + uses: github/gh-aw/actions/setup@a70c5eada06553e3510ac27f2c3bda9d3705bccb # v0.46.3 + with: + destination: /opt/gh-aw/actions + - name: Download agent output artifact + continue-on-error: true + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + with: + name: agent-output + path: /tmp/gh-aw/safeoutputs/ + - name: Setup agent output environment variable + run: | + mkdir -p /tmp/gh-aw/safeoutputs/ + find "/tmp/gh-aw/safeoutputs/" -type f -print + echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV" + - name: Process Safe Outputs + id: process_safe_outputs + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }} + GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG: "{\"create_issue\":{\"max\":10},\"missing_data\":{},\"missing_tool\":{},\"reply_to_pull_request_review_comment\":{\"max\":10},\"resolve_pull_request_review_thread\":{\"max\":10}}" + with: + github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/safe_output_handler_manager.cjs'); + await main(); diff --git a/.github/workflows/pr-review-comment-handler.md 
b/.github/workflows/pr-review-comment-handler.md new file mode 100644 index 0000000..31942fd --- /dev/null +++ b/.github/workflows/pr-review-comment-handler.md @@ -0,0 +1,148 @@ +--- +description: Triages PR review comments. If the comment raises an issue that needs + fixing, replies tagging @copilot to fix it directly on the PR. If low priority, + creates an issue for later. If not relevant, resolves with a reply. + +on: + pull_request_review_comment: + types: [created] + +permissions: + contents: read + pull-requests: read + issues: read + +tools: + github: + toolsets: [repos, pull_requests, issues] + +safe-outputs: + reply-to-pull-request-review-comment: + max: 10 + resolve-pull-request-review-thread: + max: 10 + create-issue: + max: 10 + noop: + max: 10 +--- + +# PR Review Comment Handler + +You are an AI agent that triages pull request review comments on an open PR. +You read the comment, assess its importance, and take the appropriate action: +tag @copilot to fix it, create a low-priority issue for later, or resolve +it directly with a reply. + +## Context + +- **Pull Request**: #${{ github.event.pull_request.number }} +- **Review Comment ID**: ${{ github.event.comment.id }} + +## Your Task + +1. **Fetch the review comment** details using the GitHub API with comment + ID ${{ github.event.comment.id }} on + PR #${{ github.event.pull_request.number }}. Retrieve the comment body, + author, file path, line number, and diff hunk. + +2. **Fetch the full pull request details and diff** for + PR #${{ github.event.pull_request.number }} to understand the broader + context of the changes. + +3. **Classify the comment** into one of these categories: + + - **Needs fixing**: The comment identifies a genuine issue that must be + addressed — a bug, security concern, logic error, missing validation, + style violation against project standards, performance problem, etc. 
+ - **Low priority**: The comment raises a valid but minor point (small + refactor, optional improvement, cosmetic suggestion, or a medium/low + severity security concern) that does not need to be fixed right now. + - **Not relevant**: The comment is praise, a question already answered by + the code, a subjective preference with no clear benefit, a + misunderstanding of the code's intent, or otherwise does not require any + action. + +4. **Act based on your classification**: + + ### If the comment needs fixing + + Reply to the review comment on the PR tagging `@copilot` and asking it + to fix the issue. The reply must include: + - A brief acknowledgement that the reviewer's concern is valid. + - A clear description of what needs to be fixed. + - The tag `@copilot` so Copilot picks it up and applies the fix + directly on this PR. + + Example reply: + > Valid point — this needs to be fixed. + > @copilot Please fix this: <describe the issue and what the expected behavior should be>. + + Do **not** resolve the thread — leave it open for Copilot to address. + + ### If the comment is low priority + + 1. Create a GitHub issue with: + - A clear title summarizing the suggestion. + - A body that includes: + - A description of the suggested improvement. + - The file path and line number(s) involved. + - A link back to the PR: `Related PR: #${{ github.event.pull_request.number }}`. + - The review comment text for context. + - Apply the `low-priority` label. + 2. Reply to the review comment on the PR with: + - A message explaining this is a valid but low-priority point, tracked + in the created issue for later (e.g., "Good point — tracked as a + low-priority item in #<issue-number> for a future iteration."). + 3. Resolve the review thread. + + ### If the comment is not relevant + + 1. Reply to the review comment with a clear, respectful explanation of why + no change is needed. Reference the relevant code, project standards, or + PR context to justify your reasoning. Do **not** create an issue. + 2.
Resolve the review thread. + +## Classification Guidelines + +When deciding how to classify a comment, consider: + +- **Project standards**: This is a Python monorepo using Ruff, Pyright strict + mode, Bandit, and pytest. Code must follow the standards in + `CODING_STANDARDS.md`. If the comment aligns with these standards, it + likely needs fixing. +- **Severity**: Correctness, security, and maintainability issues need + fixing. Cosmetic or style-only preferences are low priority at most. +- **Concreteness**: A comment with a specific, reproducible concern is more + important than a vague suggestion. +- **When in doubt**: Err on the side of treating it as needing a fix or + low priority rather than dismissing a comment. + +## Response Format + +- Keep replies concise and professional. +- When tagging @copilot, be specific about what needs to change so Copilot + can act on it immediately. +- When creating a low-priority issue, include the issue number in the reply. +- When explaining why a comment is not relevant, cite the specific code or + standard that supports your reasoning. +- Do not be dismissive — acknowledge the reviewer's perspective even when + disagreeing. + +## Safe Outputs + +- **Needs fixing**: Use `reply-to-pull-request-review-comment` to reply + tagging `@copilot` with a fix request. Do not resolve the thread. +- **Low priority**: Use `create-issue` to create a low-priority issue + (linked to the PR), then `reply-to-pull-request-review-comment` to reply + with the issue link, then `resolve-pull-request-review-thread` to + resolve the thread. +- **Not relevant**: Use `reply-to-pull-request-review-comment` to reply + with an explanation, then `resolve-pull-request-review-thread` to + resolve the thread. +- **Unclassifiable** (empty or incomprehensible comment): Use + `reply-to-pull-request-review-comment` to reply tagging the PR author so + they can review the comment manually (e.g., "I couldn't determine what + action is needed here. 
@, could you take a look?"). Look up + the PR author from the pull request details fetched earlier. diff --git a/.github/workflows/python-code-quality.yml b/.github/workflows/python-code-quality.yml index 6263ac3..927fe76 100644 --- a/.github/workflows/python-code-quality.yml +++ b/.github/workflows/python-code-quality.yml @@ -2,8 +2,7 @@ name: "Python: code quality" on: workflow_dispatch: - pull_request: - branches: ["main", "feature*", "fix*"] + workflow_call: permissions: contents: read diff --git a/.github/workflows/python-docker-build.yml b/.github/workflows/python-docker-build.yml index 919e8c3..34bc024 100644 --- a/.github/workflows/python-docker-build.yml +++ b/.github/workflows/python-docker-build.yml @@ -2,12 +2,7 @@ name: "Python: docker build & smoke test" on: workflow_dispatch: - pull_request: - branches: ["main", "feature*", "fix*"] - paths: - - "agents/*/Dockerfile" - - "agents/*/src/**" - - "agents/*/pyproject.toml" + workflow_call: permissions: contents: read diff --git a/.github/workflows/python-package-build.yml b/.github/workflows/python-package-build.yml index 06687ef..357e045 100644 --- a/.github/workflows/python-package-build.yml +++ b/.github/workflows/python-package-build.yml @@ -2,11 +2,7 @@ name: "Python: package build" on: workflow_dispatch: - pull_request: - branches: ["main", "feature*", "fix*"] - paths: - - "agents/*/pyproject.toml" - - "agents/*/src/**" + workflow_call: permissions: contents: read diff --git a/.github/workflows/python-tests.yml b/.github/workflows/python-tests.yml index c08caed..0e2e8d5 100644 --- a/.github/workflows/python-tests.yml +++ b/.github/workflows/python-tests.yml @@ -2,8 +2,7 @@ name: "Python: tests" on: workflow_dispatch: - pull_request: - branches: ["main", "feature*", "fix*"] + workflow_call: permissions: contents: read diff --git a/.github/workflows/security-review.lock.yml b/.github/workflows/security-review.lock.yml index 0bd42da..324b935 100644 --- a/.github/workflows/security-review.lock.yml +++ 
b/.github/workflows/security-review.lock.yml @@ -21,39 +21,31 @@ # # For more information: https://github.github.com/gh-aw/introduction/overview/ # -# Automated security review for pull requests. Analyzes changed files against 15 security posture categories and posts inline review comments on findings, then requests Copilot code review. +# Automated security review for pull requests. Analyzes changed files against 15 security posture categories and posts inline review comments on findings. # -# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"fcf4ec8e200afb9cf705aa7eca88dd83e01c9f2db564ba8a54ebb4d187c9f530","compiler_version":"v0.46.3"} +# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"bee08f1c7f9ba74b7900257d5c00f796abd591c727efef7457b83fefabf6c144","compiler_version":"v0.46.3"} name: "Security Review" "on": - pull_request: - types: - - opened - - synchronize + workflow_call: permissions: {} concurrency: - group: "gh-aw-${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}" - cancel-in-progress: true + group: "gh-aw-${{ github.workflow }}" run-name: "Security Review" jobs: activation: needs: pre_activation - if: > - (needs.pre_activation.outputs.activated == 'true') && ((github.event_name != 'pull_request') || (github.event.pull_request.head.repo.id == github.repository_id)) + if: needs.pre_activation.outputs.activated == 'true' runs-on: ubuntu-slim permissions: contents: read outputs: - body: ${{ steps.sanitized.outputs.body }} comment_id: "" comment_repo: "" - text: ${{ steps.sanitized.outputs.text }} - title: ${{ steps.sanitized.outputs.title }} steps: - name: Setup Scripts uses: github/gh-aw/actions/setup@a70c5eada06553e3510ac27f2c3bda9d3705bccb # v0.46.3 @@ -85,15 +77,6 @@ jobs: setupGlobals(core, github, context, exec, io); const { main } = require('/opt/gh-aw/actions/check_workflow_timestamp_api.cjs'); await main(); - - name: Compute current body text - id: sanitized - uses: 
actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 - with: - script: | - const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); - setupGlobals(core, github, context, exec, io); - const { main } = require('/opt/gh-aw/actions/compute_text.cjs'); - await main(); - name: Create prompt with built-in context env: GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt @@ -114,7 +97,7 @@ jobs: cat "/opt/gh-aw/prompts/xpia.md" >> "$GH_AW_PROMPT" cat "/opt/gh-aw/prompts/temp_folder_prompt.md" >> "$GH_AW_PROMPT" cat "/opt/gh-aw/prompts/markdown.md" >> "$GH_AW_PROMPT" - cat "/opt/gh-aw/prompts/cache_memory_prompt.md" >> "$GH_AW_PROMPT" + cat "/opt/gh-aw/prompts/cache_memory_prompt_multi.md" >> "$GH_AW_PROMPT" cat << 'GH_AW_PROMPT_EOF' >> "$GH_AW_PROMPT" GitHub API Access Instructions @@ -195,8 +178,22 @@ jobs: env: GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt GH_AW_ALLOWED_EXTENSIONS: '' - GH_AW_CACHE_DESCRIPTION: '' - GH_AW_CACHE_DIR: '/tmp/gh-aw/cache-memory/' + GH_AW_CACHE_EXAMPLES: '- `/tmp/gh-aw/cache-memory-pull-request-review-context/notes.txt` - general notes and observations +- `/tmp/gh-aw/cache-memory-pull-request-review-context/notes.md` - markdown formatted notes +- `/tmp/gh-aw/cache-memory-pull-request-review-context/preferences.json` - user preferences and settings +- `/tmp/gh-aw/cache-memory-pull-request-review-context/history.jsonl` - activity history in JSON Lines format +- `/tmp/gh-aw/cache-memory-pull-request-review-context/data.csv` - tabular data +- `/tmp/gh-aw/cache-memory-pull-request-review-context/state/` - organized state files in subdirectories (with allowed file types) +- `/tmp/gh-aw/cache-memory-review patterns/notes.txt` - general notes and observations +- `/tmp/gh-aw/cache-memory-review patterns/notes.md` - markdown formatted notes +- `/tmp/gh-aw/cache-memory-review patterns/preferences.json` - user preferences and settings +- `/tmp/gh-aw/cache-memory-review patterns/history.jsonl` - activity history in JSON Lines 
format +- `/tmp/gh-aw/cache-memory-review patterns/data.csv` - tabular data +- `/tmp/gh-aw/cache-memory-review patterns/state/` - organized state files in subdirectories (with allowed file types) +' + GH_AW_CACHE_LIST: '- **pull-request-review-context**: `/tmp/gh-aw/cache-memory-pull-request-review-context/` +- **review patterns**: `/tmp/gh-aw/cache-memory-review patterns/` +' GH_AW_GITHUB_ACTOR: ${{ github.actor }} GH_AW_GITHUB_EVENT_COMMENT_ID: ${{ github.event.comment.id }} GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER: ${{ github.event.discussion.number }} @@ -219,8 +216,8 @@ jobs: file: process.env.GH_AW_PROMPT, substitutions: { GH_AW_ALLOWED_EXTENSIONS: process.env.GH_AW_ALLOWED_EXTENSIONS, - GH_AW_CACHE_DESCRIPTION: process.env.GH_AW_CACHE_DESCRIPTION, - GH_AW_CACHE_DIR: process.env.GH_AW_CACHE_DIR, + GH_AW_CACHE_EXAMPLES: process.env.GH_AW_CACHE_EXAMPLES, + GH_AW_CACHE_LIST: process.env.GH_AW_CACHE_LIST, GH_AW_GITHUB_ACTOR: process.env.GH_AW_GITHUB_ACTOR, GH_AW_GITHUB_EVENT_COMMENT_ID: process.env.GH_AW_GITHUB_EVENT_COMMENT_ID, GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER: process.env.GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER, @@ -255,6 +252,8 @@ jobs: permissions: contents: read pull-requests: read + concurrency: + group: "gh-aw-copilot-${{ github.workflow }}" env: DEFAULT_BRANCH: ${{ github.event.repository.default_branch }} GH_AW_ASSETS_ALLOWED_EXTS: "" @@ -284,15 +283,26 @@ jobs: - name: Create gh-aw temp directory run: bash /opt/gh-aw/actions/create_gh_aw_tmp_dir.sh # Cache memory file share configuration from frontmatter processed below - - name: Create cache-memory directory - run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - - name: Restore cache-memory file share data + - name: Create cache-memory directory (pull-request-review-context) + run: | + mkdir -p /tmp/gh-aw/cache-memory-pull-request-review-context + - name: Restore cache-memory file share data (pull-request-review-context) + uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 
+ with: + key: security-review-pr-${{ github.event.pull_request.number }}-${{ github.run_id }} + path: /tmp/gh-aw/cache-memory-pull-request-review-context + restore-keys: | + security-review-pr-${{ github.event.pull_request.number }}- + - name: Create cache-memory directory (review patterns) + run: | + mkdir -p /tmp/gh-aw/cache-memory-review patterns + - name: Restore cache-memory file share data (review patterns) uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 with: - key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} - path: /tmp/gh-aw/cache-memory + key: security-review-patterns-${{ github.run_id }} + path: /tmp/gh-aw/cache-memory-review patterns restore-keys: | - memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}- + security-review-patterns- - name: Configure Git credentials env: REPO_NAME: ${{ github.repository }} @@ -389,7 +399,7 @@ jobs: mkdir -p /tmp/gh-aw/safeoutputs mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs cat > /opt/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_EOF' - {"add_reviewer":{"max":3,"reviewers":["copilot"]},"create_pull_request_review_comment":{"max":20},"missing_data":{},"missing_tool":{},"noop":{"max":1},"submit_pull_request_review":{"max":1}} + {"create_pull_request_review_comment":{"max":20},"missing_data":{},"missing_tool":{},"noop":{"max":1},"submit_pull_request_review":{"max":1}} GH_AW_SAFE_OUTPUTS_CONFIG_EOF cat > /opt/gh-aw/safeoutputs/tools.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_EOF' [ @@ -461,33 +471,6 @@ jobs: }, "name": "submit_pull_request_review" }, - { - "description": "Add reviewers to a GitHub pull request. Reviewers receive notifications and can approve or request changes. Use 'copilot' as a reviewer name to request the Copilot PR review bot. CONSTRAINTS: Maximum 3 reviewer(s) can be added.", - "inputSchema": { - "additionalProperties": false, - "properties": { - "pull_request_number": { - "description": "Pull request number to add reviewers to. 
This is the numeric ID from the GitHub URL (e.g., 876 in github.com/owner/repo/pull/876). If omitted, adds reviewers to the PR that triggered this workflow.", - "type": [ - "number", - "string" - ] - }, - "reviewers": { - "description": "GitHub usernames to add as reviewers (e.g., ['octocat', 'copilot']). Users must have access to the repository.", - "items": { - "type": "string" - }, - "type": "array" - } - }, - "required": [ - "reviewers" - ], - "type": "object" - }, - "name": "add_reviewer" - }, { "description": "Report that a tool or capability needed to complete the task is not available, or share any information you deem important about missing functionality or limitations. Use this when you cannot accomplish what was requested because the required functionality is missing or access is restricted.", "inputSchema": { @@ -561,21 +544,6 @@ jobs: GH_AW_SAFE_OUTPUTS_TOOLS_EOF cat > /opt/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_EOF' { - "add_reviewer": { - "defaultMax": 3, - "fields": { - "pull_request_number": { - "issueOrPRNumber": true - }, - "reviewers": { - "required": true, - "type": "array", - "itemType": "string", - "itemSanitize": true, - "itemMaxLength": 39 - } - } - }, "create_pull_request_review_comment": { "defaultMax": 1, "fields": { @@ -771,7 +739,7 @@ jobs: run: | set -o pipefail sudo -E awf --env-all --container-workdir "${GITHUB_WORKSPACE}" --allow-domains 
api.business.githubcopilot.com,api.enterprise.githubcopilot.com,api.github.com,api.githubcopilot.com,api.individual.githubcopilot.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,github.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,ppa.launchpad.net,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,telemetry.enterprise.githubcopilot.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --enable-host-access --image-tag 0.20.0 --skip-pull --enable-api-proxy \ - -- /bin/bash -c '/usr/local/bin/copilot --add-dir /tmp/gh-aw/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --add-dir "${GITHUB_WORKSPACE}" --disable-builtin-mcps --agent security-reviewer --allow-all-tools --add-dir /tmp/gh-aw/cache-memory/ --allow-all-paths --share /tmp/gh-aw/sandbox/agent/logs/conversation.md --prompt "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"${GH_AW_MODEL_AGENT_COPILOT:+ --model "$GH_AW_MODEL_AGENT_COPILOT"}' 2>&1 | tee -a /tmp/gh-aw/agent-stdio.log + -- /bin/bash -c '/usr/local/bin/copilot --add-dir /tmp/gh-aw/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --add-dir "${GITHUB_WORKSPACE}" --disable-builtin-mcps --agent security-reviewer --allow-all-tools --add-dir /tmp/gh-aw/cache-memory-pull-request-review-context/ --add-dir '\''/tmp/gh-aw/cache-memory-review patterns/'\'' --allow-all-paths --share /tmp/gh-aw/sandbox/agent/logs/conversation.md --prompt "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"${GH_AW_MODEL_AGENT_COPILOT:+ --model 
"$GH_AW_MODEL_AGENT_COPILOT"}' 2>&1 | tee -a /tmp/gh-aw/agent-stdio.log env: COPILOT_AGENT_RUNNER_TYPE: STANDALONE COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }} @@ -908,12 +876,19 @@ jobs: else echo 'AWF binary not installed, skipping firewall log summary' fi - - name: Upload cache-memory data as artifact + - name: Upload cache-memory data as artifact (pull-request-review-context) + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + if: always() + with: + name: cache-memory-pull-request-review-context + path: /tmp/gh-aw/cache-memory-pull-request-review-context + - name: Upload cache-memory data as artifact (review patterns) uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 if: always() with: - name: cache-memory - path: /tmp/gh-aw/cache-memory + name: cache-memory-review patterns + path: /tmp/gh-aw/cache-memory-review patterns + retention-days: 30 - name: Upload agent artifacts if: always() continue-on-error: true @@ -1030,6 +1005,8 @@ jobs: if: needs.agent.outputs.output_types != '' || needs.agent.outputs.has_patch == 'true' runs-on: ubuntu-latest permissions: {} + concurrency: + group: "gh-aw-copilot-${{ github.workflow }}" timeout-minutes: 10 outputs: success: ${{ steps.parse_results.outputs.success }} @@ -1059,7 +1036,7 @@ jobs: uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 env: WORKFLOW_NAME: "Security Review" - WORKFLOW_DESCRIPTION: "Automated security review for pull requests. Analyzes changed files against 15 security posture categories and posts inline review comments on findings, then requests Copilot code review." + WORKFLOW_DESCRIPTION: "Automated security review for pull requests. Analyzes changed files against 15 security posture categories and posts inline review comments on findings." 
HAS_PATCH: ${{ needs.agent.outputs.has_patch }} with: script: | @@ -1125,7 +1102,6 @@ jobs: if-no-files-found: ignore pre_activation: - if: (github.event_name != 'pull_request') || (github.event.pull_request.head.repo.id == github.repository_id) runs-on: ubuntu-slim outputs: activated: ${{ steps.check_membership.outputs.is_team_member == 'true' }} @@ -1162,7 +1138,6 @@ jobs: GH_AW_WORKFLOW_ID: "security-review" GH_AW_WORKFLOW_NAME: "Security Review" outputs: - add_reviewer_reviewers_added: ${{ steps.process_safe_outputs.outputs.reviewers_added }} create_discussion_error_count: ${{ steps.process_safe_outputs.outputs.create_discussion_error_count }} create_discussion_errors: ${{ steps.process_safe_outputs.outputs.create_discussion_errors }} process_safe_outputs_processed_count: ${{ steps.process_safe_outputs.outputs.processed_count }} @@ -1188,7 +1163,7 @@ jobs: uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 env: GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }} - GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG: "{\"add_reviewer\":{\"allowed\":[\"copilot\"],\"max\":3,\"target\":\"triggering\"},\"create_pull_request_review_comment\":{\"max\":20,\"side\":\"RIGHT\",\"target\":\"triggering\"},\"missing_data\":{},\"missing_tool\":{},\"submit_pull_request_review\":{\"footer\":\"none\",\"max\":1}}" + GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG: "{\"create_pull_request_review_comment\":{\"max\":20,\"side\":\"RIGHT\",\"target\":\"triggering\"},\"missing_data\":{},\"missing_tool\":{},\"submit_pull_request_review\":{\"footer\":\"none\",\"max\":1}}" with: github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} script: | @@ -1209,14 +1184,25 @@ jobs: uses: github/gh-aw/actions/setup@a70c5eada06553e3510ac27f2c3bda9d3705bccb # v0.46.3 with: destination: /opt/gh-aw/actions - - name: Download cache-memory artifact (default) + - name: Download cache-memory artifact (pull-request-review-context) + uses: 
actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + continue-on-error: true + with: + name: cache-memory-pull-request-review-context + path: /tmp/gh-aw/cache-memory-pull-request-review-context + - name: Save cache-memory to cache (pull-request-review-context) + uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + with: + key: security-review-pr-${{ github.event.pull_request.number }}-${{ github.run_id }} + path: /tmp/gh-aw/cache-memory-pull-request-review-context + - name: Download cache-memory artifact (review patterns) uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 continue-on-error: true with: - name: cache-memory - path: /tmp/gh-aw/cache-memory - - name: Save cache-memory to cache (default) + name: cache-memory-review patterns + path: /tmp/gh-aw/cache-memory-review patterns + - name: Save cache-memory to cache (review patterns) uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 with: - key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} - path: /tmp/gh-aw/cache-memory + key: security-review-patterns-${{ github.run_id }} + path: /tmp/gh-aw/cache-memory-review patterns diff --git a/.github/workflows/security-review.md b/.github/workflows/security-review.md index 93da5eb..e0d9ec3 100644 --- a/.github/workflows/security-review.md +++ b/.github/workflows/security-review.md @@ -1,11 +1,9 @@ --- description: Automated security review for pull requests. Analyzes changed files against - 15 security posture categories and posts inline review comments on findings, - then requests Copilot code review. + 15 security posture categories and posts inline review comments on findings. 
on: - pull_request: - types: [opened, synchronize] + workflow_call: permissions: contents: read @@ -16,7 +14,12 @@ engine: agent: security-reviewer tools: - cache-memory: true + cache-memory: + - id: pull-request-review-context + key: "security-review-pr-${{ github.event.pull_request.number }}" + - id: review patterns + key: "security-review-patterns" + retention-days: 30 github: toolsets: [repos, pull_requests] @@ -27,11 +30,6 @@ safe-outputs: submit-pull-request-review: max: 1 footer: false - add-reviewer: - reviewers: [copilot] - max: 3 - target: "triggering" - github-token: ${{ secrets.GH_AW_AGENT_TOKEN }} --- # Security Review @@ -45,11 +43,10 @@ agent instructions. 1. **Access memory first.** Use cache memory at `/tmp/gh-aw/cache-memory/` to: - Check prior review context for this PR at - `/tmp/gh-aw/cache-memory/security-review-pr-${{ github.event.pull_request.number }}.json` + `/tmp/gh-aw/cache-memory/security-review-pr-${{ github.event.pull_request.number }}.json`. This corresponds to the cache memory tool with id `pull-request-review-context` and can contain information about previous review findings, categories, files reviewed, and timestamps for this PR. - Identify recurring security patterns in this repository from - `/tmp/gh-aw/cache-memory/security-review-patterns.json` - - Avoid repeating the same inline comments from previous reviews unless the - issue remains unresolved in newly changed lines + `/tmp/gh-aw/cache-memory/security-review-patterns.json`. This corresponds to the cache memory tool with id `review-patterns` and can contain information about recurring security issues and patterns in the repository. + - Avoid repeating the same inline comments from previous reviews if the previous comment is not yet resolved or outdated (e.g., if the same issue is still present in the code or if the code has not changed since the last review). 2.
**Fetch the pull request diff.** Read the pull request details and all changed files for PR #${{ github.event.pull_request.number }}. @@ -69,21 +66,31 @@ agent instructions. 5. **Submit the review.** After posting all inline comments: - If you found any **critical** or **high** severity issues, submit the review with `REQUEST_CHANGES` and a summary body listing the top findings. - - If you found only **medium** or **low** issues, submit with `COMMENT` and - a brief summary. - - If no issues were found, submit with `COMMENT` and a body stating the + - If you found only **medium** or **low** issues, submit with `APPROVE` and + a brief summary noting the medium/low findings. These are not blocking. + - If no issues were found, submit with `APPROVE` and a body stating the changes look secure. + - **Supersede previous review if resolved.** Check the cache memory for + this PR to see if a previous security review submitted + `REQUEST_CHANGES`. If it did, compare the previous findings against the + current diff. If the previously flagged issues have been fixed and no + new critical/high issues are found, submit the new review as `APPROVE` + with a detailed body that includes: + - A summary stating the previous issues have been resolved. + - A list of the previously flagged findings and how each was addressed + (e.g., "**Input Validation** (high): User input is now sanitized in + `validators.py` — resolved."). + - Any remaining medium/low findings from the current review, if any. + - This replaces the old `REQUEST_CHANGES` review and unblocks the PR. 6. **Update memory.** After submitting the review: - Write/update PR-specific memory at `/tmp/gh-aw/cache-memory/security-review-pr-${{ github.event.pull_request.number }}.json` including review timestamp, findings summary, categories found, and files - reviewed + reviewed. The id of the cache memory tool for this is `pull-request-review-context`. 
- Update shared pattern memory at `/tmp/gh-aw/cache-memory/security-review-patterns.json` with recurring - issue themes and counts - -7. **Request Copilot review.** After submitting the security review, add Copilot as a reviewer on the pull request for an additional code quality review. + issue themes and counts. The id of the cache memory tool for this is `review-patterns`. ## Review Guidelines diff --git a/README.md b/README.md index b6ad502..f018fd9 100644 --- a/README.md +++ b/README.md @@ -51,6 +51,8 @@ flowchart TB B1[Wheel build] B2[Docker build + smoke test] end + L3a --> L3b + L3a --> L3c end subgraph L4["4. CI - Security Scanning"] @@ -87,9 +89,9 @@ Each layer catches different classes of issues: | --- | --- | --- | | **Editor** | As you type | Type errors, formatting, AI-aware context via custom instructions | | **Pre-commit** | On `git commit` (staged files) | Style drift, security anti-patterns, broken configs, stale lockfiles | -| **CI quality gate** | On PR | Lock verification, full repo-wide type safety, code quality, test regressions, coverage, build validation. 
Split into three sub-layers: *code quality* (lock-verify, format, lint, type checks, Bandit, markdown lint), *tests* (PyTest + coverage), and *build validation* (wheel build + Docker build & smoke test, both path-filtered) | -| **CI security** | On PR / push to main / schedule | Dataflow vulnerabilities, outdated dependencies, security posture gaps | -| **Copilot Review** | On PR (after security review) | AI-powered code review with suggestions and inline comments | +| **CI quality gate** | On PR | Code quality runs first (lock-verify, format, lint, type checks, Bandit, markdown lint), then tests (PyTest + coverage) and build validation (wheel build + Docker build & smoke test) run in parallel | +| **CI security** | On PR (after quality gate) | CodeQL SAST, Dependabot dependency updates, Copilot security review agent (15 posture categories) | +| **Copilot Review** | On PR (after security review approves) | AI-powered code review with suggestions and inline comments | | **Release** | On push to main or manual | Agent release: builds changed agents, creates `-v` tags with wheel assets. 
Monorepo release: tags shared infra changes as `v` | --- @@ -127,44 +129,70 @@ Repo root ├─ .github/ # GitHub configuration and automation │ ├─ actions/ # reusable composite actions │ │ └─ setup-python-env/ # set up uv + install dependencies -│ ├─ workflows/ # GitHub Actions workflows -│ │ ├─ python-code-quality.yml # format, lint, type-check, security scan -│ │ ├─ python-tests.yml # pytest across Python matrix -│ │ ├─ python-docs.yml # build Sphinx docs, deploy to GitHub Pages -│ │ ├─ python-release.yml # build and publish agent packages -│ │ ├─ python-package-build.yml # build changed agent wheels on PR -│ │ ├─ python-docker-build.yml # build and smoke-test agent Docker images -│ │ ├─ monorepo-release.yml # tag and release shared monorepo infra -│ │ ├─ codeql-analysis.yml # CodeQL security scanning -│ │ ├─ security-review.md # agentic workflow (security review) -│ │ └─ security-review.lock.yml # compiled agentic workflow (generated) +│ │ ├─ action.yml # composite action definition +│ │ └─ README.md # action usage docs │ ├─ agents/ # Copilot custom agents (*.agent.md) -│ │ ├─ security-reviewer.agent.md # security reviewer agent -│ │ └─ agentic-workflows.agent.md # dispatcher agent (gh aw init) +│ │ ├─ agentic-workflows.agent.md # dispatcher agent (gh aw init) +│ │ └─ security-reviewer.agent.md # security reviewer agent +│ ├─ aw/ # agentic workflow lock data (generated) +│ │ └─ actions-lock.json # compiled action references (generated) │ ├─ instructions/ # Copilot custom instructions -│ │ ├─ python.instructions.md # Python coding conventions +│ │ ├─ agentic-workflows.instructions.md # agentic workflow authoring │ │ ├─ agents.instructions.md # agent development guidelines +│ │ ├─ copilot-agents.instructions.md # Copilot agent file format │ │ ├─ docs.instructions.md # documentation conventions -│ │ ├─ agentic-workflows.instructions.md # agentic workflow authoring -│ │ └─ copilot-agents.instructions.md # Copilot agent file format +│ │ └─ python.instructions.md # 
Python coding conventions │ ├─ ISSUE_TEMPLATE/ # issue templates (bug, feature) -│ ├─ pull_request_template.md # PR template -│ ├─ dependabot.yml # Dependabot config +│ │ ├─ bug_report.yml # bug report template +│ │ └─ feature_request.yml # feature request template +│ ├─ workflows/ # GitHub Actions workflows +│ │ ├─ codeql-analysis.yml # CodeQL security scanning +│ │ ├─ copilot-review.lock.yml # compiled agentic workflow (generated) +│ │ ├─ copilot-review.md # agentic workflow (add Copilot reviewer) +│ │ ├─ monorepo-release.yml # tag and release shared monorepo infra +│ │ ├─ pr-orchestrator.yml # PR pipeline: quality → tests+build → CodeQL +│ │ ├─ pr-review-comment-handler.lock.yml # compiled agentic workflow (generated) +│ │ ├─ pr-review-comment-handler.md # agentic workflow (triage review comments) +│ │ ├─ python-code-quality.yml # format, lint, type-check, security scan +│ │ ├─ python-docker-build.yml # build and smoke-test agent Docker images +│ │ ├─ python-docs.yml # build Sphinx docs, deploy to GitHub Pages +│ │ ├─ python-package-build.yml # build changed agent wheels on PR +│ │ ├─ python-release.yml # build and publish agent packages +│ │ ├─ python-tests.yml # pytest across Python matrix +│ │ ├─ security-review.lock.yml # compiled agentic workflow (generated) +│ │ └─ security-review.md # agentic workflow (security review) +│ ├─ CODEOWNERS # code ownership rules │ ├─ copilot-instructions.md # global Copilot instructions -│ └─ aw/ # agentic workflow lock data (generated) +│ ├─ dependabot.yml # Dependabot config +│ └─ pull_request_template.md # PR template +├─ .vscode/ # VS Code workspace settings +│ ├─ extensions.json # recommended extensions +│ ├─ launch.json # debug configurations +│ ├─ settings.json # editor and tool settings +│ └─ tasks.json # task runner definitions ├─ agents/ │ └─ / +│ ├─ docs/source/ # Sphinx sources │ ├─ src//agents// # agent code │ ├─ tests/ # agent tests -│ ├─ docs/source/ # Sphinx sources │ ├─ Dockerfile # container image -│ ├─ 
pyproject.toml # agent config, deps, version -│ └─ LICENSE # agent-specific license +│ ├─ LICENSE # agent-specific license +│ └─ pyproject.toml # agent config, deps, version ├─ docs/ # unified Sphinx sources + output ├─ scripts/ # shared helpers for tasks/CI +├─ .gitattributes # Git attributes (line endings, diff) +├─ .gitignore # Git ignore rules +├─ .pre-commit-config.yaml # pre-commit hook definitions +├─ CODE_OF_CONDUCT.md # contributor code of conduct +├─ CODING_STANDARDS.md # coding standards and conventions +├─ CONTRIBUTING.md # contribution guidelines +├─ DEVELOPMENT.md # development guide +├─ LICENSE # project license +├─ README.md # project overview (this file) +├─ SECURITY.md # security policy ├─ pyproject.toml # root config, deps, poe tasks ├─ shared_tasks.toml # poe tasks shared by all agents -└─ .pre-commit-config.yaml # pre-commit hook definitions +└─ uv.lock # locked dependency versions ``` ### Scripts (`scripts/`) @@ -244,7 +272,7 @@ flowchart TD ### CI workflows — on every PR -Every pull request triggers up to six parallel workflows. Code quality and tests run on all PRs across a Python 3.10–3.13 matrix. Package build and Docker build are path-filtered — they only run when agent source code, pyproject files, or Dockerfiles change. CodeQL and the Copilot security agent provide additional security coverage. +Every pull request triggers the PR orchestrator, which runs workflows sequentially. Code quality must pass before tests and build validation run in parallel. CodeQL runs after tests and builds succeed. Finally, the Copilot security review agent analyses the changes against 15 security posture categories. ```mermaid flowchart TD @@ -253,12 +281,7 @@ flowchart TD T1["PR opened / sync"] end - trigger --> CQ_QUAL["python-code-quality.yml
Python 3.10–3.13 matrix"] - trigger --> CQ_TEST["python-tests.yml
Python 3.10–3.13 matrix"] - trigger --> PB["python-package-build.yml
Wheel build
(path-filtered)"] - trigger --> DK["python-docker-build.yml
Docker build & smoke test
(path-filtered)"] - trigger --> CQ["codeql-analysis.yml
CodeQL SAST
(PR + push to main only)"] - trigger --> SR["security-review.md
Copilot security agent
(PR only)"] + trigger --> CQ_QUAL["Stage 1 · python-code-quality.yml
Python 3.10–3.13 matrix"] CQ_QUAL --> CQ_QUAL1["uv sync"] CQ_QUAL1 --> CQ_QUAL1b["Lock verify"] @@ -266,6 +289,10 @@ flowchart TD CQ_QUAL2 --> CQ_QUAL3["Pyright + MyPy"] CQ_QUAL3 --> CQ_QUAL4["Bandit + Markdown lint"] + CQ_QUAL4 --> CQ_TEST["Stage 2 · python-tests.yml
Python 3.10–3.13 matrix"] + CQ_QUAL4 --> PB["Stage 2 · python-package-build.yml
Wheel build"] + CQ_QUAL4 --> DK["Stage 2 · python-docker-build.yml
Docker build & smoke test"] + CQ_TEST --> CQ_TEST1["uv sync"] CQ_TEST1 --> CQ_TEST2["poe test"] @@ -277,15 +304,19 @@ flowchart TD DK1 --> DK2["docker build"] DK2 --> DK3["Smoke test
(--help)"] + CQ_TEST2 --> CQ["Stage 3 · codeql-analysis.yml
CodeQL SAST"] + PB3 --> CQ + DK3 --> CQ + CQ --> CQ1["CodeQL init
(Python + Actions)"] CQ1 --> CQ2["Autobuild"] CQ2 --> CQ3["CodeQL analyze"] + CQ3 --> SR["Stage 4 · security-review.md
Copilot security agent"] SR --> SR1["Read PR diff"] SR1 --> SR2["Review 15 security
posture categories"] SR2 --> SR3["Post inline review
comments"] - SR3 --> SR4["Submit review
(REQUEST_CHANGES
or COMMENT)"] - SR4 --> SR5["Assign Copilot
as PR reviewer"] + SR3 --> SR4["Submit review
(REQUEST_CHANGES
or APPROVE)"] ``` ### Release workflow — on push to main or manual dispatch @@ -517,7 +548,7 @@ Publishing is **commented out** by default — the workflow only creates tags an ## Agentic workflows -The repository includes a [GitHub Agentic Workflow](https://github.github.com/gh-aw/) that automates security review on every pull request. +The repository includes [GitHub Agentic Workflows](https://github.github.com/gh-aw/) that automate security review, Copilot code review, and PR review comment triage on every pull request. ### Security review agent @@ -525,34 +556,37 @@ A Copilot custom agent defined in [`.github/agents/security-reviewer.agent.md`]( ### Security review workflow -The agentic workflow at [`.github/workflows/security-review.md`](.github/workflows/security-review.md) imports the security review agent and runs on every `pull_request` event (`opened`, `synchronize`). It: +The agentic workflow at [`.github/workflows/security-review.md`](.github/workflows/security-review.md) imports the security review agent and runs as part of the PR orchestrator pipeline (after all CI checks pass). It: 1. Reads the pull request diff. 2. Reviews changed files against all 15 security posture categories. 3. Posts inline review comments on specific code lines where issues are found. -4. Submits a consolidated review (`REQUEST_CHANGES` for critical/high, `COMMENT` otherwise). -5. Requests Copilot as a reviewer for additional code quality coverage. +4. Submits a consolidated review (`REQUEST_CHANGES` for critical/high, `APPROVE` otherwise). >[!IMPORTANT] > The `security-review.md` workflow is using the custom agent `.github/agents/security-reviewer.agent.md` which is defined in this repository. To be able to use this agent with `copilot` AI Engine, `COPILOT_GITHUB_TOKEN` secret must be added to the repository with a fine-grained PAT that has `Copilot Requests: Read-only` scope on public repositories. 
For more information see the [documentation](https://github.github.com/gh-aw/reference/auth/#copilot_github_token). ### Copilot code review -The [`add-reviewer` safe-output](https://github.github.com/gh-aw/reference/safe-outputs/#add-reviewer-add-reviewer) in the workflow assigns Copilot as a PR reviewer after the security review completes. This requires a fine-grained PAT stored as the [`GH_AW_AGENT_TOKEN` repository secret]https://github.github.com/gh-aw/reference/auth/#gh_aw_agent_token) with: +The agentic workflow at [`.github/workflows/copilot-review.md`](.github/workflows/copilot-review.md) triggers when a PR review is submitted. It checks whether the security review agent approved the PR and, if so, adds Copilot as a reviewer for additional code quality coverage. This requires a fine-grained PAT stored as the [`GH_AW_AGENT_TOKEN` repository secret](https://github.github.com/gh-aw/reference/auth/#gh_aw_agent_token) with: - Resource owner: Your user account -- Repository access: “Public repositories” or select specific repos +- Repository access: "Public repositories" or select specific repos - Repository permissions: - Actions: Write - Contents: Write - Issues: Write - Pull requests: Write - +Comments that cannot be classified are escalated by tagging the PR author. 
### Compiling agentic workflows From 8c56f63106799bc70196440dab55a98f5f8f647a Mon Sep 17 00:00:00 2001 From: pmalarme Date: Thu, 19 Feb 2026 18:45:25 +0100 Subject: [PATCH 69/90] fix: remove unnecessary input from security review job in PR orchestrator --- .github/workflows/pr-orchestrator.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.github/workflows/pr-orchestrator.yml b/.github/workflows/pr-orchestrator.yml index b6a772c..e18d7c2 100644 --- a/.github/workflows/pr-orchestrator.yml +++ b/.github/workflows/pr-orchestrator.yml @@ -58,5 +58,3 @@ jobs: permissions: contents: read pull-requests: read - with: - pr_number: ${{ github.event.pull_request.number }} From 32c0a898f759b0e4aad7fafa1dc1dfa875587860 Mon Sep 17 00:00:00 2001 From: pmalarme Date: Thu, 19 Feb 2026 18:46:20 +0100 Subject: [PATCH 70/90] Add PR write for the Code Review Workflow --- .github/workflows/pr-orchestrator.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pr-orchestrator.yml b/.github/workflows/pr-orchestrator.yml index e18d7c2..2d001cf 100644 --- a/.github/workflows/pr-orchestrator.yml +++ b/.github/workflows/pr-orchestrator.yml @@ -57,4 +57,4 @@ jobs: uses: ./.github/workflows/security-review.lock.yml permissions: contents: read - pull-requests: read + pull-requests: write From 913241e04292261d889b6df23034f3b0a27f2ad4 Mon Sep 17 00:00:00 2001 From: pmalarme Date: Thu, 19 Feb 2026 19:21:30 +0100 Subject: [PATCH 71/90] feat: reorganize PR orchestrator workflow stages and enhance documentation for clarity --- .github/workflows/pr-orchestrator.yml | 35 ++++++++++++++------------- README.md | 27 +++++++++++---------- 2 files changed, 32 insertions(+), 30 deletions(-) diff --git a/.github/workflows/pr-orchestrator.yml b/.github/workflows/pr-orchestrator.yml index 2d001cf..a512669 100644 --- a/.github/workflows/pr-orchestrator.yml +++ b/.github/workflows/pr-orchestrator.yml @@ -11,50 +11,51 @@ permissions: security-events: write jobs: - # ── Stage 1: 
Code Quality ──────────────────────────────────────────── + # ── Stage 1a: Code Quality ────────────────────────────────────────── code-quality: name: code quality uses: ./.github/workflows/python-code-quality.yml permissions: contents: read - # ── Stage 2: Tests + Build Validation (parallel, after quality) ───── + # ── Stage 1b: CodeQL (after code quality) ─────────────────────────── + codeql: + name: codeql + needs: code-quality + uses: ./.github/workflows/codeql-analysis.yml + permissions: + actions: read + contents: read + security-events: write + + # ── Stage 2: Tests + Build Validation (parallel, after static analysis) ─ tests: name: tests - needs: code-quality + needs: codeql uses: ./.github/workflows/python-tests.yml permissions: contents: read package-build: name: package build - needs: code-quality + needs: codeql uses: ./.github/workflows/python-package-build.yml permissions: contents: read docker-build: name: docker build - needs: code-quality + needs: codeql uses: ./.github/workflows/python-docker-build.yml permissions: contents: read - # ── Stage 3: CodeQL (after tests + builds pass) ───────────────────── - codeql: - name: codeql - needs: [tests, package-build, docker-build] - uses: ./.github/workflows/codeql-analysis.yml - permissions: - actions: read - contents: read - security-events: write - - # ── Stage 4: Security Review (after CodeQL) ───────────────────────── + # ── Stage 3: Security Review (after tests + builds pass) ──────────── security-review: name: security review - needs: codeql + needs: [tests, package-build, docker-build] uses: ./.github/workflows/security-review.lock.yml + secrets: inherit permissions: contents: read pull-requests: write diff --git a/README.md b/README.md index f018fd9..36bd8a4 100644 --- a/README.md +++ b/README.md @@ -272,7 +272,7 @@ flowchart TD ### CI workflows — on every PR -Every pull request triggers the PR orchestrator, which runs workflows sequentially. 
Code quality must pass before tests and build validation run in parallel. CodeQL runs after tests and builds succeed. Finally, the Copilot security review agent analyses the changes against 15 security posture categories. +Every pull request triggers the PR orchestrator, which runs workflows in three stages. Code quality checks run first, then CodeQL SAST runs after quality passes — ensuring only clean code gets the expensive security scan. Tests and build validation run in parallel after all static analysis passes. Finally, the Copilot security review agent analyses the changes against 15 security posture categories. ```mermaid flowchart TD @@ -281,7 +281,7 @@ flowchart TD T1["PR opened / sync"] end - trigger --> CQ_QUAL["Stage 1 · python-code-quality.yml
Python 3.10–3.13 matrix"] + trigger --> CQ_QUAL["Stage 1a · python-code-quality.yml
Python 3.10–3.13 matrix"] CQ_QUAL --> CQ_QUAL1["uv sync"] CQ_QUAL1 --> CQ_QUAL1b["Lock verify"] @@ -289,9 +289,15 @@ flowchart TD CQ_QUAL2 --> CQ_QUAL3["Pyright + MyPy"] CQ_QUAL3 --> CQ_QUAL4["Bandit + Markdown lint"] - CQ_QUAL4 --> CQ_TEST["Stage 2 · python-tests.yml
Python 3.10–3.13 matrix"] - CQ_QUAL4 --> PB["Stage 2 · python-package-build.yml
Wheel build"] - CQ_QUAL4 --> DK["Stage 2 · python-docker-build.yml
Docker build & smoke test"] + CQ_QUAL4 --> CQ["Stage 1b · codeql-analysis.yml
CodeQL SAST"] + + CQ --> CQ1["CodeQL init
(Python + Actions)"] + CQ1 --> CQ2["Autobuild"] + CQ2 --> CQ3["CodeQL analyze"] + + CQ3 --> CQ_TEST["Stage 2 · python-tests.yml
Python 3.10–3.13 matrix"] + CQ3 --> PB["Stage 2 · python-package-build.yml
Wheel build"] + CQ3 --> DK["Stage 2 · python-docker-build.yml
Docker build & smoke test"] CQ_TEST --> CQ_TEST1["uv sync"] CQ_TEST1 --> CQ_TEST2["poe test"] @@ -304,15 +310,10 @@ flowchart TD DK1 --> DK2["docker build"] DK2 --> DK3["Smoke test
(--help)"] - CQ_TEST2 --> CQ["Stage 3 · codeql-analysis.yml
CodeQL SAST"] - PB3 --> CQ - DK3 --> CQ - - CQ --> CQ1["CodeQL init
(Python + Actions)"] - CQ1 --> CQ2["Autobuild"] - CQ2 --> CQ3["CodeQL analyze"] + CQ_TEST2 --> SR["Stage 3 · security-review.md
Copilot security agent"] + PB3 --> SR + DK3 --> SR - CQ3 --> SR["Stage 4 · security-review.md
Copilot security agent"] SR --> SR1["Read PR diff"] SR1 --> SR2["Review 15 security
posture categories"] SR2 --> SR3["Post inline review
comments"] From 98f9b13d36c8adb531e8c42c9824470a8ecaff35 Mon Sep 17 00:00:00 2001 From: pmalarme Date: Thu, 19 Feb 2026 20:02:59 +0100 Subject: [PATCH 72/90] feat: update workflows to trigger on pull requests and enhance security review process --- .github/workflows/codeql-analysis.yml | 2 +- .github/workflows/pr-orchestrator.yml | 61 ---------------------- .github/workflows/python-code-quality.yml | 3 +- .github/workflows/python-docker-build.yml | 7 ++- .github/workflows/python-package-build.yml | 6 ++- .github/workflows/python-tests.yml | 3 +- .github/workflows/security-review.lock.yml | 30 ++++++++--- .github/workflows/security-review.md | 3 +- README.md | 30 +++++------ 9 files changed, 52 insertions(+), 93 deletions(-) delete mode 100644 .github/workflows/pr-orchestrator.yml diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index a16188a..eab7aef 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -2,7 +2,7 @@ name: "CodeQL Analysis" on: workflow_dispatch: - workflow_call: + pull_request: push: branches: [ "main" ] schedule: diff --git a/.github/workflows/pr-orchestrator.yml b/.github/workflows/pr-orchestrator.yml deleted file mode 100644 index a512669..0000000 --- a/.github/workflows/pr-orchestrator.yml +++ /dev/null @@ -1,61 +0,0 @@ -name: "PR: orchestrator" - -on: - pull_request: - branches: ["main", "feature*", "fix*"] - -permissions: - contents: read - actions: read - pull-requests: read - security-events: write - -jobs: - # ── Stage 1a: Code Quality ────────────────────────────────────────── - code-quality: - name: code quality - uses: ./.github/workflows/python-code-quality.yml - permissions: - contents: read - - # ── Stage 1b: CodeQL (after code quality) ─────────────────────────── - codeql: - name: codeql - needs: code-quality - uses: ./.github/workflows/codeql-analysis.yml - permissions: - actions: read - contents: read - security-events: write - - # ── Stage 2: 
Tests + Build Validation (parallel, after static analysis) ─ - tests: - name: tests - needs: codeql - uses: ./.github/workflows/python-tests.yml - permissions: - contents: read - - package-build: - name: package build - needs: codeql - uses: ./.github/workflows/python-package-build.yml - permissions: - contents: read - - docker-build: - name: docker build - needs: codeql - uses: ./.github/workflows/python-docker-build.yml - permissions: - contents: read - - # ── Stage 3: Security Review (after tests + builds pass) ──────────── - security-review: - name: security review - needs: [tests, package-build, docker-build] - uses: ./.github/workflows/security-review.lock.yml - secrets: inherit - permissions: - contents: read - pull-requests: write diff --git a/.github/workflows/python-code-quality.yml b/.github/workflows/python-code-quality.yml index 927fe76..6263ac3 100644 --- a/.github/workflows/python-code-quality.yml +++ b/.github/workflows/python-code-quality.yml @@ -2,7 +2,8 @@ name: "Python: code quality" on: workflow_dispatch: - workflow_call: + pull_request: + branches: ["main", "feature*", "fix*"] permissions: contents: read diff --git a/.github/workflows/python-docker-build.yml b/.github/workflows/python-docker-build.yml index 34bc024..919e8c3 100644 --- a/.github/workflows/python-docker-build.yml +++ b/.github/workflows/python-docker-build.yml @@ -2,7 +2,12 @@ name: "Python: docker build & smoke test" on: workflow_dispatch: - workflow_call: + pull_request: + branches: ["main", "feature*", "fix*"] + paths: + - "agents/*/Dockerfile" + - "agents/*/src/**" + - "agents/*/pyproject.toml" permissions: contents: read diff --git a/.github/workflows/python-package-build.yml b/.github/workflows/python-package-build.yml index 357e045..06687ef 100644 --- a/.github/workflows/python-package-build.yml +++ b/.github/workflows/python-package-build.yml @@ -2,7 +2,11 @@ name: "Python: package build" on: workflow_dispatch: - workflow_call: + pull_request: + branches: ["main", 
"feature*", "fix*"] + paths: + - "agents/*/pyproject.toml" + - "agents/*/src/**" permissions: contents: read diff --git a/.github/workflows/python-tests.yml b/.github/workflows/python-tests.yml index 0e2e8d5..c08caed 100644 --- a/.github/workflows/python-tests.yml +++ b/.github/workflows/python-tests.yml @@ -2,7 +2,8 @@ name: "Python: tests" on: workflow_dispatch: - workflow_call: + pull_request: + branches: ["main", "feature*", "fix*"] permissions: contents: read diff --git a/.github/workflows/security-review.lock.yml b/.github/workflows/security-review.lock.yml index 324b935..41be770 100644 --- a/.github/workflows/security-review.lock.yml +++ b/.github/workflows/security-review.lock.yml @@ -23,29 +23,37 @@ # # Automated security review for pull requests. Analyzes changed files against 15 security posture categories and posts inline review comments on findings. # -# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"bee08f1c7f9ba74b7900257d5c00f796abd591c727efef7457b83fefabf6c144","compiler_version":"v0.46.3"} +# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"c87e44a92ddf8b44433cf05429a3822db7b56f91a35785c5e652ed91cb2abf54","compiler_version":"v0.46.3"} name: "Security Review" "on": - workflow_call: + pull_request: + types: + - opened + - synchronize permissions: {} concurrency: - group: "gh-aw-${{ github.workflow }}" + group: "gh-aw-${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}" + cancel-in-progress: true run-name: "Security Review" jobs: activation: needs: pre_activation - if: needs.pre_activation.outputs.activated == 'true' + if: > + (needs.pre_activation.outputs.activated == 'true') && ((github.event_name != 'pull_request') || (github.event.pull_request.head.repo.id == github.repository_id)) runs-on: ubuntu-slim permissions: contents: read outputs: + body: ${{ steps.sanitized.outputs.body }} comment_id: "" comment_repo: "" + text: ${{ steps.sanitized.outputs.text }} + title: ${{ steps.sanitized.outputs.title 
}} steps: - name: Setup Scripts uses: github/gh-aw/actions/setup@a70c5eada06553e3510ac27f2c3bda9d3705bccb # v0.46.3 @@ -77,6 +85,15 @@ jobs: setupGlobals(core, github, context, exec, io); const { main } = require('/opt/gh-aw/actions/check_workflow_timestamp_api.cjs'); await main(); + - name: Compute current body text + id: sanitized + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/compute_text.cjs'); + await main(); - name: Create prompt with built-in context env: GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt @@ -252,8 +269,6 @@ jobs: permissions: contents: read pull-requests: read - concurrency: - group: "gh-aw-copilot-${{ github.workflow }}" env: DEFAULT_BRANCH: ${{ github.event.repository.default_branch }} GH_AW_ASSETS_ALLOWED_EXTS: "" @@ -1005,8 +1020,6 @@ jobs: if: needs.agent.outputs.output_types != '' || needs.agent.outputs.has_patch == 'true' runs-on: ubuntu-latest permissions: {} - concurrency: - group: "gh-aw-copilot-${{ github.workflow }}" timeout-minutes: 10 outputs: success: ${{ steps.parse_results.outputs.success }} @@ -1102,6 +1115,7 @@ jobs: if-no-files-found: ignore pre_activation: + if: (github.event_name != 'pull_request') || (github.event.pull_request.head.repo.id == github.repository_id) runs-on: ubuntu-slim outputs: activated: ${{ steps.check_membership.outputs.is_team_member == 'true' }} diff --git a/.github/workflows/security-review.md b/.github/workflows/security-review.md index e0d9ec3..7d4471f 100644 --- a/.github/workflows/security-review.md +++ b/.github/workflows/security-review.md @@ -3,7 +3,8 @@ description: Automated security review for pull requests. Analyzes changed files 15 security posture categories and posts inline review comments on findings. 
on: - workflow_call: + pull_request: + types: [opened, synchronize] permissions: contents: read diff --git a/README.md b/README.md index 36bd8a4..e12a15d 100644 --- a/README.md +++ b/README.md @@ -89,8 +89,8 @@ Each layer catches different classes of issues: | --- | --- | --- | | **Editor** | As you type | Type errors, formatting, AI-aware context via custom instructions | | **Pre-commit** | On `git commit` (staged files) | Style drift, security anti-patterns, broken configs, stale lockfiles | -| **CI quality gate** | On PR | Code quality runs first (lock-verify, format, lint, type checks, Bandit, markdown lint), then tests (PyTest + coverage) and build validation (wheel build + Docker build & smoke test) run in parallel | -| **CI security** | On PR (after quality gate) | CodeQL SAST, Dependabot dependency updates, Copilot security review agent (15 posture categories) | +| **CI quality gate** | On PR | Lock verification, full repo-wide type safety, code quality, test regressions, coverage, build validation. Split into three sub-layers: *code quality* (lock-verify, format, lint, type checks, Bandit, markdown lint), *tests* (PyTest + coverage), and *build validation* (wheel build + Docker build & smoke test, both path-filtered) | +| **CI security** | On PR / push to main / schedule | CodeQL SAST, Dependabot dependency updates, Copilot security review agent (15 posture categories) | | **Copilot Review** | On PR (after security review approves) | AI-powered code review with suggestions and inline comments | | **Release** | On push to main or manual | Agent release: builds changed agents, creates `-v` tags with wheel assets. 
Monorepo release: tags shared infra changes as `v` | @@ -150,7 +150,6 @@ Repo root │ │ ├─ copilot-review.lock.yml # compiled agentic workflow (generated) │ │ ├─ copilot-review.md # agentic workflow (add Copilot reviewer) │ │ ├─ monorepo-release.yml # tag and release shared monorepo infra -│ │ ├─ pr-orchestrator.yml # PR pipeline: quality → tests+build → CodeQL │ │ ├─ pr-review-comment-handler.lock.yml # compiled agentic workflow (generated) │ │ ├─ pr-review-comment-handler.md # agentic workflow (triage review comments) │ │ ├─ python-code-quality.yml # format, lint, type-check, security scan @@ -272,7 +271,7 @@ flowchart TD ### CI workflows — on every PR -Every pull request triggers the PR orchestrator, which runs workflows in three stages. Code quality checks run first, then CodeQL SAST runs after quality passes — ensuring only clean code gets the expensive security scan. Tests and build validation run in parallel after all static analysis passes. Finally, the Copilot security review agent analyses the changes against 15 security posture categories. +Every pull request triggers up to six parallel workflows. Code quality and tests run on all PRs across a Python 3.10–3.13 matrix. Package build and Docker build are path-filtered — they only run when agent source code, pyproject files, or Dockerfiles change. CodeQL and the Copilot security agent provide additional security coverage. ```mermaid flowchart TD @@ -281,7 +280,12 @@ flowchart TD T1["PR opened / sync"] end - trigger --> CQ_QUAL["Stage 1a · python-code-quality.yml
Python 3.10–3.13 matrix"] + trigger --> CQ_QUAL["python-code-quality.yml
Python 3.10–3.13 matrix"] + trigger --> CQ_TEST["python-tests.yml
Python 3.10–3.13 matrix"] + trigger --> PB["python-package-build.yml
Wheel build
(path-filtered)"] + trigger --> DK["python-docker-build.yml
Docker build & smoke test
(path-filtered)"] + trigger --> CQ["codeql-analysis.yml
CodeQL SAST
(PR + push to main only)"] + trigger --> SR["security-review.md
Copilot security agent
(PR only)"] CQ_QUAL --> CQ_QUAL1["uv sync"] CQ_QUAL1 --> CQ_QUAL1b["Lock verify"] @@ -289,16 +293,6 @@ flowchart TD CQ_QUAL2 --> CQ_QUAL3["Pyright + MyPy"] CQ_QUAL3 --> CQ_QUAL4["Bandit + Markdown lint"] - CQ_QUAL4 --> CQ["Stage 1b · codeql-analysis.yml
CodeQL SAST"] - - CQ --> CQ1["CodeQL init
(Python + Actions)"] - CQ1 --> CQ2["Autobuild"] - CQ2 --> CQ3["CodeQL analyze"] - - CQ3 --> CQ_TEST["Stage 2 · python-tests.yml
Python 3.10–3.13 matrix"] - CQ3 --> PB["Stage 2 · python-package-build.yml
Wheel build"] - CQ3 --> DK["Stage 2 · python-docker-build.yml
Docker build & smoke test"] - CQ_TEST --> CQ_TEST1["uv sync"] CQ_TEST1 --> CQ_TEST2["poe test"] @@ -310,9 +304,9 @@ flowchart TD DK1 --> DK2["docker build"] DK2 --> DK3["Smoke test
(--help)"] - CQ_TEST2 --> SR["Stage 3 · security-review.md
Copilot security agent"] - PB3 --> SR - DK3 --> SR + CQ --> CQ1["CodeQL init
(Python + Actions)"] + CQ1 --> CQ2["Autobuild"] + CQ2 --> CQ3["CodeQL analyze"] SR --> SR1["Read PR diff"] SR1 --> SR2["Review 15 security
posture categories"] From e28474193d5c7ce8f5f02fdf40ee2c4b0eb40c4a Mon Sep 17 00:00:00 2001 From: pmalarme Date: Thu, 19 Feb 2026 20:26:16 +0100 Subject: [PATCH 73/90] feat: enhance security review workflows and documentation for clarity --- .github/workflows/security-review.lock.yml | 87 ++++++---------------- .github/workflows/security-review.md | 15 ++-- README.md | 2 +- 3 files changed, 28 insertions(+), 76 deletions(-) diff --git a/.github/workflows/security-review.lock.yml b/.github/workflows/security-review.lock.yml index 41be770..0b39007 100644 --- a/.github/workflows/security-review.lock.yml +++ b/.github/workflows/security-review.lock.yml @@ -23,7 +23,7 @@ # # Automated security review for pull requests. Analyzes changed files against 15 security posture categories and posts inline review comments on findings. # -# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"c87e44a92ddf8b44433cf05429a3822db7b56f91a35785c5e652ed91cb2abf54","compiler_version":"v0.46.3"} +# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"422dc49af5e1e204a6f7318c550060cb008c2adddb5a1e68f9046345705da4d0","compiler_version":"v0.46.3"} name: "Security Review" "on": @@ -114,7 +114,7 @@ jobs: cat "/opt/gh-aw/prompts/xpia.md" >> "$GH_AW_PROMPT" cat "/opt/gh-aw/prompts/temp_folder_prompt.md" >> "$GH_AW_PROMPT" cat "/opt/gh-aw/prompts/markdown.md" >> "$GH_AW_PROMPT" - cat "/opt/gh-aw/prompts/cache_memory_prompt_multi.md" >> "$GH_AW_PROMPT" + cat "/opt/gh-aw/prompts/cache_memory_prompt.md" >> "$GH_AW_PROMPT" cat << 'GH_AW_PROMPT_EOF' >> "$GH_AW_PROMPT" GitHub API Access Instructions @@ -195,22 +195,8 @@ jobs: env: GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt GH_AW_ALLOWED_EXTENSIONS: '' - GH_AW_CACHE_EXAMPLES: '- `/tmp/gh-aw/cache-memory-pull-request-review-context/notes.txt` - general notes and observations -- `/tmp/gh-aw/cache-memory-pull-request-review-context/notes.md` - markdown formatted notes -- `/tmp/gh-aw/cache-memory-pull-request-review-context/preferences.json` 
- user preferences and settings -- `/tmp/gh-aw/cache-memory-pull-request-review-context/history.jsonl` - activity history in JSON Lines format -- `/tmp/gh-aw/cache-memory-pull-request-review-context/data.csv` - tabular data -- `/tmp/gh-aw/cache-memory-pull-request-review-context/state/` - organized state files in subdirectories (with allowed file types) -- `/tmp/gh-aw/cache-memory-review patterns/notes.txt` - general notes and observations -- `/tmp/gh-aw/cache-memory-review patterns/notes.md` - markdown formatted notes -- `/tmp/gh-aw/cache-memory-review patterns/preferences.json` - user preferences and settings -- `/tmp/gh-aw/cache-memory-review patterns/history.jsonl` - activity history in JSON Lines format -- `/tmp/gh-aw/cache-memory-review patterns/data.csv` - tabular data -- `/tmp/gh-aw/cache-memory-review patterns/state/` - organized state files in subdirectories (with allowed file types) -' - GH_AW_CACHE_LIST: '- **pull-request-review-context**: `/tmp/gh-aw/cache-memory-pull-request-review-context/` -- **review patterns**: `/tmp/gh-aw/cache-memory-review patterns/` -' + GH_AW_CACHE_DESCRIPTION: '' + GH_AW_CACHE_DIR: '/tmp/gh-aw/cache-memory/' GH_AW_GITHUB_ACTOR: ${{ github.actor }} GH_AW_GITHUB_EVENT_COMMENT_ID: ${{ github.event.comment.id }} GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER: ${{ github.event.discussion.number }} @@ -233,8 +219,8 @@ jobs: file: process.env.GH_AW_PROMPT, substitutions: { GH_AW_ALLOWED_EXTENSIONS: process.env.GH_AW_ALLOWED_EXTENSIONS, - GH_AW_CACHE_EXAMPLES: process.env.GH_AW_CACHE_EXAMPLES, - GH_AW_CACHE_LIST: process.env.GH_AW_CACHE_LIST, + GH_AW_CACHE_DESCRIPTION: process.env.GH_AW_CACHE_DESCRIPTION, + GH_AW_CACHE_DIR: process.env.GH_AW_CACHE_DIR, GH_AW_GITHUB_ACTOR: process.env.GH_AW_GITHUB_ACTOR, GH_AW_GITHUB_EVENT_COMMENT_ID: process.env.GH_AW_GITHUB_EVENT_COMMENT_ID, GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER: process.env.GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER, @@ -298,26 +284,15 @@ jobs: - name: Create gh-aw temp directory run: bash 
/opt/gh-aw/actions/create_gh_aw_tmp_dir.sh # Cache memory file share configuration from frontmatter processed below - - name: Create cache-memory directory (pull-request-review-context) - run: | - mkdir -p /tmp/gh-aw/cache-memory-pull-request-review-context - - name: Restore cache-memory file share data (pull-request-review-context) + - name: Create cache-memory directory + run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh + - name: Restore cache-memory file share data uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 with: - key: security-review-pr-${{ github.event.pull_request.number }}-${{ github.run_id }} - path: /tmp/gh-aw/cache-memory-pull-request-review-context + key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} + path: /tmp/gh-aw/cache-memory restore-keys: | - security-review-pr-${{ github.event.pull_request.number }}- - - name: Create cache-memory directory (review patterns) - run: | - mkdir -p /tmp/gh-aw/cache-memory-review patterns - - name: Restore cache-memory file share data (review patterns) - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 - with: - key: security-review-patterns-${{ github.run_id }} - path: /tmp/gh-aw/cache-memory-review patterns - restore-keys: | - security-review-patterns- + memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}- - name: Configure Git credentials env: REPO_NAME: ${{ github.repository }} @@ -754,7 +729,7 @@ jobs: run: | set -o pipefail sudo -E awf --env-all --container-workdir "${GITHUB_WORKSPACE}" --allow-domains 
api.business.githubcopilot.com,api.enterprise.githubcopilot.com,api.github.com,api.githubcopilot.com,api.individual.githubcopilot.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,github.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,ppa.launchpad.net,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,telemetry.enterprise.githubcopilot.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --enable-host-access --image-tag 0.20.0 --skip-pull --enable-api-proxy \ - -- /bin/bash -c '/usr/local/bin/copilot --add-dir /tmp/gh-aw/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --add-dir "${GITHUB_WORKSPACE}" --disable-builtin-mcps --agent security-reviewer --allow-all-tools --add-dir /tmp/gh-aw/cache-memory-pull-request-review-context/ --add-dir '\''/tmp/gh-aw/cache-memory-review patterns/'\'' --allow-all-paths --share /tmp/gh-aw/sandbox/agent/logs/conversation.md --prompt "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"${GH_AW_MODEL_AGENT_COPILOT:+ --model "$GH_AW_MODEL_AGENT_COPILOT"}' 2>&1 | tee -a /tmp/gh-aw/agent-stdio.log + -- /bin/bash -c '/usr/local/bin/copilot --add-dir /tmp/gh-aw/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --add-dir "${GITHUB_WORKSPACE}" --disable-builtin-mcps --agent security-reviewer --allow-all-tools --add-dir /tmp/gh-aw/cache-memory/ --allow-all-paths --share /tmp/gh-aw/sandbox/agent/logs/conversation.md --prompt "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"${GH_AW_MODEL_AGENT_COPILOT:+ --model 
"$GH_AW_MODEL_AGENT_COPILOT"}' 2>&1 | tee -a /tmp/gh-aw/agent-stdio.log env: COPILOT_AGENT_RUNNER_TYPE: STANDALONE COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }} @@ -891,19 +866,12 @@ jobs: else echo 'AWF binary not installed, skipping firewall log summary' fi - - name: Upload cache-memory data as artifact (pull-request-review-context) + - name: Upload cache-memory data as artifact uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 if: always() with: - name: cache-memory-pull-request-review-context - path: /tmp/gh-aw/cache-memory-pull-request-review-context - - name: Upload cache-memory data as artifact (review patterns) - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 - if: always() - with: - name: cache-memory-review patterns - path: /tmp/gh-aw/cache-memory-review patterns - retention-days: 30 + name: cache-memory + path: /tmp/gh-aw/cache-memory - name: Upload agent artifacts if: always() continue-on-error: true @@ -1198,25 +1166,14 @@ jobs: uses: github/gh-aw/actions/setup@a70c5eada06553e3510ac27f2c3bda9d3705bccb # v0.46.3 with: destination: /opt/gh-aw/actions - - name: Download cache-memory artifact (pull-request-review-context) - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 - continue-on-error: true - with: - name: cache-memory-pull-request-review-context - path: /tmp/gh-aw/cache-memory-pull-request-review-context - - name: Save cache-memory to cache (pull-request-review-context) - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 - with: - key: security-review-pr-${{ github.event.pull_request.number }}-${{ github.run_id }} - path: /tmp/gh-aw/cache-memory-pull-request-review-context - - name: Download cache-memory artifact (review patterns) + - name: Download cache-memory artifact (default) uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 continue-on-error: true with: - name: cache-memory-review patterns - 
path: /tmp/gh-aw/cache-memory-review patterns - - name: Save cache-memory to cache (review patterns) + name: cache-memory + path: /tmp/gh-aw/cache-memory + - name: Save cache-memory to cache (default) uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 with: - key: security-review-patterns-${{ github.run_id }} - path: /tmp/gh-aw/cache-memory-review patterns + key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} + path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/security-review.md b/.github/workflows/security-review.md index 7d4471f..1a1d8da 100644 --- a/.github/workflows/security-review.md +++ b/.github/workflows/security-review.md @@ -15,12 +15,7 @@ engine: agent: security-reviewer tools: - cache-memory: - - id: pull-request-review-context - key: "security-review-pr-${{ github.event.pull_request.number }}" - - id: review patterns - key: "security-review-patterns" - retention-days: 30 + cache-memory: true github: toolsets: [repos, pull_requests] @@ -44,9 +39,9 @@ agent instructions. 1. **Access memory first.** Use cache memory at `/tmp/gh-aw/cache-memory/` to: - Check prior review context for this PR at - `/tmp/gh-aw/cache-memory/security-review-pr-${{ github.event.pull_request.number }}.json`. This correspond to the cache memory tool with id `pull-request-review-context` and can contain information about previous review findings, categories, files reviewed, and timestamps for this PR. + `/tmp/gh-aw/cache-memory/security-review-pr-${{ github.event.pull_request.number }}.json`. It can contain information about previous review findings, categories, files reviewed, and timestamps for this PR. - Identify recurring security patterns in this repository from - `/tmp/gh-aw/cache-memory/security-review-patterns.json`. This correspond to the cache memory tool with id `review-patterns` and can contain information about recurring security issues and patterns in the repository. 
+ `/tmp/gh-aw/cache-memory/security-review-patterns.json`. It can contain information about recurring security issues and patterns in the repository. - Avoid repeating the same inline comments from previous reviews if the previous comment is not resolved yet nor outdated (e.g., if the same issue is still present in the code or if the code has not changed since the last review). 2. **Fetch the pull request diff.** Read the pull request details and all @@ -88,10 +83,10 @@ agent instructions. - Write/update PR-specific memory at `/tmp/gh-aw/cache-memory/security-review-pr-${{ github.event.pull_request.number }}.json` including review timestamp, findings summary, categories found, and files - reviewed. The id of the cache memory tool for this is `pull-request-review-context`. + reviewed. - Update shared pattern memory at `/tmp/gh-aw/cache-memory/security-review-patterns.json` with recurring - issue themes and counts. The id of the cache memory tool for this is `review-patterns`. + issue themes and counts. ## Review Guidelines diff --git a/README.md b/README.md index f5f5a51..4076ef8 100644 --- a/README.md +++ b/README.md @@ -553,7 +553,7 @@ A Copilot custom agent defined in [`.github/agents/security-reviewer.agent.md`]( ### Security review workflow -The agentic workflow at [`.github/workflows/security-review.md`](.github/workflows/security-review.md) imports the security review agent and runs as part of the PR orchestrator pipeline (after all CI checks pass). It: +The agentic workflow at [`.github/workflows/security-review.md`](.github/workflows/security-review.md) imports the security review agent and runs on every pull request (triggered by `pull_request: [opened, synchronize]`). It: 1. Reads the pull request diff. 2. Reviews changed files against all 15 security posture categories. 
From c6317703c9f8e6d355320bd049e1f26051a0264e Mon Sep 17 00:00:00 2001 From: pmalarme Date: Thu, 19 Feb 2026 21:00:50 +0100 Subject: [PATCH 74/90] feat: update workflows and documentation to enhance security posture and agentic functionality --- .github/workflows/pr-review-comment-handler.lock.yml | 2 +- .github/workflows/pr-review-comment-handler.md | 2 ++ .github/workflows/security-review.md | 3 +++ 3 files changed, 6 insertions(+), 1 deletion(-) diff --git a/.github/workflows/pr-review-comment-handler.lock.yml b/.github/workflows/pr-review-comment-handler.lock.yml index f8eb026..d1f5023 100644 --- a/.github/workflows/pr-review-comment-handler.lock.yml +++ b/.github/workflows/pr-review-comment-handler.lock.yml @@ -23,7 +23,7 @@ # # Triages PR review comments. If the comment raises an issue that needs fixing, replies tagging @copilot to fix it directly on the PR. If low priority, creates an issue for later. If not relevant, resolves with a reply. # -# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"0b44b9f4cc4d0349fe23357a62776d7483607c761560b73dffce75e24c0219c3","compiler_version":"v0.46.3"} +# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"1e751f45efe3a0408cd1bd2d316ec761b0ef46c81d6cfe28848f96a71f247df1","compiler_version":"v0.46.3"} name: "PR Review Comment Handler" "on": diff --git a/.github/workflows/pr-review-comment-handler.md b/.github/workflows/pr-review-comment-handler.md index 31942fd..855eebc 100644 --- a/.github/workflows/pr-review-comment-handler.md +++ b/.github/workflows/pr-review-comment-handler.md @@ -19,8 +19,10 @@ tools: safe-outputs: reply-to-pull-request-review-comment: max: 10 + github-token: ${{ secrets.GH_AW_AGENT_TOKEN }} resolve-pull-request-review-thread: max: 10 + github-token: ${{ secrets.GH_AW_AGENT_TOKEN }} create-issue: max: 10 noop: diff --git a/.github/workflows/security-review.md b/.github/workflows/security-review.md index 1a1d8da..0a66fce 100644 --- a/.github/workflows/security-review.md +++ 
b/.github/workflows/security-review.md @@ -92,6 +92,9 @@ agent instructions. - **Only review changed lines.** Do not flag pre-existing issues in untouched code. +- **Skip auto-generated files.** Do not review `*.lock.yml` files (agentic + workflow lock files generated by `gh aw compile`). These are machine-generated + and not intended for manual editing or security review. - **Be specific and actionable.** Each finding must include a concrete fix. - **Prioritize by severity.** Focus on critical and high issues first. - **Use the project context.** This is a Python monorepo using Ruff, Pyright From d5c5d79a3b8ad734daf60a45e37aadfac7fafa0b Mon Sep 17 00:00:00 2001 From: Pierre Malarme Date: Thu, 19 Feb 2026 21:46:10 +0100 Subject: [PATCH 75/90] Update agents/agent1/README.md Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- agents/agent1/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/agents/agent1/README.md b/agents/agent1/README.md index d2b24e0..f877af2 100644 --- a/agents/agent1/README.md +++ b/agents/agent1/README.md @@ -51,7 +51,7 @@ docker push .azurecr.io/agent1: export UV_PUBLISH_TOKEN= ``` -- Publish from the agent dir (`agents/agent1`): `uv run poe publish` (uploads the built wheel/sdist). From repo root use `uv run poe -C agents/agent1 publish`. +- Publish from the repo root: `uv run poe publish` (uploads the built wheel/sdist from `agents/agent1/dist/`). - Package namespace: `python_agent_template.agents.agent1` uses a namespace root without `__init__.py` so multiple agents can coexist (PyPA guidance: https://packaging.python.org/en/latest/guides/packaging-namespace-packages/). 
## Anatomy From 33767c0220274ef6e766aa1d6447ce62a35fbbec Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Feb 2026 20:48:33 +0000 Subject: [PATCH 76/90] Initial plan From d4b5d60eda2833ab539d39a8c05d6198290e4a00 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Feb 2026 20:48:38 +0000 Subject: [PATCH 77/90] Initial plan From 141da558a8a720425631b9c8dcbd2925c83215a7 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Feb 2026 20:49:12 +0000 Subject: [PATCH 78/90] Initial plan From 32b1a86f15ad36d9dd2d08b427d2193edf3a5d6f Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Feb 2026 20:49:52 +0000 Subject: [PATCH 79/90] docs: update publish section - publishing disabled by default, no GitHub Packages Co-authored-by: pmalarme <686568+pmalarme@users.noreply.github.com> --- DEVELOPMENT.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/DEVELOPMENT.md b/DEVELOPMENT.md index e24a01f..c845101 100644 --- a/DEVELOPMENT.md +++ b/DEVELOPMENT.md @@ -378,7 +378,7 @@ Publish all packages in `dist/` to the configured package index: uv run poe publish ``` -This runs a single `uv publish` from the workspace root, uploading everything in `dist/`. The target index is configured in `pyproject.toml` under `[tool.uv.index]` (defaults to GitHub Packages). +This runs a single `uv publish` from the workspace root, uploading everything in `dist/`. Publishing is **disabled by default** — no registry is configured out of the box because GitHub Packages does not support a Python/pip registry. 
To enable publishing, uncomment one of the `[[tool.uv.index]]` blocks in `pyproject.toml` (Azure Artifacts for private packages or PyPI for public packages) and set the matching `UV_PUBLISH_URL` / `UV_PUBLISH_TOKEN` in the release workflow. Each agent has its own version; the registry rejects duplicate versions, so only agents whose version was bumped actually get uploaded. From 7adf6cd9d7864ea00c145eb6d67d5312f8df4e58 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Feb 2026 20:50:17 +0000 Subject: [PATCH 80/90] Initial plan From 31dd195a9a521654e27dafbe69e17708feca311f Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Feb 2026 20:50:38 +0000 Subject: [PATCH 81/90] Fix incorrect GitHub Packages default in DEVELOPMENT.md GitHub Packages does not support a Python/pip registry. Update the publish target section to clarify this and provide both Azure Artifacts and PyPI configuration options that align with the commented-out blocks in pyproject.toml and the python-release.yml workflow. Co-authored-by: pmalarme <686568+pmalarme@users.noreply.github.com> --- DEVELOPMENT.md | 45 ++++++++++++++++++++++++++------------------- 1 file changed, 26 insertions(+), 19 deletions(-) diff --git a/DEVELOPMENT.md b/DEVELOPMENT.md index e24a01f..3d1ad78 100644 --- a/DEVELOPMENT.md +++ b/DEVELOPMENT.md @@ -532,31 +532,38 @@ The [release workflow](.github/workflows/python-release.yml) triggers on pushes To release an agent: bump its version in `agents//pyproject.toml`, merge to main, and create a GitHub release. -#### Changing the publish target +#### Configuring the publish target -By default, packages are published to GitHub Packages. To publish to a different registry (e.g., PyPI, a private Artifactory, or Azure Artifacts), update two places: +> **Note:** GitHub Packages does **not** support a Python/pip registry. 
You must configure either Azure Artifacts or PyPI. -1. **`pyproject.toml`** — update the `[[tool.uv.index]]` section at the bottom of the file: +Choose one of the following registries and update two places: - ```toml - [[tool.uv.index]] - name = "pypi" # or your registry name - url = "https://pypi.org/simple/" - publish-url = "https://upload.pypi.org/legacy/" - explicit = true - ``` +1. **`pyproject.toml`** — uncomment the matching `[[tool.uv.index]]` block at the bottom of the file: -2. **`.github/workflows/python-release.yml`** — update the environment variables in the publish step: + - **Azure Artifacts** (recommended for private packages): - ```yaml - - name: Publish to PyPI - env: - UV_PUBLISH_URL: https://upload.pypi.org/legacy/ - UV_PUBLISH_TOKEN: ${{ secrets.PYPI_TOKEN }} - run: uv run poe publish - ``` + ```toml + [[tool.uv.index]] + name = "azure" + url = "https://pkgs.dev.azure.com///_packaging//pypi/simple/" + publish-url = "https://pkgs.dev.azure.com///_packaging//pypi/upload/" + explicit = true + ``` - For PyPI, create an API token and store it as a repository secret (`PYPI_TOKEN`). For GitHub Packages, the built-in `GITHUB_TOKEN` is used automatically. + - **PyPI** (public packages): + + ```toml + [[tool.uv.index]] + name = "pypi" + url = "https://pypi.org/simple/" + publish-url = "https://upload.pypi.org/legacy/" + explicit = true + ``` + +2. **`.github/workflows/python-release.yml`** — uncomment the matching publish step: + + - **Azure Artifacts**: generate a PAT with Packaging > Read & Write scope, store it as `AZURE_ARTIFACTS_TOKEN`, and uncomment the Azure Artifacts block. + - **PyPI**: create an API token at , store it as `PYPI_TOKEN`, and uncomment the PyPI block. 
### Documentation From aef698c84d29a14bbdefd9af50b3ba97754a76a8 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Feb 2026 20:51:34 +0000 Subject: [PATCH 82/90] feat: always include docs group in dev install for security checks Co-authored-by: pmalarme <686568+pmalarme@users.noreply.github.com> --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index c36f8b6..be14f2b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -215,7 +215,7 @@ lock-verify = "uv lock --locked" test = "python scripts/run_tasks_in_agents_if_exists.py test" markdown-code-lint = "uv run python scripts/check_md_code_blocks.py README.md docs/manual/*.md agents/**/README.md .github/instructions/*.md" pre-commit-install = "uv run pre-commit install --install-hooks --overwrite" -install = "uv sync --all-packages --all-extras --dev -U --prerelease=if-necessary-or-explicit --no-group=docs" +install = "uv sync --all-packages --all-extras --dev -U --prerelease=if-necessary-or-explicit --group docs" docs = "uv run python scripts/generate_docs.py" docs-install = "uv sync --all-packages --all-extras --dev -U --prerelease=if-necessary-or-explicit --group docs" check = ["lock-verify", "fmt", "lint", "pyright", "mypy", "bandit", "test", "markdown-code-lint"] From ee9fcb60b39d88caf2c488200841892ec7e7bd8a Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Feb 2026 20:56:59 +0000 Subject: [PATCH 83/90] feat: raise validators coverage threshold to 100% with complete edge case tests Co-authored-by: pmalarme <686568+pmalarme@users.noreply.github.com> --- agents/agent1/pyproject.toml | 4 ++-- .../tests/test_blank_string_validator.py | 19 +++++++++++++++++++ 2 files changed, 21 insertions(+), 2 deletions(-) diff --git a/agents/agent1/pyproject.toml b/agents/agent1/pyproject.toml index 3141f2a..a260e2f 100644 --- 
a/agents/agent1/pyproject.toml +++ b/agents/agent1/pyproject.toml @@ -50,7 +50,7 @@ timeout = 120 extend = "../../pyproject.toml" [tool.coverage.run] -omit = ["**/__init__.py"] +omit = ["**/__init__.py", "**/__main__.py"] [tool.pyright] extends = "../../pyproject.toml" @@ -83,7 +83,7 @@ include = "../../shared_tasks.toml" # Project-specific overrides to scope checks/coverage to this agent; shared tasks remain available for cross-agent runs mypy = "uv run mypy --config-file $POE_ROOT/pyproject.toml src" bandit = "uv run bandit -c pyproject.toml -r src/python_agent_template/agents/agent1 docs/source" -test = "uv run pytest --cov=python_agent_template.agents.agent1 --cov-report=term-missing:skip-covered" +test = "uv run pytest --cov=python_agent_template.agents.agent1 --cov-report=term-missing:skip-covered --cov-fail-under=100" docs = "uv run python ../../scripts/generate_docs.py --agents-only --agents agent1" [build-system] diff --git a/agents/agent1/tests/test_blank_string_validator.py b/agents/agent1/tests/test_blank_string_validator.py index da2bfb5..941e589 100644 --- a/agents/agent1/tests/test_blank_string_validator.py +++ b/agents/agent1/tests/test_blank_string_validator.py @@ -33,6 +33,12 @@ def test_validate_string_is_not_blank_rejects_empty() -> None: validate_string_is_not_blank("", "name") +def test_validate_string_is_not_blank_rejects_whitespace_only() -> None: + """Raises when string contains only whitespace.""" + with pytest.raises(EmptyStringError, match="name"): + validate_string_is_not_blank(" ", "name") + + def test_decorator_raises_missing_parameter() -> None: """Decorator raises when required arg is missing.""" @@ -69,3 +75,16 @@ def greet(first: str, last: str) -> str: greet(" ", "Doe") assert greet("Ada", "Lovelace") == "Ada Lovelace" + + +def test_decorator_with_full_bind() -> None: + """Decorator uses full bind when use_partial_bind=False.""" + + @require_non_blank_strings("first", use_partial_bind=False) # type: ignore[untyped-decorator] + 
def greet(first: str) -> str: + return f"hi {first}" + + assert greet("Ada") == "hi Ada" + + with pytest.raises(EmptyStringError, match="first"): + greet(" ") From a207e0eabc7f0832eaff281cdabd3ab4cdf7ed26 Mon Sep 17 00:00:00 2001 From: pmalarme Date: Thu, 19 Feb 2026 21:57:33 +0100 Subject: [PATCH 84/90] feat: add create-issue command workflow for PR review comments - Introduced a new workflow to create GitHub issues from PR review comments using the /create-issue command. - Removed the previous PR review comment handler workflow that triaged comments and tagged @copilot for fixes. - Updated security review workflow to change review submission criteria for medium/low issues from APPROVE to COMMENT. - Enhanced README to reflect the new create-issue command and its functionality. - Adjusted documentation to clarify branch protection requirements and workflow processes. --- .github/workflows/copilot-review.lock.yml | 1065 ----------------- .github/workflows/copilot-review.md | 56 - ...lock.yml => create-issue-command.lock.yml} | 101 +- .github/workflows/create-issue-command.md | 87 ++ .../workflows/pr-review-comment-handler.md | 150 --- .github/workflows/security-review.md | 24 +- README.md | 45 +- 7 files changed, 184 insertions(+), 1344 deletions(-) delete mode 100644 .github/workflows/copilot-review.lock.yml delete mode 100644 .github/workflows/copilot-review.md rename .github/workflows/{pr-review-comment-handler.lock.yml => create-issue-command.lock.yml} (92%) create mode 100644 .github/workflows/create-issue-command.md delete mode 100644 .github/workflows/pr-review-comment-handler.md diff --git a/.github/workflows/copilot-review.lock.yml b/.github/workflows/copilot-review.lock.yml deleted file mode 100644 index 86c529d..0000000 --- a/.github/workflows/copilot-review.lock.yml +++ /dev/null @@ -1,1065 +0,0 @@ -# -# ___ _ _ -# / _ \ | | (_) -# | |_| | __ _ ___ _ __ | |_ _ ___ -# | _ |/ _` |/ _ \ '_ \| __| |/ __| -# | | | | (_| | __/ | | | |_| | (__ -# \_| |_/\__, 
|\___|_| |_|\__|_|\___| -# __/ | -# _ _ |___/ -# | | | | / _| | -# | | | | ___ _ __ _ __| |_| | _____ ____ -# | |/\| |/ _ \ '__| |/ /| _| |/ _ \ \ /\ / / ___| -# \ /\ / (_) | | | | ( | | | | (_) \ V V /\__ \ -# \/ \/ \___/|_| |_|\_\|_| |_|\___/ \_/\_/ |___/ -# -# This file was automatically generated by gh-aw (v0.46.3). DO NOT EDIT. -# -# To update this file, edit the corresponding .md file and run: -# gh aw compile -# Not all edits will cause changes to this file. -# -# For more information: https://github.github.com/gh-aw/introduction/overview/ -# -# Adds Copilot as a reviewer on a pull request after the security review agent approves it. Triggered when any review is submitted; only acts when the review is an approval from the security reviewer. -# -# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"dfaf0658c2e150cf04605979aab0d8c0565c077861dfe5118f012afa69870313","compiler_version":"v0.46.3"} - -name: "Add Copilot Reviewer After Security Approval" -"on": - pull_request_review: - types: - - submitted - -permissions: {} - -concurrency: - group: "gh-aw-${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}" - cancel-in-progress: true - -run-name: "Add Copilot Reviewer After Security Approval" - -jobs: - activation: - needs: pre_activation - if: needs.pre_activation.outputs.activated == 'true' - runs-on: ubuntu-slim - permissions: - contents: read - outputs: - body: ${{ steps.sanitized.outputs.body }} - comment_id: "" - comment_repo: "" - text: ${{ steps.sanitized.outputs.text }} - title: ${{ steps.sanitized.outputs.title }} - steps: - - name: Setup Scripts - uses: github/gh-aw/actions/setup@a70c5eada06553e3510ac27f2c3bda9d3705bccb # v0.46.3 - with: - destination: /opt/gh-aw/actions - - name: Validate context variables - uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 - with: - script: | - const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); - setupGlobals(core, github, context, exec, 
io); - const { main } = require('/opt/gh-aw/actions/validate_context_variables.cjs'); - await main(); - - name: Checkout .github and .agents folders - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - with: - sparse-checkout: | - .github - .agents - fetch-depth: 1 - persist-credentials: false - - name: Check workflow file timestamps - uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 - env: - GH_AW_WORKFLOW_FILE: "copilot-review.lock.yml" - with: - script: | - const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); - setupGlobals(core, github, context, exec, io); - const { main } = require('/opt/gh-aw/actions/check_workflow_timestamp_api.cjs'); - await main(); - - name: Compute current body text - id: sanitized - uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 - with: - script: | - const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); - setupGlobals(core, github, context, exec, io); - const { main } = require('/opt/gh-aw/actions/compute_text.cjs'); - await main(); - - name: Create prompt with built-in context - env: - GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt - GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }} - GH_AW_GITHUB_ACTOR: ${{ github.actor }} - GH_AW_GITHUB_EVENT_COMMENT_ID: ${{ github.event.comment.id }} - GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER: ${{ github.event.discussion.number }} - GH_AW_GITHUB_EVENT_ISSUE_NUMBER: ${{ github.event.issue.number }} - GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER: ${{ github.event.pull_request.number }} - GH_AW_GITHUB_EVENT_REVIEW_ID: ${{ github.event.review.id }} - GH_AW_GITHUB_REPOSITORY: ${{ github.repository }} - GH_AW_GITHUB_RUN_ID: ${{ github.run_id }} - GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }} - GH_AW_IS_PR_COMMENT: ${{ github.event.issue.pull_request && 'true' || '' }} - run: | - bash /opt/gh-aw/actions/create_prompt_first.sh - cat << 'GH_AW_PROMPT_EOF' > "$GH_AW_PROMPT" - - GH_AW_PROMPT_EOF - cat 
"/opt/gh-aw/prompts/xpia.md" >> "$GH_AW_PROMPT" - cat "/opt/gh-aw/prompts/temp_folder_prompt.md" >> "$GH_AW_PROMPT" - cat "/opt/gh-aw/prompts/markdown.md" >> "$GH_AW_PROMPT" - cat << 'GH_AW_PROMPT_EOF' >> "$GH_AW_PROMPT" - - GitHub API Access Instructions - - The gh CLI is NOT authenticated. Do NOT use gh commands for GitHub operations. - - - To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls. - - Temporary IDs: Some safe output tools support a temporary ID field (usually named temporary_id) so you can reference newly-created items elsewhere in the SAME agent output (for example, using #aw_abc1 in a later body). - - **IMPORTANT - temporary_id format rules:** - - If you DON'T need to reference the item later, OMIT the temporary_id field entirely (it will be auto-generated if needed) - - If you DO need cross-references/chaining, you MUST match this EXACT validation regex: /^aw_[A-Za-z0-9]{3,8}$/i - - Format: aw_ prefix followed by 3 to 8 alphanumeric characters (A-Z, a-z, 0-9, case-insensitive) - - Valid alphanumeric characters: ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789 - - INVALID examples: aw_ab (too short), aw_123456789 (too long), aw_test-id (contains hyphen), aw_id_123 (contains underscore) - - VALID examples: aw_abc, aw_abc1, aw_Test123, aw_A1B2C3D4, aw_12345678 - - To generate valid IDs: use 3-8 random alphanumeric characters or omit the field to let the system auto-generate - - Do NOT invent other aw_* formats — downstream steps will reject them with validation errors matching against /^aw_[A-Za-z0-9]{3,8}$/i. - - Discover available tools from the safeoutputs MCP server. - - **Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped. 
- - **Note**: If you made no other safe output tool calls during this workflow execution, call the "noop" tool to provide a status message indicating completion or that no actions were needed. - - - - The following GitHub context information is available for this workflow: - {{#if __GH_AW_GITHUB_ACTOR__ }} - - **actor**: __GH_AW_GITHUB_ACTOR__ - {{/if}} - {{#if __GH_AW_GITHUB_REPOSITORY__ }} - - **repository**: __GH_AW_GITHUB_REPOSITORY__ - {{/if}} - {{#if __GH_AW_GITHUB_WORKSPACE__ }} - - **workspace**: __GH_AW_GITHUB_WORKSPACE__ - {{/if}} - {{#if __GH_AW_GITHUB_EVENT_ISSUE_NUMBER__ }} - - **issue-number**: #__GH_AW_GITHUB_EVENT_ISSUE_NUMBER__ - {{/if}} - {{#if __GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER__ }} - - **discussion-number**: #__GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER__ - {{/if}} - {{#if __GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER__ }} - - **pull-request-number**: #__GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER__ - {{/if}} - {{#if __GH_AW_GITHUB_EVENT_COMMENT_ID__ }} - - **comment-id**: __GH_AW_GITHUB_EVENT_COMMENT_ID__ - {{/if}} - {{#if __GH_AW_GITHUB_RUN_ID__ }} - - **workflow-run-id**: __GH_AW_GITHUB_RUN_ID__ - {{/if}} - - - GH_AW_PROMPT_EOF - if [ "$GITHUB_EVENT_NAME" = "issue_comment" ] && [ -n "$GH_AW_IS_PR_COMMENT" ] || [ "$GITHUB_EVENT_NAME" = "pull_request_review_comment" ] || [ "$GITHUB_EVENT_NAME" = "pull_request_review" ]; then - cat "/opt/gh-aw/prompts/pr_context_prompt.md" >> "$GH_AW_PROMPT" - fi - cat << 'GH_AW_PROMPT_EOF' >> "$GH_AW_PROMPT" - - GH_AW_PROMPT_EOF - cat << 'GH_AW_PROMPT_EOF' >> "$GH_AW_PROMPT" - {{#runtime-import .github/workflows/copilot-review.md}} - GH_AW_PROMPT_EOF - - name: Interpolate variables and render templates - uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 - env: - GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt - GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER: ${{ github.event.pull_request.number }} - GH_AW_GITHUB_EVENT_REVIEW_ID: ${{ github.event.review.id }} - with: - script: | - const { setupGlobals } = 
require('/opt/gh-aw/actions/setup_globals.cjs'); - setupGlobals(core, github, context, exec, io); - const { main } = require('/opt/gh-aw/actions/interpolate_prompt.cjs'); - await main(); - - name: Substitute placeholders - uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 - env: - GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt - GH_AW_GITHUB_ACTOR: ${{ github.actor }} - GH_AW_GITHUB_EVENT_COMMENT_ID: ${{ github.event.comment.id }} - GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER: ${{ github.event.discussion.number }} - GH_AW_GITHUB_EVENT_ISSUE_NUMBER: ${{ github.event.issue.number }} - GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER: ${{ github.event.pull_request.number }} - GH_AW_GITHUB_EVENT_REVIEW_ID: ${{ github.event.review.id }} - GH_AW_GITHUB_REPOSITORY: ${{ github.repository }} - GH_AW_GITHUB_RUN_ID: ${{ github.run_id }} - GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }} - GH_AW_IS_PR_COMMENT: ${{ github.event.issue.pull_request && 'true' || '' }} - GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_ACTIVATED: ${{ needs.pre_activation.outputs.activated }} - GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_MATCHED_COMMAND: ${{ needs.pre_activation.outputs.matched_command }} - with: - script: | - const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); - setupGlobals(core, github, context, exec, io); - - const substitutePlaceholders = require('/opt/gh-aw/actions/substitute_placeholders.cjs'); - - // Call the substitution function - return await substitutePlaceholders({ - file: process.env.GH_AW_PROMPT, - substitutions: { - GH_AW_GITHUB_ACTOR: process.env.GH_AW_GITHUB_ACTOR, - GH_AW_GITHUB_EVENT_COMMENT_ID: process.env.GH_AW_GITHUB_EVENT_COMMENT_ID, - GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER: process.env.GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER, - GH_AW_GITHUB_EVENT_ISSUE_NUMBER: process.env.GH_AW_GITHUB_EVENT_ISSUE_NUMBER, - GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER: process.env.GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER, - GH_AW_GITHUB_EVENT_REVIEW_ID: 
process.env.GH_AW_GITHUB_EVENT_REVIEW_ID, - GH_AW_GITHUB_REPOSITORY: process.env.GH_AW_GITHUB_REPOSITORY, - GH_AW_GITHUB_RUN_ID: process.env.GH_AW_GITHUB_RUN_ID, - GH_AW_GITHUB_WORKSPACE: process.env.GH_AW_GITHUB_WORKSPACE, - GH_AW_IS_PR_COMMENT: process.env.GH_AW_IS_PR_COMMENT, - GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_ACTIVATED: process.env.GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_ACTIVATED, - GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_MATCHED_COMMAND: process.env.GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_MATCHED_COMMAND - } - }); - - name: Validate prompt placeholders - env: - GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt - run: bash /opt/gh-aw/actions/validate_prompt_placeholders.sh - - name: Print prompt - env: - GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt - run: bash /opt/gh-aw/actions/print_prompt_summary.sh - - name: Upload prompt artifact - if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 - with: - name: prompt - path: /tmp/gh-aw/aw-prompts/prompt.txt - retention-days: 1 - - agent: - needs: activation - runs-on: ubuntu-latest - permissions: - contents: read - pull-requests: read - env: - DEFAULT_BRANCH: ${{ github.event.repository.default_branch }} - GH_AW_ASSETS_ALLOWED_EXTS: "" - GH_AW_ASSETS_BRANCH: "" - GH_AW_ASSETS_MAX_SIZE_KB: 0 - GH_AW_MCP_LOG_DIR: /tmp/gh-aw/mcp-logs/safeoutputs - GH_AW_SAFE_OUTPUTS: /opt/gh-aw/safeoutputs/outputs.jsonl - GH_AW_SAFE_OUTPUTS_CONFIG_PATH: /opt/gh-aw/safeoutputs/config.json - GH_AW_SAFE_OUTPUTS_TOOLS_PATH: /opt/gh-aw/safeoutputs/tools.json - GH_AW_WORKFLOW_ID_SANITIZED: copilotreview - outputs: - checkout_pr_success: ${{ steps.checkout-pr.outputs.checkout_pr_success || 'true' }} - has_patch: ${{ steps.collect_output.outputs.has_patch }} - model: ${{ steps.generate_aw_info.outputs.model }} - output: ${{ steps.collect_output.outputs.output }} - output_types: ${{ steps.collect_output.outputs.output_types }} - secret_verification_result: ${{ steps.validate-secret.outputs.verification_result }} - 
steps: - - name: Setup Scripts - uses: github/gh-aw/actions/setup@a70c5eada06553e3510ac27f2c3bda9d3705bccb # v0.46.3 - with: - destination: /opt/gh-aw/actions - - name: Checkout repository - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - with: - persist-credentials: false - - name: Create gh-aw temp directory - run: bash /opt/gh-aw/actions/create_gh_aw_tmp_dir.sh - - name: Configure Git credentials - env: - REPO_NAME: ${{ github.repository }} - SERVER_URL: ${{ github.server_url }} - run: | - git config --global user.email "github-actions[bot]@users.noreply.github.com" - git config --global user.name "github-actions[bot]" - # Re-authenticate git with GitHub token - SERVER_URL_STRIPPED="${SERVER_URL#https://}" - git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git" - echo "Git configured with standard GitHub Actions identity" - - name: Checkout PR branch - id: checkout-pr - if: | - github.event.pull_request - uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 - env: - GH_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} - with: - github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} - script: | - const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); - setupGlobals(core, github, context, exec, io); - const { main } = require('/opt/gh-aw/actions/checkout_pr_branch.cjs'); - await main(); - - name: Generate agentic run info - id: generate_aw_info - uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 - with: - script: | - const fs = require('fs'); - - const awInfo = { - engine_id: "copilot", - engine_name: "GitHub Copilot CLI", - model: process.env.GH_AW_MODEL_AGENT_COPILOT || "", - version: "", - agent_version: "0.0.411", - cli_version: "v0.46.3", - workflow_name: "Add Copilot Reviewer After Security Approval", 
- experimental: false, - supports_tools_allowlist: true, - run_id: context.runId, - run_number: context.runNumber, - run_attempt: process.env.GITHUB_RUN_ATTEMPT, - repository: context.repo.owner + '/' + context.repo.repo, - ref: context.ref, - sha: context.sha, - actor: context.actor, - event_name: context.eventName, - staged: false, - allowed_domains: ["defaults"], - firewall_enabled: true, - awf_version: "v0.20.0", - awmg_version: "v0.1.4", - steps: { - firewall: "squid" - }, - created_at: new Date().toISOString() - }; - - // Write to /tmp/gh-aw directory to avoid inclusion in PR - const tmpPath = '/tmp/gh-aw/aw_info.json'; - fs.writeFileSync(tmpPath, JSON.stringify(awInfo, null, 2)); - console.log('Generated aw_info.json at:', tmpPath); - console.log(JSON.stringify(awInfo, null, 2)); - - // Set model as output for reuse in other steps/jobs - core.setOutput('model', awInfo.model); - - name: Validate COPILOT_GITHUB_TOKEN secret - id: validate-secret - run: /opt/gh-aw/actions/validate_multi_secret.sh COPILOT_GITHUB_TOKEN 'GitHub Copilot CLI' https://github.github.com/gh-aw/reference/engines/#github-copilot-default - env: - COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }} - - name: Install GitHub Copilot CLI - run: /opt/gh-aw/actions/install_copilot_cli.sh 0.0.411 - - name: Install awf binary - run: bash /opt/gh-aw/actions/install_awf_binary.sh v0.20.0 - - name: Determine automatic lockdown mode for GitHub MCP Server - id: determine-automatic-lockdown - uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 - env: - GH_AW_GITHUB_TOKEN: ${{ secrets.GH_AW_GITHUB_TOKEN }} - GH_AW_GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN }} - with: - script: | - const determineAutomaticLockdown = require('/opt/gh-aw/actions/determine_automatic_lockdown.cjs'); - await determineAutomaticLockdown(github, context, core); - - name: Download container images - run: bash /opt/gh-aw/actions/download_docker_images.sh 
ghcr.io/github/gh-aw-firewall/agent:0.20.0 ghcr.io/github/gh-aw-firewall/api-proxy:0.20.0 ghcr.io/github/gh-aw-firewall/squid:0.20.0 ghcr.io/github/gh-aw-mcpg:v0.1.4 ghcr.io/github/github-mcp-server:v0.30.3 node:lts-alpine - - name: Write Safe Outputs Config - run: | - mkdir -p /opt/gh-aw/safeoutputs - mkdir -p /tmp/gh-aw/safeoutputs - mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs - cat > /opt/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_EOF' - {"add_reviewer":{"max":1,"reviewers":["copilot"]},"missing_data":{},"missing_tool":{},"noop":{"max":1}} - GH_AW_SAFE_OUTPUTS_CONFIG_EOF - cat > /opt/gh-aw/safeoutputs/tools.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_EOF' - [ - { - "description": "Add reviewers to a GitHub pull request. Reviewers receive notifications and can approve or request changes. Use 'copilot' as a reviewer name to request the Copilot PR review bot. CONSTRAINTS: Maximum 1 reviewer(s) can be added.", - "inputSchema": { - "additionalProperties": false, - "properties": { - "pull_request_number": { - "description": "Pull request number to add reviewers to. This is the numeric ID from the GitHub URL (e.g., 876 in github.com/owner/repo/pull/876). If omitted, adds reviewers to the PR that triggered this workflow.", - "type": [ - "number", - "string" - ] - }, - "reviewers": { - "description": "GitHub usernames to add as reviewers (e.g., ['octocat', 'copilot']). Users must have access to the repository.", - "items": { - "type": "string" - }, - "type": "array" - } - }, - "required": [ - "reviewers" - ], - "type": "object" - }, - "name": "add_reviewer" - }, - { - "description": "Report that a tool or capability needed to complete the task is not available, or share any information you deem important about missing functionality or limitations. 
Use this when you cannot accomplish what was requested because the required functionality is missing or access is restricted.", - "inputSchema": { - "additionalProperties": false, - "properties": { - "alternatives": { - "description": "Any workarounds, manual steps, or alternative approaches the user could take (max 256 characters).", - "type": "string" - }, - "reason": { - "description": "Explanation of why this tool is needed or what information you want to share about the limitation (max 256 characters).", - "type": "string" - }, - "tool": { - "description": "Optional: Name or description of the missing tool or capability (max 128 characters). Be specific about what functionality is needed.", - "type": "string" - } - }, - "required": [ - "reason" - ], - "type": "object" - }, - "name": "missing_tool" - }, - { - "description": "Log a transparency message when no significant actions are needed. Use this to confirm workflow completion and provide visibility when analysis is complete but no changes or outputs are required (e.g., 'No issues found', 'All checks passed'). This ensures the workflow produces human-visible output even when no other actions are taken.", - "inputSchema": { - "additionalProperties": false, - "properties": { - "message": { - "description": "Status or completion message to log. Should explain what was analyzed and the outcome (e.g., 'Code review complete - no issues found', 'Analysis complete - all tests passing').", - "type": "string" - } - }, - "required": [ - "message" - ], - "type": "object" - }, - "name": "noop" - }, - { - "description": "Report that data or information needed to complete the task is not available. 
Use this when you cannot accomplish what was requested because required data, context, or information is missing.", - "inputSchema": { - "additionalProperties": false, - "properties": { - "alternatives": { - "description": "Any workarounds, manual steps, or alternative approaches the user could take (max 256 characters).", - "type": "string" - }, - "context": { - "description": "Additional context about the missing data or where it should come from (max 256 characters).", - "type": "string" - }, - "data_type": { - "description": "Type or description of the missing data or information (max 128 characters). Be specific about what data is needed.", - "type": "string" - }, - "reason": { - "description": "Explanation of why this data is needed to complete the task (max 256 characters).", - "type": "string" - } - }, - "required": [], - "type": "object" - }, - "name": "missing_data" - } - ] - GH_AW_SAFE_OUTPUTS_TOOLS_EOF - cat > /opt/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_EOF' - { - "add_reviewer": { - "defaultMax": 3, - "fields": { - "pull_request_number": { - "issueOrPRNumber": true - }, - "reviewers": { - "required": true, - "type": "array", - "itemType": "string", - "itemSanitize": true, - "itemMaxLength": 39 - } - } - }, - "missing_tool": { - "defaultMax": 20, - "fields": { - "alternatives": { - "type": "string", - "sanitize": true, - "maxLength": 512 - }, - "reason": { - "required": true, - "type": "string", - "sanitize": true, - "maxLength": 256 - }, - "tool": { - "type": "string", - "sanitize": true, - "maxLength": 128 - } - } - }, - "noop": { - "defaultMax": 1, - "fields": { - "message": { - "required": true, - "type": "string", - "sanitize": true, - "maxLength": 65000 - } - } - } - } - GH_AW_SAFE_OUTPUTS_VALIDATION_EOF - - name: Generate Safe Outputs MCP Server Config - id: safe-outputs-config - run: | - # Generate a secure random API key (360 bits of entropy, 40+ chars) - # Mask immediately to prevent timing vulnerabilities - 
API_KEY=$(openssl rand -base64 45 | tr -d '/+=') - echo "::add-mask::${API_KEY}" - - PORT=3001 - - # Set outputs for next steps - { - echo "safe_outputs_api_key=${API_KEY}" - echo "safe_outputs_port=${PORT}" - } >> "$GITHUB_OUTPUT" - - echo "Safe Outputs MCP server will run on port ${PORT}" - - - name: Start Safe Outputs MCP HTTP Server - id: safe-outputs-start - env: - DEBUG: '*' - GH_AW_SAFE_OUTPUTS_PORT: ${{ steps.safe-outputs-config.outputs.safe_outputs_port }} - GH_AW_SAFE_OUTPUTS_API_KEY: ${{ steps.safe-outputs-config.outputs.safe_outputs_api_key }} - GH_AW_SAFE_OUTPUTS_TOOLS_PATH: /opt/gh-aw/safeoutputs/tools.json - GH_AW_SAFE_OUTPUTS_CONFIG_PATH: /opt/gh-aw/safeoutputs/config.json - GH_AW_MCP_LOG_DIR: /tmp/gh-aw/mcp-logs/safeoutputs - run: | - # Environment variables are set above to prevent template injection - export DEBUG - export GH_AW_SAFE_OUTPUTS_PORT - export GH_AW_SAFE_OUTPUTS_API_KEY - export GH_AW_SAFE_OUTPUTS_TOOLS_PATH - export GH_AW_SAFE_OUTPUTS_CONFIG_PATH - export GH_AW_MCP_LOG_DIR - - bash /opt/gh-aw/actions/start_safe_outputs_server.sh - - - name: Start MCP Gateway - id: start-mcp-gateway - env: - GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }} - GH_AW_SAFE_OUTPUTS_API_KEY: ${{ steps.safe-outputs-start.outputs.api_key }} - GH_AW_SAFE_OUTPUTS_PORT: ${{ steps.safe-outputs-start.outputs.port }} - GITHUB_MCP_LOCKDOWN: ${{ steps.determine-automatic-lockdown.outputs.lockdown == 'true' && '1' || '0' }} - GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} - run: | - set -eo pipefail - mkdir -p /tmp/gh-aw/mcp-config - - # Export gateway environment variables for MCP config and gateway script - export MCP_GATEWAY_PORT="80" - export MCP_GATEWAY_DOMAIN="host.docker.internal" - MCP_GATEWAY_API_KEY=$(openssl rand -base64 45 | tr -d '/+=') - echo "::add-mask::${MCP_GATEWAY_API_KEY}" - export MCP_GATEWAY_API_KEY - export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" - mkdir -p 
"${MCP_GATEWAY_PAYLOAD_DIR}" - export DEBUG="*" - - export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.4' - - mkdir -p /home/runner/.copilot - cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh - { - "mcpServers": { - "github": { - "type": "stdio", - "container": "ghcr.io/github/github-mcp-server:v0.30.3", - "env": { - "GITHUB_LOCKDOWN_MODE": "$GITHUB_MCP_LOCKDOWN", - "GITHUB_PERSONAL_ACCESS_TOKEN": "\${GITHUB_MCP_SERVER_TOKEN}", - "GITHUB_READ_ONLY": "1", - "GITHUB_TOOLSETS": "repos,pull_requests" - } - }, - "safeoutputs": { - "type": "http", - "url": "http://host.docker.internal:$GH_AW_SAFE_OUTPUTS_PORT", - "headers": { - "Authorization": "\${GH_AW_SAFE_OUTPUTS_API_KEY}" - } - } - }, - "gateway": { - "port": $MCP_GATEWAY_PORT, - "domain": "${MCP_GATEWAY_DOMAIN}", - "apiKey": "${MCP_GATEWAY_API_KEY}", - 
"payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" - } - } - GH_AW_MCP_CONFIG_EOF - - name: Generate workflow overview - uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 - with: - script: | - const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); - await generateWorkflowOverview(core); - - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 - with: - name: prompt - path: /tmp/gh-aw/aw-prompts - - name: Clean git credentials - run: bash /opt/gh-aw/actions/clean_git_credentials.sh - - name: Execute GitHub Copilot CLI - id: agentic_execution - # Copilot CLI tool arguments (sorted): - timeout-minutes: 20 - run: | - set -o pipefail - sudo -E awf --env-all --container-workdir "${GITHUB_WORKSPACE}" --allow-domains api.business.githubcopilot.com,api.enterprise.githubcopilot.com,api.github.com,api.githubcopilot.com,api.individual.githubcopilot.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,github.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,ppa.launchpad.net,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,telemetry.enterprise.githubcopilot.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --enable-host-access --image-tag 0.20.0 --skip-pull --enable-api-proxy \ - -- /bin/bash -c '/usr/local/bin/copilot --add-dir /tmp/gh-aw/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --add-dir "${GITHUB_WORKSPACE}" 
--disable-builtin-mcps --allow-all-tools --allow-all-paths --share /tmp/gh-aw/sandbox/agent/logs/conversation.md --prompt "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"${GH_AW_MODEL_AGENT_COPILOT:+ --model "$GH_AW_MODEL_AGENT_COPILOT"}' 2>&1 | tee -a /tmp/gh-aw/agent-stdio.log - env: - COPILOT_AGENT_RUNNER_TYPE: STANDALONE - COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }} - GH_AW_MCP_CONFIG: /home/runner/.copilot/mcp-config.json - GH_AW_MODEL_AGENT_COPILOT: ${{ vars.GH_AW_MODEL_AGENT_COPILOT || '' }} - GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt - GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }} - GITHUB_HEAD_REF: ${{ github.head_ref }} - GITHUB_REF_NAME: ${{ github.ref_name }} - GITHUB_STEP_SUMMARY: ${{ env.GITHUB_STEP_SUMMARY }} - GITHUB_WORKSPACE: ${{ github.workspace }} - XDG_CONFIG_HOME: /home/runner - - name: Configure Git credentials - env: - REPO_NAME: ${{ github.repository }} - SERVER_URL: ${{ github.server_url }} - run: | - git config --global user.email "github-actions[bot]@users.noreply.github.com" - git config --global user.name "github-actions[bot]" - # Re-authenticate git with GitHub token - SERVER_URL_STRIPPED="${SERVER_URL#https://}" - git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git" - echo "Git configured with standard GitHub Actions identity" - - name: Copy Copilot session state files to logs - if: always() - continue-on-error: true - run: | - # Copy Copilot session state files to logs folder for artifact collection - # This ensures they are in /tmp/gh-aw/ where secret redaction can scan them - SESSION_STATE_DIR="$HOME/.copilot/session-state" - LOGS_DIR="/tmp/gh-aw/sandbox/agent/logs" - - if [ -d "$SESSION_STATE_DIR" ]; then - echo "Copying Copilot session state files from $SESSION_STATE_DIR to $LOGS_DIR" - mkdir -p "$LOGS_DIR" - cp -v "$SESSION_STATE_DIR"/*.jsonl "$LOGS_DIR/" 2>/dev/null || true - echo "Session state files copied successfully" - else - echo "No session-state 
directory found at $SESSION_STATE_DIR" - fi - - name: Stop MCP Gateway - if: always() - continue-on-error: true - env: - MCP_GATEWAY_PORT: ${{ steps.start-mcp-gateway.outputs.gateway-port }} - MCP_GATEWAY_API_KEY: ${{ steps.start-mcp-gateway.outputs.gateway-api-key }} - GATEWAY_PID: ${{ steps.start-mcp-gateway.outputs.gateway-pid }} - run: | - bash /opt/gh-aw/actions/stop_mcp_gateway.sh "$GATEWAY_PID" - - name: Redact secrets in logs - if: always() - uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 - with: - script: | - const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); - setupGlobals(core, github, context, exec, io); - const { main } = require('/opt/gh-aw/actions/redact_secrets.cjs'); - await main(); - env: - GH_AW_SECRET_NAMES: 'COPILOT_GITHUB_TOKEN,GH_AW_GITHUB_MCP_SERVER_TOKEN,GH_AW_GITHUB_TOKEN,GITHUB_TOKEN' - SECRET_COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }} - SECRET_GH_AW_GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN }} - SECRET_GH_AW_GITHUB_TOKEN: ${{ secrets.GH_AW_GITHUB_TOKEN }} - SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - name: Upload Safe Outputs - if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 - with: - name: safe-output - path: ${{ env.GH_AW_SAFE_OUTPUTS }} - if-no-files-found: warn - - name: Ingest agent output - id: collect_output - if: always() - uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 - env: - GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }} - GH_AW_ALLOWED_DOMAINS: 
"api.business.githubcopilot.com,api.enterprise.githubcopilot.com,api.github.com,api.githubcopilot.com,api.individual.githubcopilot.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,github.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,ppa.launchpad.net,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,telemetry.enterprise.githubcopilot.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com" - GITHUB_SERVER_URL: ${{ github.server_url }} - GITHUB_API_URL: ${{ github.api_url }} - with: - script: | - const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); - setupGlobals(core, github, context, exec, io); - const { main } = require('/opt/gh-aw/actions/collect_ndjson_output.cjs'); - await main(); - - name: Upload sanitized agent output - if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 - with: - name: agent-output - path: ${{ env.GH_AW_AGENT_OUTPUT }} - if-no-files-found: warn - - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 - with: - name: agent_outputs - path: | - /tmp/gh-aw/sandbox/agent/logs/ - /tmp/gh-aw/redacted-urls.log - if-no-files-found: ignore - - name: Parse agent logs for step summary - if: always() - uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 - env: - GH_AW_AGENT_OUTPUT: /tmp/gh-aw/sandbox/agent/logs/ - with: - script: | - const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); - 
setupGlobals(core, github, context, exec, io); - const { main } = require('/opt/gh-aw/actions/parse_copilot_log.cjs'); - await main(); - - name: Parse MCP Gateway logs for step summary - if: always() - uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 - with: - script: | - const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); - setupGlobals(core, github, context, exec, io); - const { main } = require('/opt/gh-aw/actions/parse_mcp_gateway_log.cjs'); - await main(); - - name: Print firewall logs - if: always() - continue-on-error: true - env: - AWF_LOGS_DIR: /tmp/gh-aw/sandbox/firewall/logs - run: | - # Fix permissions on firewall logs so they can be uploaded as artifacts - # AWF runs with sudo, creating files owned by root - sudo chmod -R a+r /tmp/gh-aw/sandbox/firewall/logs 2>/dev/null || true - # Only run awf logs summary if awf command exists (it may not be installed if workflow failed before install step) - if command -v awf &> /dev/null; then - awf logs summary | tee -a "$GITHUB_STEP_SUMMARY" - else - echo 'AWF binary not installed, skipping firewall log summary' - fi - - name: Upload agent artifacts - if: always() - continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 - with: - name: agent-artifacts - path: | - /tmp/gh-aw/aw-prompts/prompt.txt - /tmp/gh-aw/aw_info.json - /tmp/gh-aw/mcp-logs/ - /tmp/gh-aw/sandbox/firewall/logs/ - /tmp/gh-aw/agent-stdio.log - /tmp/gh-aw/agent/ - if-no-files-found: ignore - - conclusion: - needs: - - activation - - agent - - detection - - safe_outputs - if: (always()) && (needs.agent.result != 'skipped') - runs-on: ubuntu-slim - permissions: - contents: read - pull-requests: write - outputs: - noop_message: ${{ steps.noop.outputs.noop_message }} - tools_reported: ${{ steps.missing_tool.outputs.tools_reported }} - total_count: ${{ steps.missing_tool.outputs.total_count }} - steps: - - name: Setup Scripts - uses: 
github/gh-aw/actions/setup@a70c5eada06553e3510ac27f2c3bda9d3705bccb # v0.46.3 - with: - destination: /opt/gh-aw/actions - - name: Download agent output artifact - continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 - with: - name: agent-output - path: /tmp/gh-aw/safeoutputs/ - - name: Setup agent output environment variable - run: | - mkdir -p /tmp/gh-aw/safeoutputs/ - find "/tmp/gh-aw/safeoutputs/" -type f -print - echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV" - - name: Process No-Op Messages - id: noop - uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 - env: - GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }} - GH_AW_NOOP_MAX: 1 - GH_AW_WORKFLOW_NAME: "Add Copilot Reviewer After Security Approval" - with: - github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} - script: | - const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); - setupGlobals(core, github, context, exec, io); - const { main } = require('/opt/gh-aw/actions/noop.cjs'); - await main(); - - name: Record Missing Tool - id: missing_tool - uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 - env: - GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }} - GH_AW_WORKFLOW_NAME: "Add Copilot Reviewer After Security Approval" - with: - github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} - script: | - const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); - setupGlobals(core, github, context, exec, io); - const { main } = require('/opt/gh-aw/actions/missing_tool.cjs'); - await main(); - - name: Handle Agent Failure - id: handle_agent_failure - uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 - env: - GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }} - GH_AW_WORKFLOW_NAME: "Add Copilot Reviewer After Security Approval" - GH_AW_RUN_URL: ${{ github.server_url }}/${{ 
github.repository }}/actions/runs/${{ github.run_id }} - GH_AW_AGENT_CONCLUSION: ${{ needs.agent.result }} - GH_AW_WORKFLOW_ID: "copilot-review" - GH_AW_SECRET_VERIFICATION_RESULT: ${{ needs.agent.outputs.secret_verification_result }} - GH_AW_CHECKOUT_PR_SUCCESS: ${{ needs.agent.outputs.checkout_pr_success }} - GH_AW_GROUP_REPORTS: "false" - with: - github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} - script: | - const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); - setupGlobals(core, github, context, exec, io); - const { main } = require('/opt/gh-aw/actions/handle_agent_failure.cjs'); - await main(); - - name: Handle No-Op Message - id: handle_noop_message - uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 - env: - GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }} - GH_AW_WORKFLOW_NAME: "Add Copilot Reviewer After Security Approval" - GH_AW_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} - GH_AW_AGENT_CONCLUSION: ${{ needs.agent.result }} - GH_AW_NOOP_MESSAGE: ${{ steps.noop.outputs.noop_message }} - GH_AW_NOOP_REPORT_AS_ISSUE: "true" - with: - github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} - script: | - const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); - setupGlobals(core, github, context, exec, io); - const { main } = require('/opt/gh-aw/actions/handle_noop_message.cjs'); - await main(); - - detection: - needs: agent - if: needs.agent.outputs.output_types != '' || needs.agent.outputs.has_patch == 'true' - runs-on: ubuntu-latest - permissions: {} - timeout-minutes: 10 - outputs: - success: ${{ steps.parse_results.outputs.success }} - steps: - - name: Setup Scripts - uses: github/gh-aw/actions/setup@a70c5eada06553e3510ac27f2c3bda9d3705bccb # v0.46.3 - with: - destination: /opt/gh-aw/actions - - name: Download agent artifacts - continue-on-error: true - uses: 
actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 - with: - name: agent-artifacts - path: /tmp/gh-aw/threat-detection/ - - name: Download agent output artifact - continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 - with: - name: agent-output - path: /tmp/gh-aw/threat-detection/ - - name: Echo agent output types - env: - AGENT_OUTPUT_TYPES: ${{ needs.agent.outputs.output_types }} - run: | - echo "Agent output-types: $AGENT_OUTPUT_TYPES" - - name: Setup threat detection - uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 - env: - WORKFLOW_NAME: "Add Copilot Reviewer After Security Approval" - WORKFLOW_DESCRIPTION: "Adds Copilot as a reviewer on a pull request after the security review agent approves it. Triggered when any review is submitted; only acts when the review is an approval from the security reviewer." - HAS_PATCH: ${{ needs.agent.outputs.has_patch }} - with: - script: | - const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); - setupGlobals(core, github, context, exec, io); - const { main } = require('/opt/gh-aw/actions/setup_threat_detection.cjs'); - await main(); - - name: Ensure threat-detection directory and log - run: | - mkdir -p /tmp/gh-aw/threat-detection - touch /tmp/gh-aw/threat-detection/detection.log - - name: Validate COPILOT_GITHUB_TOKEN secret - id: validate-secret - run: /opt/gh-aw/actions/validate_multi_secret.sh COPILOT_GITHUB_TOKEN 'GitHub Copilot CLI' https://github.github.com/gh-aw/reference/engines/#github-copilot-default - env: - COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }} - - name: Install GitHub Copilot CLI - run: /opt/gh-aw/actions/install_copilot_cli.sh 0.0.411 - - name: Execute GitHub Copilot CLI - id: agentic_execution - # Copilot CLI tool arguments (sorted): - # --allow-tool shell(cat) - # --allow-tool shell(grep) - # --allow-tool shell(head) - # --allow-tool shell(jq) - # --allow-tool shell(ls) - 
# --allow-tool shell(tail) - # --allow-tool shell(wc) - timeout-minutes: 20 - run: | - set -o pipefail - COPILOT_CLI_INSTRUCTION="$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" - mkdir -p /tmp/ - mkdir -p /tmp/gh-aw/ - mkdir -p /tmp/gh-aw/agent/ - mkdir -p /tmp/gh-aw/sandbox/agent/logs/ - copilot --add-dir /tmp/ --add-dir /tmp/gh-aw/ --add-dir /tmp/gh-aw/agent/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --disable-builtin-mcps --allow-tool 'shell(cat)' --allow-tool 'shell(grep)' --allow-tool 'shell(head)' --allow-tool 'shell(jq)' --allow-tool 'shell(ls)' --allow-tool 'shell(tail)' --allow-tool 'shell(wc)' --share /tmp/gh-aw/sandbox/agent/logs/conversation.md --prompt "$COPILOT_CLI_INSTRUCTION"${GH_AW_MODEL_DETECTION_COPILOT:+ --model "$GH_AW_MODEL_DETECTION_COPILOT"} 2>&1 | tee /tmp/gh-aw/threat-detection/detection.log - env: - COPILOT_AGENT_RUNNER_TYPE: STANDALONE - COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }} - GH_AW_MODEL_DETECTION_COPILOT: ${{ vars.GH_AW_MODEL_DETECTION_COPILOT || '' }} - GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt - GITHUB_HEAD_REF: ${{ github.head_ref }} - GITHUB_REF_NAME: ${{ github.ref_name }} - GITHUB_STEP_SUMMARY: ${{ env.GITHUB_STEP_SUMMARY }} - GITHUB_WORKSPACE: ${{ github.workspace }} - XDG_CONFIG_HOME: /home/runner - - name: Parse threat detection results - id: parse_results - uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 - with: - script: | - const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); - setupGlobals(core, github, context, exec, io); - const { main } = require('/opt/gh-aw/actions/parse_threat_detection_results.cjs'); - await main(); - - name: Upload threat detection log - if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 - with: - name: threat-detection.log - path: /tmp/gh-aw/threat-detection/detection.log - if-no-files-found: ignore - - pre_activation: - runs-on: ubuntu-slim - outputs: - activated: ${{ 
steps.check_membership.outputs.is_team_member == 'true' }} - steps: - - name: Setup Scripts - uses: github/gh-aw/actions/setup@a70c5eada06553e3510ac27f2c3bda9d3705bccb # v0.46.3 - with: - destination: /opt/gh-aw/actions - - name: Check team membership for workflow - id: check_membership - uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 - env: - GH_AW_REQUIRED_ROLES: admin,maintainer,write - with: - github-token: ${{ secrets.GITHUB_TOKEN }} - script: | - const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); - setupGlobals(core, github, context, exec, io); - const { main } = require('/opt/gh-aw/actions/check_membership.cjs'); - await main(); - - safe_outputs: - needs: - - agent - - detection - if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (needs.detection.outputs.success == 'true') - runs-on: ubuntu-slim - permissions: - contents: read - pull-requests: write - timeout-minutes: 15 - env: - GH_AW_ENGINE_ID: "copilot" - GH_AW_WORKFLOW_ID: "copilot-review" - GH_AW_WORKFLOW_NAME: "Add Copilot Reviewer After Security Approval" - outputs: - add_reviewer_reviewers_added: ${{ steps.process_safe_outputs.outputs.reviewers_added }} - create_discussion_error_count: ${{ steps.process_safe_outputs.outputs.create_discussion_error_count }} - create_discussion_errors: ${{ steps.process_safe_outputs.outputs.create_discussion_errors }} - process_safe_outputs_processed_count: ${{ steps.process_safe_outputs.outputs.processed_count }} - process_safe_outputs_temporary_id_map: ${{ steps.process_safe_outputs.outputs.temporary_id_map }} - steps: - - name: Setup Scripts - uses: github/gh-aw/actions/setup@a70c5eada06553e3510ac27f2c3bda9d3705bccb # v0.46.3 - with: - destination: /opt/gh-aw/actions - - name: Download agent output artifact - continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 - with: - name: agent-output - path: /tmp/gh-aw/safeoutputs/ - - name: Setup agent output 
environment variable - run: | - mkdir -p /tmp/gh-aw/safeoutputs/ - find "/tmp/gh-aw/safeoutputs/" -type f -print - echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV" - - name: Process Safe Outputs - id: process_safe_outputs - uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 - env: - GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }} - GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG: "{\"add_reviewer\":{\"allowed\":[\"copilot\"],\"max\":1,\"target\":\"triggering\"},\"missing_data\":{},\"missing_tool\":{}}" - with: - github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} - script: | - const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); - setupGlobals(core, github, context, exec, io); - const { main } = require('/opt/gh-aw/actions/safe_output_handler_manager.cjs'); - await main(); diff --git a/.github/workflows/copilot-review.md b/.github/workflows/copilot-review.md deleted file mode 100644 index da058a7..0000000 --- a/.github/workflows/copilot-review.md +++ /dev/null @@ -1,56 +0,0 @@ ---- -description: Adds Copilot as a reviewer on a pull request after the security review - agent approves it. Triggered when any review is submitted; only acts when the - review is an approval from the security reviewer. - -on: - pull_request_review: - types: [submitted] - -permissions: - contents: read - pull-requests: read - -tools: - github: - toolsets: [repos, pull_requests] - -safe-outputs: - add-reviewer: - reviewers: [copilot] - max: 1 - target: "triggering" - github-token: ${{ secrets.GH_AW_AGENT_TOKEN }} - noop: - max: 1 ---- - -# Add Copilot Reviewer After Security Approval - -A review was just submitted on pull request -#${{ github.event.pull_request.number }}. - -## Instructions - -1. 
**Fetch the review that was just submitted.** Use the GitHub API to get the - details of the review that triggered this workflow (review ID: - ${{ github.event.review.id }}) on PR - #${{ github.event.pull_request.number }}. - -2. **Check if this is a security review approval.** Determine whether: - - The review was submitted by the security review bot (look for a user - whose login contains "github-actions" or whose review body references - security review categories / security posture analysis). - - The review state is `APPROVED`. - - If **both** conditions are met, proceed to step 3. - - If **either** condition is not met, use `noop` — this review is not - relevant. - -3. **Check if Copilot is already a reviewer.** Fetch the list of requested - reviewers for PR #${{ github.event.pull_request.number }}. If Copilot - (`copilot`) is already in the reviewer list, use `noop` — no action - needed. - -4. **Add Copilot as a reviewer.** If the security review approved the PR and - Copilot is not yet a reviewer, add Copilot as a reviewer on the pull - request. diff --git a/.github/workflows/pr-review-comment-handler.lock.yml b/.github/workflows/create-issue-command.lock.yml similarity index 92% rename from .github/workflows/pr-review-comment-handler.lock.yml rename to .github/workflows/create-issue-command.lock.yml index d1f5023..a7d456f 100644 --- a/.github/workflows/pr-review-comment-handler.lock.yml +++ b/.github/workflows/create-issue-command.lock.yml @@ -21,35 +21,41 @@ # # For more information: https://github.github.com/gh-aw/introduction/overview/ # -# Triages PR review comments. If the comment raises an issue that needs fixing, replies tagging @copilot to fix it directly on the PR. If low priority, creates an issue for later. If not relevant, resolves with a reply. +# Creates a GitHub issue from a PR review comment when a user replies with the /create-issue command. 
# -# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"1e751f45efe3a0408cd1bd2d316ec761b0ef46c81d6cfe28848f96a71f247df1","compiler_version":"v0.46.3"} +# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"5803717917c6f2441fed2dabdcd10f4e053e56267489c860fb2f0f95398abf51","compiler_version":"v0.46.3"} -name: "PR Review Comment Handler" +name: "PR Review Comment — Create Issue" "on": pull_request_review_comment: types: - created + - edited permissions: {} concurrency: - group: "gh-aw-${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}" - cancel-in-progress: true + group: "gh-aw-${{ github.workflow }}-${{ github.event.issue.number || github.event.pull_request.number }}" -run-name: "PR Review Comment Handler" +run-name: "PR Review Comment — Create Issue" jobs: activation: needs: pre_activation - if: needs.pre_activation.outputs.activated == 'true' + if: > + (needs.pre_activation.outputs.activated == 'true') && ((github.event_name == 'pull_request_review_comment') && + ((startsWith(github.event.comment.body, '/create-issue ')) || (github.event.comment.body == '/create-issue'))) runs-on: ubuntu-slim permissions: contents: read + discussions: write + issues: write + pull-requests: write outputs: body: ${{ steps.sanitized.outputs.body }} comment_id: "" comment_repo: "" + slash_command: ${{ needs.pre_activation.outputs.matched_command }} text: ${{ steps.sanitized.outputs.text }} title: ${{ steps.sanitized.outputs.title }} steps: @@ -76,7 +82,7 @@ jobs: - name: Check workflow file timestamps uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 env: - GH_AW_WORKFLOW_FILE: "pr-review-comment-handler.lock.yml" + GH_AW_WORKFLOW_FILE: "create-issue-command.lock.yml" with: script: | const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); @@ -105,6 +111,7 @@ jobs: GH_AW_GITHUB_RUN_ID: ${{ github.run_id }} GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }} GH_AW_IS_PR_COMMENT: ${{ 
github.event.issue.pull_request && 'true' || '' }} + GH_AW_STEPS_SANITIZED_OUTPUTS_TEXT: ${{ steps.sanitized.outputs.text }} run: | bash /opt/gh-aw/actions/create_prompt_first.sh cat << 'GH_AW_PROMPT_EOF' > "$GH_AW_PROMPT" @@ -178,7 +185,7 @@ jobs: GH_AW_PROMPT_EOF cat << 'GH_AW_PROMPT_EOF' >> "$GH_AW_PROMPT" - {{#runtime-import .github/workflows/pr-review-comment-handler.md}} + {{#runtime-import .github/workflows/create-issue-command.md}} GH_AW_PROMPT_EOF - name: Interpolate variables and render templates uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 @@ -186,6 +193,7 @@ jobs: GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt GH_AW_GITHUB_EVENT_COMMENT_ID: ${{ github.event.comment.id }} GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER: ${{ github.event.pull_request.number }} + GH_AW_STEPS_SANITIZED_OUTPUTS_TEXT: ${{ steps.sanitized.outputs.text }} with: script: | const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); @@ -207,6 +215,7 @@ jobs: GH_AW_IS_PR_COMMENT: ${{ github.event.issue.pull_request && 'true' || '' }} GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_ACTIVATED: ${{ needs.pre_activation.outputs.activated }} GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_MATCHED_COMMAND: ${{ needs.pre_activation.outputs.matched_command }} + GH_AW_STEPS_SANITIZED_OUTPUTS_TEXT: ${{ steps.sanitized.outputs.text }} with: script: | const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); @@ -228,7 +237,8 @@ jobs: GH_AW_GITHUB_WORKSPACE: process.env.GH_AW_GITHUB_WORKSPACE, GH_AW_IS_PR_COMMENT: process.env.GH_AW_IS_PR_COMMENT, GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_ACTIVATED: process.env.GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_ACTIVATED, - GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_MATCHED_COMMAND: process.env.GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_MATCHED_COMMAND + GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_MATCHED_COMMAND: process.env.GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_MATCHED_COMMAND, + GH_AW_STEPS_SANITIZED_OUTPUTS_TEXT: process.env.GH_AW_STEPS_SANITIZED_OUTPUTS_TEXT } }); - 
name: Validate prompt placeholders @@ -263,7 +273,7 @@ jobs: GH_AW_SAFE_OUTPUTS: /opt/gh-aw/safeoutputs/outputs.jsonl GH_AW_SAFE_OUTPUTS_CONFIG_PATH: /opt/gh-aw/safeoutputs/config.json GH_AW_SAFE_OUTPUTS_TOOLS_PATH: /opt/gh-aw/safeoutputs/tools.json - GH_AW_WORKFLOW_ID_SANITIZED: prreviewcommenthandler + GH_AW_WORKFLOW_ID_SANITIZED: createissuecommand outputs: checkout_pr_success: ${{ steps.checkout-pr.outputs.checkout_pr_success || 'true' }} has_patch: ${{ steps.collect_output.outputs.has_patch }} @@ -321,7 +331,7 @@ jobs: version: "", agent_version: "0.0.411", cli_version: "v0.46.3", - workflow_name: "PR Review Comment Handler", + workflow_name: "PR Review Comment — Create Issue", experimental: false, supports_tools_allowlist: true, run_id: context.runId, @@ -378,12 +388,12 @@ jobs: mkdir -p /tmp/gh-aw/safeoutputs mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs cat > /opt/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_EOF' - {"create_issue":{"max":10},"missing_data":{},"missing_tool":{},"noop":{"max":10},"resolve_pull_request_review_thread":{"max":10}} + {"create_issue":{"max":1},"missing_data":{},"missing_tool":{},"noop":{"max":1},"resolve_pull_request_review_thread":{"max":1}} GH_AW_SAFE_OUTPUTS_CONFIG_EOF cat > /opt/gh-aw/safeoutputs/tools.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_EOF' [ { - "description": "Create a new GitHub issue for tracking bugs, feature requests, or tasks. Use this for actionable work items that need assignment, labeling, and status tracking. For reports, announcements, or status updates that don't require task tracking, use create_discussion instead. CONSTRAINTS: Maximum 10 issue(s) can be created.", + "description": "Create a new GitHub issue for tracking bugs, feature requests, or tasks. Use this for actionable work items that need assignment, labeling, and status tracking. For reports, announcements, or status updates that don't require task tracking, use create_discussion instead. 
CONSTRAINTS: Maximum 1 issue(s) can be created.", "inputSchema": { "additionalProperties": false, "properties": { @@ -424,7 +434,7 @@ jobs: "name": "create_issue" }, { - "description": "Reply to an existing review comment on a pull request. Use this to respond to feedback, answer questions, or acknowledge review comments. The comment_id must be the numeric ID of an existing review comment. CONSTRAINTS: Maximum 10 reply/replies can be created.", + "description": "Reply to an existing review comment on a pull request. Use this to respond to feedback, answer questions, or acknowledge review comments. The comment_id must be the numeric ID of an existing review comment. CONSTRAINTS: Maximum 1 reply/replies can be created.", "inputSchema": { "additionalProperties": false, "properties": { @@ -456,7 +466,7 @@ jobs: "name": "reply_to_pull_request_review_comment" }, { - "description": "Resolve a review thread on a pull request. Use this to mark a review conversation as resolved after addressing the feedback. The thread_id must be the node ID of the review thread (e.g., PRRT_kwDO...). CONSTRAINTS: Maximum 10 review thread(s) can be resolved.", + "description": "Resolve a review thread on a pull request. Use this to mark a review conversation as resolved after addressing the feedback. The thread_id must be the node ID of the review thread (e.g., PRRT_kwDO...). 
CONSTRAINTS: Maximum 1 review thread(s) can be resolved.", "inputSchema": { "additionalProperties": false, "properties": { @@ -815,6 +825,7 @@ jobs: GH_AW_ALLOWED_DOMAINS: "api.business.githubcopilot.com,api.enterprise.githubcopilot.com,api.github.com,api.githubcopilot.com,api.individual.githubcopilot.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,github.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,ppa.launchpad.net,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,telemetry.enterprise.githubcopilot.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com" GITHUB_SERVER_URL: ${{ github.server_url }} GITHUB_API_URL: ${{ github.api_url }} + GH_AW_COMMAND: create-issue with: script: | const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); @@ -923,8 +934,8 @@ jobs: uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 env: GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }} - GH_AW_NOOP_MAX: 10 - GH_AW_WORKFLOW_NAME: "PR Review Comment Handler" + GH_AW_NOOP_MAX: 1 + GH_AW_WORKFLOW_NAME: "PR Review Comment — Create Issue" with: github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} script: | @@ -937,7 +948,7 @@ jobs: uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 env: GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }} - GH_AW_WORKFLOW_NAME: "PR Review Comment Handler" + GH_AW_WORKFLOW_NAME: "PR Review Comment — Create Issue" with: github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} script: | @@ 
-950,10 +961,10 @@ jobs: uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 env: GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }} - GH_AW_WORKFLOW_NAME: "PR Review Comment Handler" + GH_AW_WORKFLOW_NAME: "PR Review Comment — Create Issue" GH_AW_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} GH_AW_AGENT_CONCLUSION: ${{ needs.agent.result }} - GH_AW_WORKFLOW_ID: "pr-review-comment-handler" + GH_AW_WORKFLOW_ID: "create-issue-command" GH_AW_SECRET_VERIFICATION_RESULT: ${{ needs.agent.outputs.secret_verification_result }} GH_AW_CHECKOUT_PR_SUCCESS: ${{ needs.agent.outputs.checkout_pr_success }} GH_AW_GROUP_REPORTS: "false" @@ -969,7 +980,7 @@ jobs: uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 env: GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }} - GH_AW_WORKFLOW_NAME: "PR Review Comment Handler" + GH_AW_WORKFLOW_NAME: "PR Review Comment — Create Issue" GH_AW_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} GH_AW_AGENT_CONCLUSION: ${{ needs.agent.result }} GH_AW_NOOP_MESSAGE: ${{ steps.noop.outputs.noop_message }} @@ -1015,8 +1026,8 @@ jobs: - name: Setup threat detection uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 env: - WORKFLOW_NAME: "PR Review Comment Handler" - WORKFLOW_DESCRIPTION: "Triages PR review comments. If the comment raises an issue that needs fixing, replies tagging @copilot to fix it directly on the PR. If low priority, creates an issue for later. If not relevant, resolves with a reply." + WORKFLOW_NAME: "PR Review Comment — Create Issue" + WORKFLOW_DESCRIPTION: "Creates a GitHub issue from a PR review comment when a user replies with the /create-issue command." 
HAS_PATCH: ${{ needs.agent.outputs.has_patch }} with: script: | @@ -1082,15 +1093,36 @@ jobs: if-no-files-found: ignore pre_activation: + if: > + (github.event_name == 'pull_request_review_comment') && ((startsWith(github.event.comment.body, '/create-issue ')) || + (github.event.comment.body == '/create-issue')) runs-on: ubuntu-slim + permissions: + discussions: write + issues: write + pull-requests: write outputs: - activated: ${{ steps.check_membership.outputs.is_team_member == 'true' }} + activated: ${{ (steps.check_membership.outputs.is_team_member == 'true') && (steps.check_command_position.outputs.command_position_ok == 'true') }} + matched_command: ${{ steps.check_command_position.outputs.matched_command }} steps: - name: Setup Scripts uses: github/gh-aw/actions/setup@a70c5eada06553e3510ac27f2c3bda9d3705bccb # v0.46.3 with: destination: /opt/gh-aw/actions - - name: Check team membership for workflow + - name: Add eyes reaction for immediate feedback + id: react + if: github.event_name == 'issues' || github.event_name == 'issue_comment' || github.event_name == 'pull_request_review_comment' || github.event_name == 'discussion' || github.event_name == 'discussion_comment' || (github.event_name == 'pull_request') && (github.event.pull_request.head.repo.id == github.repository_id) + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_REACTION: "eyes" + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/add_reaction.cjs'); + await main(); + - name: Check team membership for command workflow id: check_membership uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 env: @@ -1102,6 +1134,17 @@ jobs: setupGlobals(core, github, context, exec, io); const { main } = require('/opt/gh-aw/actions/check_membership.cjs'); await main(); + - 
name: Check command position + id: check_command_position + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_COMMANDS: "[\"create-issue\"]" + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/check_command_position.cjs'); + await main(); safe_outputs: needs: @@ -1116,8 +1159,8 @@ jobs: timeout-minutes: 15 env: GH_AW_ENGINE_ID: "copilot" - GH_AW_WORKFLOW_ID: "pr-review-comment-handler" - GH_AW_WORKFLOW_NAME: "PR Review Comment Handler" + GH_AW_WORKFLOW_ID: "create-issue-command" + GH_AW_WORKFLOW_NAME: "PR Review Comment — Create Issue" outputs: create_discussion_error_count: ${{ steps.process_safe_outputs.outputs.create_discussion_error_count }} create_discussion_errors: ${{ steps.process_safe_outputs.outputs.create_discussion_errors }} @@ -1144,7 +1187,7 @@ jobs: uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 env: GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }} - GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG: "{\"create_issue\":{\"max\":10},\"missing_data\":{},\"missing_tool\":{},\"reply_to_pull_request_review_comment\":{\"max\":10},\"resolve_pull_request_review_thread\":{\"max\":10}}" + GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG: "{\"create_issue\":{\"max\":1},\"missing_data\":{},\"missing_tool\":{},\"reply_to_pull_request_review_comment\":{\"max\":1},\"resolve_pull_request_review_thread\":{\"max\":1}}" with: github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} script: | diff --git a/.github/workflows/create-issue-command.md b/.github/workflows/create-issue-command.md new file mode 100644 index 0000000..d13edeb --- /dev/null +++ b/.github/workflows/create-issue-command.md @@ -0,0 +1,87 @@ +--- +description: Creates a GitHub issue from a PR review comment when a user replies + with the /create-issue command. 
+ +on: + slash_command: + name: create-issue + events: [pull_request_review_comment] + +permissions: + contents: read + pull-requests: read + issues: read + +tools: + github: + toolsets: [repos, pull_requests, issues] + +safe-outputs: + reply-to-pull-request-review-comment: + max: 1 + github-token: ${{ secrets.GH_AW_AGENT_TOKEN }} + resolve-pull-request-review-thread: + max: 1 + github-token: ${{ secrets.GH_AW_AGENT_TOKEN }} + create-issue: + max: 1 +--- + +# PR Review Comment — Create Issue + +You are an AI agent that creates a GitHub issue from a PR review comment +when a user requests it with the `/create-issue` command. + +## Context + +- **Pull Request**: #${{ github.event.pull_request.number }} +- **Review Comment ID**: ${{ github.event.comment.id }} +- **Command Text**: ${{ needs.activation.outputs.text }} + +## Your Task + +1. **Fetch the review comment** details using the GitHub API with comment + ID ${{ github.event.comment.id }} on + PR #${{ github.event.pull_request.number }}. Retrieve the comment body, + author, file path, line number, and diff hunk. + +2. **Find the parent comment.** The `/create-issue` command is posted as a + reply to the review comment that should become an issue. Fetch the parent + review comment (the one being replied to) to get the original finding — + its body, file path, line number, and context. + +3. **Create a GitHub issue** with: + - A clear, descriptive title summarizing the review finding. + - A body that includes: + - A description of the issue or suggestion from the original review + comment. + - The file path and line number(s) involved. + - The original review comment text (quoted). + - A link back to the PR: `Related PR: #${{ github.event.pull_request.number }}`. + - A link to the review comment for context. + +4. **Reply to the review comment** confirming the issue was created. Include + the issue number and link (e.g., "Created issue # to track this + finding."). + +5. **Resolve the review thread** after replying. 
+ +## Guidelines + +- If the `/create-issue` command includes additional text after it (e.g., + `/create-issue high priority` or `/create-issue add label:security`), + use that as extra context for the issue title or body — but do not try + to parse structured options. +- Keep the issue title concise but specific enough to be actionable. +- Quote the original review comment in the issue body using markdown + blockquote syntax. +- If you cannot determine the parent comment (the `/create-issue` was posted + as a top-level comment, not a reply), create the issue from the comment + itself. + +## Safe Outputs + +- Use `create-issue` to create the tracking issue. +- Use `reply-to-pull-request-review-comment` to confirm the issue was created. +- Use `resolve-pull-request-review-thread` to resolve the thread after + creating the issue. diff --git a/.github/workflows/pr-review-comment-handler.md b/.github/workflows/pr-review-comment-handler.md deleted file mode 100644 index 855eebc..0000000 --- a/.github/workflows/pr-review-comment-handler.md +++ /dev/null @@ -1,150 +0,0 @@ ---- -description: Triages PR review comments. If the comment raises an issue that needs - fixing, replies tagging @copilot to fix it directly on the PR. If low priority, - creates an issue for later. If not relevant, resolves with a reply. - -on: - pull_request_review_comment: - types: [created] - -permissions: - contents: read - pull-requests: read - issues: read - -tools: - github: - toolsets: [repos, pull_requests, issues] - -safe-outputs: - reply-to-pull-request-review-comment: - max: 10 - github-token: ${{ secrets.GH_AW_AGENT_TOKEN }} - resolve-pull-request-review-thread: - max: 10 - github-token: ${{ secrets.GH_AW_AGENT_TOKEN }} - create-issue: - max: 10 - noop: - max: 10 ---- - -# PR Review Comment Handler - -You are an AI agent that triages pull request review comments on an open PR. 
-You read the comment, assess its importance, and take the appropriate action: -tag @copilot to fix it, create a low-priority issue for later, or resolve -it directly with a reply. - -## Context - -- **Pull Request**: #${{ github.event.pull_request.number }} -- **Review Comment ID**: ${{ github.event.comment.id }} - -## Your Task - -1. **Fetch the review comment** details using the GitHub API with comment - ID ${{ github.event.comment.id }} on - PR #${{ github.event.pull_request.number }}. Retrieve the comment body, - author, file path, line number, and diff hunk. - -2. **Fetch the full pull request details and diff** for - PR #${{ github.event.pull_request.number }} to understand the broader - context of the changes. - -3. **Classify the comment** into one of these categories: - - - **Needs fixing**: The comment identifies a genuine issue that must be - addressed — a bug, security concern, logic error, missing validation, - style violation against project standards, performance problem, etc. - - **Low priority**: The comment raises a valid but minor point (small - refactor, optional improvement, cosmetic suggestion, or a medium/low - severity security concern) that does not need to be fixed right now. - - **Not relevant**: The comment is praise, a question already answered by - the code, a subjective preference with no clear benefit, a - misunderstanding of the code's intent, or otherwise does not require any - action. - -4. **Act based on your classification**: - - ### If the comment needs fixing - - Reply to the review comment on the PR tagging `@copilot` and asking it - to fix the issue. The reply must include: - - A brief acknowledgement that the reviewer's concern is valid. - - A clear description of what needs to be fixed. - - The tag `@copilot` so Copilot picks it up and applies the fix - directly on this PR. - - Example reply: - > Valid point — this needs to be fixed. - > @copilot Please fix this: expected behavior should be>. 
- - Do **not** resolve the thread — leave it open for Copilot to address. - - ### If the comment is low priority - - 1. Create a GitHub issue with: - - A clear title summarizing the suggestion. - - A body that includes: - - A description of the suggested improvement. - - The file path and line number(s) involved. - - A link back to the PR: `Related PR: #${{ github.event.pull_request.number }}`. - - The review comment text for context. - - Apply the `low-priority` label. - 2. Reply to the review comment on the PR with: - - A message explaining this is a valid but low-priority point, tracked - in the created issue for later (e.g., "Good point — tracked as a - low-priority item in # for a future iteration."). - 3. Resolve the review thread. - - ### If the comment is not relevant - - 1. Reply to the review comment with a clear, respectful explanation of why - no change is needed. Reference the relevant code, project standards, or - PR context to justify your reasoning. Do **not** create an issue. - 2. Resolve the review thread. - -## Classification Guidelines - -When deciding how to classify a comment, consider: - -- **Project standards**: This is a Python monorepo using Ruff, Pyright strict - mode, Bandit, and pytest. Code must follow the standards in - `CODING_STANDARDS.md`. If the comment aligns with these standards, it - likely needs fixing. -- **Severity**: Correctness, security, and maintainability issues need - fixing. Cosmetic or style-only preferences are low priority at most. -- **Concreteness**: A comment with a specific, reproducible concern is more - important than a vague suggestion. -- **When in doubt**: Err on the side of treating it as needing a fix or - low priority rather than dismissing a comment. - -## Response Format - -- Keep replies concise and professional. -- When tagging @copilot, be specific about what needs to change so Copilot - can act on it immediately. -- When creating a low-priority issue, include the issue number in the reply. 
-- When explaining why a comment is not relevant, cite the specific code or - standard that supports your reasoning. -- Do not be dismissive — acknowledge the reviewer's perspective even when - disagreeing. - -## Safe Outputs - -- **Needs fixing**: Use `reply-to-pull-request-review-comment` to reply - tagging `@copilot` with a fix request. Do not resolve the thread. -- **Low priority**: Use `create-issue` to create a low-priority issue - (linked to the PR), then `reply-to-pull-request-review-comment` to reply - with the issue link, then `resolve-pull-request-review-thread` to - resolve the thread. -- **Not relevant**: Use `reply-to-pull-request-review-comment` to reply - with an explanation, then `resolve-pull-request-review-thread` to - resolve the thread. -- **Unclassifiable** (empty or incomprehensible comment): Use - `reply-to-pull-request-review-comment` to reply tagging the PR author so - they can review the comment manually (e.g., "I couldn't determine what - action is needed here. @, could you take a look?"). Look up - the PR author from the pull request details fetched earlier. diff --git a/.github/workflows/security-review.md b/.github/workflows/security-review.md index 0a66fce..41f6529 100644 --- a/.github/workflows/security-review.md +++ b/.github/workflows/security-review.md @@ -62,22 +62,18 @@ agent instructions. 5. **Submit the review.** After posting all inline comments: - If you found any **critical** or **high** severity issues, submit the review with `REQUEST_CHANGES` and a summary body listing the top findings. - - If you found only **medium** or **low** issues, submit with `APPROVE` and + - If you found only **medium** or **low** issues, submit with `COMMENT` and a brief summary noting the medium/low findings. These are not blocking. - - If no issues were found, submit with `APPROVE` and a body stating the + - If no issues were found, submit with `COMMENT` and a body stating the changes look secure. 
- - **Supersede previous review if resolved.** Check the cache memory for - this PR to see if a previous security review submitted - `REQUEST_CHANGES`. If it did, compare the previous findings against the - current diff. If the previously flagged issues have been fixed and no - new critical/high issues are found, submit the new review as `APPROVE` - with a detailed body that includes: - - A summary stating the previous issues have been resolved. - - A list of the previously flagged findings and how each was addressed - (e.g., "**Input Validation** (high): User input is now sanitized in - `validators.py` — resolved."). - - Any remaining medium/low findings from the current review, if any. - - This replaces the old `REQUEST_CHANGES` review and unblocks the PR. + - **Every push dismisses the previous review.** Each run submits a fresh + review that replaces the previous one. Check the cache memory for this + PR to compare previous findings against the current diff. If any + previously flagged **critical** or **high** issues are still present + (not resolved and not outdated), include them in the current review and + submit with `REQUEST_CHANGES`. If all previous critical/high issues + have been resolved or are outdated, submit with `COMMENT` and include + a summary of what was resolved. 6. **Update memory.** After submitting the review: - Write/update PR-specific memory at diff --git a/README.md b/README.md index 4076ef8..43e280c 100644 --- a/README.md +++ b/README.md @@ -62,9 +62,9 @@ flowchart TB S3[Copilot security review agent — 15 posture categories] end - subgraph L5["5. Copilot Review"] + subgraph L5["5. 
Copilot Review (branch protection)"] direction LR - CR1[Copilot code review — assigned automatically] + CR1[Copilot code review — required reviewer on main] CR2[AI-powered suggestions and comments] end @@ -91,7 +91,7 @@ Each layer catches different classes of issues: | **Pre-commit** | On `git commit` (staged files) | Style drift, security anti-patterns, broken configs, stale lockfiles | | **CI quality gate** | On PR | Lock verification, full repo-wide type safety, code quality, test regressions, coverage, build validation. Split into three sub-layers: *code quality* (lock-verify, format, lint, type checks, Bandit, markdown lint), *tests* (PyTest + coverage), and *build validation* (wheel build + Docker build & smoke test, both path-filtered) | | **CI security** | On PR / push to main / schedule | CodeQL SAST, Dependabot dependency updates, Copilot security review agent (15 posture categories) | -| **Copilot Review** | On PR (after security review approves) | AI-powered code review with suggestions and inline comments | +| **Copilot Review** | On PR targeting `main` (branch protection) | AI-powered code review with suggestions and inline comments, configured as required reviewer | | **Release** | On push to main or manual | Agent release: builds changed agents, creates `-v` tags with wheel assets. 
Monorepo release: tags shared infra changes as `v` | --- @@ -147,11 +147,9 @@ Repo root │ │ └─ feature_request.yml # feature request template │ ├─ workflows/ # GitHub Actions workflows │ │ ├─ codeql-analysis.yml # CodeQL security scanning -│ │ ├─ copilot-review.lock.yml # compiled agentic workflow (generated) -│ │ ├─ copilot-review.md # agentic workflow (add Copilot reviewer) │ │ ├─ monorepo-release.yml # tag and release shared monorepo infra -│ │ ├─ pr-review-comment-handler.lock.yml # compiled agentic workflow (generated) -│ │ ├─ pr-review-comment-handler.md # agentic workflow (triage review comments) +│ │ ├─ create-issue-command.lock.yml # compiled agentic workflow (generated) +│ │ ├─ create-issue-command.md # agentic workflow (/create-issue command) │ │ ├─ python-code-quality.yml # format, lint, type-check, security scan │ │ ├─ python-docker-build.yml # build and smoke-test agent Docker images │ │ ├─ python-docs.yml # build Sphinx docs, deploy to GitHub Pages @@ -311,7 +309,7 @@ flowchart TD SR --> SR1["Read PR diff"] SR1 --> SR2["Review 15 security
posture categories"] SR2 --> SR3["Post inline review
comments"] - SR3 --> SR4["Submit review
(REQUEST_CHANGES
or APPROVE)"] + SR3 --> SR4["Submit review
(REQUEST_CHANGES
or COMMENT)"] ``` ### Release workflow — on push to main or manual dispatch @@ -545,7 +543,7 @@ Publishing is **commented out** by default — the workflow only creates tags an ## Agentic workflows -The repository includes [GitHub Agentic Workflows](https://github.github.com/gh-aw/) that automate security review, Copilot code review, and PR review comment triage on every pull request. +The repository includes [GitHub Agentic Workflows](https://github.github.com/gh-aw/) that automate security review and issue creation from PR review comments. ### Security review agent @@ -558,32 +556,19 @@ The agentic workflow at [`.github/workflows/security-review.md`](.github/workflo 1. Reads the pull request diff. 2. Reviews changed files against all 15 security posture categories. 3. Posts inline review comments on specific code lines where issues are found. -4. Submits a consolidated review (`REQUEST_CHANGES` for critical/high, `APPROVE` otherwise). +4. Submits a consolidated review (`REQUEST_CHANGES` for critical/high, `COMMENT` otherwise). >[!IMPORTANT] > The `security-review.md` workflow is using the custom agent `.github/agents/security-reviewer.agent.md` which is defined in this repository. To be able to use this agent with `copilot` AI Engine, `COPILOT_GITHUB_TOKEN` secret must be added to the repository with a fine-grained PAT that has `Copilot Requests: Read-only` scope on public repositories. For more information see the [documentation](https://github.github.com/gh-aw/reference/auth/#copilot_github_token). -### Copilot code review +### Create issue command -The agentic workflow at [`.github/workflows/copilot-review.md`](.github/workflows/copilot-review.md) triggers when a PR review is submitted. It checks whether the security review agent approved the PR and, if so, adds Copilot as a reviewer for additional code quality coverage. 
This requires a fine-grained PAT stored as the [`GH_AW_AGENT_TOKEN` repository secret](https://github.github.com/gh-aw/reference/auth/#gh_aw_agent_token) with: +The agentic workflow at [`.github/workflows/create-issue-command.md`](.github/workflows/create-issue-command.md) is triggered by the `/create-issue` slash command in PR review comment replies. When invoked, it: -- Resource owner: Your user account -- Repository access: "Public repositories" or select specific repos -- Repository permissions: - - Actions: Write - - Contents: Write - - Issues: Write - - Pull requests: Write - -### PR review comment handler - -The agentic workflow at [`.github/workflows/pr-review-comment-handler.md`](.github/workflows/pr-review-comment-handler.md) triggers when a review comment is posted on a PR. It triages comments into three categories: - -1. **Needs fixing** — replies tagging `@copilot` to address the issue directly. -2. **Low priority** — creates a tracking issue for minor items (including medium/low security findings). -3. **Not relevant** — resolves the review thread. - -Comments that cannot be classified are escalated by tagging the PR author. +1. Fetches the review comment and its parent. +2. Creates a GitHub issue summarizing the finding. +3. Replies to the review thread with a link to the created issue. +4. Resolves the review thread. 
### Compiling agentic workflows @@ -643,7 +628,7 @@ The docs workflow triggers on pushes to `main` when documentation sources, agent | **Dependabot** | Weekly updates for pip/uv dependencies and GitHub Actions | Shrinks vulnerability exposure windows | | **CodeQL** | SAST/code scanning for Python and GitHub Actions | Finds dataflow and security issues beyond linters | | **Copilot security agent** | AI-powered reviews against 15 security posture categories | Catches issues that static analysis misses | -| **Branch protection** | Required checks, signed commits, Copilot reviewer, auto-merge for trusted bots | Prevents unverified code from reaching main | +| **Branch protection** | Required checks, signed commits, code owner approval, Copilot reviewer on `main`, auto-merge for trusted bots | Prevents unverified code from reaching main | | **Pre-commit hooks** | Staged-file checks before every commit | Catches issues at the earliest possible point | | **Dual type checkers** | Pyright + MyPy with different inference engines | Maximal type safety for AI-generated code | From 8b9a0075f051983ee118ddf9dfb4820eb090739a Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Feb 2026 21:04:06 +0000 Subject: [PATCH 85/90] fix: add docs-install to setup task and update DEVELOPMENT.md Co-authored-by: pmalarme <686568+pmalarme@users.noreply.github.com> --- DEVELOPMENT.md | 3 ++- pyproject.toml | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/DEVELOPMENT.md b/DEVELOPMENT.md index e24a01f..4cfaf25 100644 --- a/DEVELOPMENT.md +++ b/DEVELOPMENT.md @@ -200,12 +200,13 @@ To set up the development environment, run: uv run poe setup ``` -This task creates or refreshes the virtual environment in `.venv/`, installs all dependencies, and sets up pre-commit hooks. You can specify a Python version with the `--python` flag. 
+This task creates or refreshes the virtual environment in `.venv/`, installs all dependencies (including the docs group), and sets up pre-commit hooks. You can specify a Python version with the `--python` flag. It is a sequence of the following Poe tasks: - [venv](#venv) - [install](#install) +- [docs-install](#docs-install) - [pre-commit-install](#pre-commit-install) ##### venv diff --git a/pyproject.toml b/pyproject.toml index be14f2b..466f74c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -215,7 +215,7 @@ lock-verify = "uv lock --locked" test = "python scripts/run_tasks_in_agents_if_exists.py test" markdown-code-lint = "uv run python scripts/check_md_code_blocks.py README.md docs/manual/*.md agents/**/README.md .github/instructions/*.md" pre-commit-install = "uv run pre-commit install --install-hooks --overwrite" -install = "uv sync --all-packages --all-extras --dev -U --prerelease=if-necessary-or-explicit --group docs" +install = "uv sync --all-packages --all-extras --dev -U --prerelease=if-necessary-or-explicit --no-group=docs" docs = "uv run python scripts/generate_docs.py" docs-install = "uv sync --all-packages --all-extras --dev -U --prerelease=if-necessary-or-explicit --group docs" check = ["lock-verify", "fmt", "lint", "pyright", "mypy", "bandit", "test", "markdown-code-lint"] @@ -235,6 +235,7 @@ args = [{ name = "python", default = "3.13", options = ['-p', '--python'] }] sequence = [ { ref = "venv --python $python"}, { ref = "install" }, + { ref = "docs-install" }, { ref = "pre-commit-install" } ] args = [{ name = "python", default = "3.13", options = ['-p', '--python'] }] From 31efbd94d158ba318396b7ed03642300a7a94104 Mon Sep 17 00:00:00 2001 From: pmalarme Date: Thu, 19 Feb 2026 22:15:55 +0100 Subject: [PATCH 86/90] refactor: remove resolve review thread functionality from create-issue command --- .../workflows/create-issue-command.lock.yml | 32 ++----------------- .github/workflows/create-issue-command.md | 7 ---- README.md | 1 - 3 files changed, 
3 insertions(+), 37 deletions(-) diff --git a/.github/workflows/create-issue-command.lock.yml b/.github/workflows/create-issue-command.lock.yml index a7d456f..c54b19f 100644 --- a/.github/workflows/create-issue-command.lock.yml +++ b/.github/workflows/create-issue-command.lock.yml @@ -23,7 +23,7 @@ # # Creates a GitHub issue from a PR review comment when a user replies with the /create-issue command. # -# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"5803717917c6f2441fed2dabdcd10f4e053e56267489c860fb2f0f95398abf51","compiler_version":"v0.46.3"} +# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"6ce0925dd5c88c9c0c08f269d35e79ba5826861e9f1d750907eefc1a21fee9e7","compiler_version":"v0.46.3"} name: "PR Review Comment — Create Issue" "on": @@ -388,7 +388,7 @@ jobs: mkdir -p /tmp/gh-aw/safeoutputs mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs cat > /opt/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_EOF' - {"create_issue":{"max":1},"missing_data":{},"missing_tool":{},"noop":{"max":1},"resolve_pull_request_review_thread":{"max":1}} + {"create_issue":{"max":1},"missing_data":{},"missing_tool":{},"noop":{"max":1}} GH_AW_SAFE_OUTPUTS_CONFIG_EOF cat > /opt/gh-aw/safeoutputs/tools.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_EOF' [ @@ -465,23 +465,6 @@ jobs: }, "name": "reply_to_pull_request_review_comment" }, - { - "description": "Resolve a review thread on a pull request. Use this to mark a review conversation as resolved after addressing the feedback. The thread_id must be the node ID of the review thread (e.g., PRRT_kwDO...). CONSTRAINTS: Maximum 1 review thread(s) can be resolved.", - "inputSchema": { - "additionalProperties": false, - "properties": { - "thread_id": { - "description": "The node ID of the review thread to resolve (e.g., 'PRRT_kwDOABCD...'). 
This is the GraphQL node ID, not a numeric ID.", - "type": "string" - } - }, - "required": [ - "thread_id" - ], - "type": "object" - }, - "name": "resolve_pull_request_review_thread" - }, { "description": "Report that a tool or capability needed to complete the task is not available, or share any information you deem important about missing functionality or limitations. Use this when you cannot accomplish what was requested because the required functionality is missing or access is restricted.", "inputSchema": { @@ -619,15 +602,6 @@ jobs: "maxLength": 65000 } } - }, - "resolve_pull_request_review_thread": { - "defaultMax": 10, - "fields": { - "thread_id": { - "required": true, - "type": "string" - } - } } } GH_AW_SAFE_OUTPUTS_VALIDATION_EOF @@ -1187,7 +1161,7 @@ jobs: uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 env: GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }} - GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG: "{\"create_issue\":{\"max\":1},\"missing_data\":{},\"missing_tool\":{},\"reply_to_pull_request_review_comment\":{\"max\":1},\"resolve_pull_request_review_thread\":{\"max\":1}}" + GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG: "{\"create_issue\":{\"max\":1},\"missing_data\":{},\"missing_tool\":{},\"reply_to_pull_request_review_comment\":{\"max\":1}}" with: github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} script: | diff --git a/.github/workflows/create-issue-command.md b/.github/workflows/create-issue-command.md index d13edeb..7d2835a 100644 --- a/.github/workflows/create-issue-command.md +++ b/.github/workflows/create-issue-command.md @@ -20,9 +20,6 @@ safe-outputs: reply-to-pull-request-review-comment: max: 1 github-token: ${{ secrets.GH_AW_AGENT_TOKEN }} - resolve-pull-request-review-thread: - max: 1 - github-token: ${{ secrets.GH_AW_AGENT_TOKEN }} create-issue: max: 1 --- @@ -64,8 +61,6 @@ when a user requests it with the `/create-issue` command. the issue number and link (e.g., "Created issue # to track this finding."). -5. 
**Resolve the review thread** after replying. - ## Guidelines - If the `/create-issue` command includes additional text after it (e.g., @@ -83,5 +78,3 @@ when a user requests it with the `/create-issue` command. - Use `create-issue` to create the tracking issue. - Use `reply-to-pull-request-review-comment` to confirm the issue was created. -- Use `resolve-pull-request-review-thread` to resolve the thread after - creating the issue. diff --git a/README.md b/README.md index 43e280c..f6b6d1b 100644 --- a/README.md +++ b/README.md @@ -568,7 +568,6 @@ The agentic workflow at [`.github/workflows/create-issue-command.md`](.github/wo 1. Fetches the review comment and its parent. 2. Creates a GitHub issue summarizing the finding. 3. Replies to the review thread with a link to the created issue. -4. Resolves the review thread. ### Compiling agentic workflows From 5ff785e70aba2ad224fe870bb55c8fc814d74c3a Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Feb 2026 21:19:22 +0000 Subject: [PATCH 87/90] docs: update README.md to reflect docs group installed via setup Co-authored-by: pmalarme <686568+pmalarme@users.noreply.github.com> --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 4076ef8..4368a1e 100644 --- a/README.md +++ b/README.md @@ -118,7 +118,7 @@ uv run poe setup uv run poe check ``` -`poe setup` creates `.venv/`, installs all dev dependencies, and installs pre-commit hooks. `poe check` runs the full quality gate (format, lint, type checks, security, tests, markdown lint) across the entire workspace. +`poe setup` creates `.venv/`, installs all dev dependencies (including the docs group), and installs pre-commit hooks. `poe check` runs the full quality gate (format, lint, type checks, security, tests, markdown lint) across the entire workspace. 
--- @@ -368,7 +368,7 @@ flowchart TD | Task | What it does | | --- | --- | -| `poe setup` | Create `.venv/`, install deps, install pre-commit hooks | +| `poe setup` | Create `.venv/`, install deps (including docs group), install pre-commit hooks | | `poe venv` | Create/refresh `.venv/` (default Python 3.13, override with `-p`) | | `poe install` | `uv sync --all-extras --dev` (docs group excluded) | | `poe pre-commit-install` | Install pre-commit hooks into `.git/hooks` | From d5447581a8bb7d9074a219a2030dea253bbe67e5 Mon Sep 17 00:00:00 2001 From: pmalarme Date: Thu, 19 Feb 2026 22:22:14 +0100 Subject: [PATCH 88/90] feat: enhance create-issue command to check for duplicate issues before creation --- .github/workflows/create-issue-command.md | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/.github/workflows/create-issue-command.md b/.github/workflows/create-issue-command.md index 7d2835a..32917b3 100644 --- a/.github/workflows/create-issue-command.md +++ b/.github/workflows/create-issue-command.md @@ -47,7 +47,14 @@ when a user requests it with the `/create-issue` command. review comment (the one being replied to) to get the original finding — its body, file path, line number, and context. -3. **Create a GitHub issue** with: +3. **Check for duplicate issues.** Search open issues in the repository for + an existing issue that already tracks this review finding (e.g., matching + the file path, line number, or review comment URL). If a matching issue + already exists, **skip creation** and reply to the review comment with a + link to the existing issue (e.g., "An issue already exists for this + finding: #."). + +4. **Create a GitHub issue** with: - A clear, descriptive title summarizing the review finding. - A body that includes: - A description of the issue or suggestion from the original review @@ -57,7 +64,7 @@ when a user requests it with the `/create-issue` command. 
- A link back to the PR: `Related PR: #${{ github.event.pull_request.number }}`. - A link to the review comment for context. -4. **Reply to the review comment** confirming the issue was created. Include +5. **Reply to the review comment** confirming the issue was created. Include the issue number and link (e.g., "Created issue # to track this finding."). From 9de03fd0b810d06f2ec56301e4ceb38514a66ce5 Mon Sep 17 00:00:00 2001 From: pmalarme Date: Thu, 19 Feb 2026 22:31:48 +0100 Subject: [PATCH 89/90] feat: update create-issue workflow to include concurrency settings --- .github/workflows/create-issue-command.lock.yml | 5 +++-- .github/workflows/create-issue-command.md | 4 ++++ 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/.github/workflows/create-issue-command.lock.yml b/.github/workflows/create-issue-command.lock.yml index c54b19f..f5d5bc5 100644 --- a/.github/workflows/create-issue-command.lock.yml +++ b/.github/workflows/create-issue-command.lock.yml @@ -23,7 +23,7 @@ # # Creates a GitHub issue from a PR review comment when a user replies with the /create-issue command. 
# -# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"6ce0925dd5c88c9c0c08f269d35e79ba5826861e9f1d750907eefc1a21fee9e7","compiler_version":"v0.46.3"} +# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"3e9b343d5d564d43e2479c07df250d6faa3cd71f7957e40e4ef116609158c500","compiler_version":"v0.46.3"} name: "PR Review Comment — Create Issue" "on": @@ -35,7 +35,8 @@ name: "PR Review Comment — Create Issue" permissions: {} concurrency: - group: "gh-aw-${{ github.workflow }}-${{ github.event.issue.number || github.event.pull_request.number }}" + cancel-in-progress: false + group: create-issue-${{ github.event.comment.id }} run-name: "PR Review Comment — Create Issue" diff --git a/.github/workflows/create-issue-command.md b/.github/workflows/create-issue-command.md index 32917b3..f25c967 100644 --- a/.github/workflows/create-issue-command.md +++ b/.github/workflows/create-issue-command.md @@ -7,6 +7,10 @@ on: name: create-issue events: [pull_request_review_comment] +concurrency: + group: create-issue-${{ github.event.comment.id }} + cancel-in-progress: false + permissions: contents: read pull-requests: read From f96ba6509732f7014af2643d960c0110b0999b6f Mon Sep 17 00:00:00 2001 From: Pierre Malarme Date: Thu, 19 Feb 2026 22:35:00 +0100 Subject: [PATCH 90/90] Update .github/workflows/python-release.yml Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- .github/workflows/python-release.yml | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/.github/workflows/python-release.yml b/.github/workflows/python-release.yml index 345c3b0..7c64039 100644 --- a/.github/workflows/python-release.yml +++ b/.github/workflows/python-release.yml @@ -53,8 +53,13 @@ jobs: for WHL in "${WHEELS[@]}"; do # Wheel filename: {name}-{version}-{python}-{abi}-{platform}.whl BASENAME=$(basename "$WHL") - NAME=$(echo "$BASENAME" | cut -d- -f1) - VERSION=$(echo "$BASENAME" | cut -d- -f2) + NOEXT=${BASENAME%.whl} + # Strip platform, abi, and 
python tags from the right to get "name-version" + REST=${NOEXT%-*} # drop platform + REST=${REST%-*} # drop abi + REST=${REST%-*} # drop python + VERSION=${REST##*-} + NAME=${REST%-*} TAG="${NAME}-v${VERSION}" # Validate NAME and VERSION to prevent unexpected values from flowing into git commands