diff --git a/.gitignore b/.gitignore
index 08ac8dbda..eb2fc68b4 100644
--- a/.gitignore
+++ b/.gitignore
@@ -150,6 +150,9 @@ webhook-server.private-key.pem
log-colors.json
webhook_server/tests/manifests/logs
.coverage_report.txt
+coverage.json
+webhook-examples
+find_unused_code.py
# AI
.cursor/
@@ -159,3 +162,4 @@ CLAUDE.md
.claude/
.claude-flow/
.swarm/
+CRUSH.md
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 8ec9fa4ca..da0e383ee 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -61,3 +61,13 @@ repos:
- id: mypy
exclude: (tests/)
additional_dependencies: [types-requests, types-PyYAML, types-colorama]
+
+ - repo: https://github.com/pre-commit/mirrors-eslint
+ rev: v9.38.0
+ hooks:
+ - id: eslint
+ files: \.js$
+ exclude: eslint\.config\.js
+ args: [--fix]
+ additional_dependencies:
+ - eslint@9.38.0
diff --git a/Dockerfile b/Dockerfile
index 63b4b2fad..e0ab5bd98 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -9,11 +9,12 @@ ENV PATH="$PATH:$BIN_DIR"
ENV DATA_DIR="$HOME_DIR/data"
ENV APP_DIR="$HOME_DIR/github-webhook-server"
+RUN systemd-machine-id-setup
+
RUN dnf -y install dnf-plugins-core \
&& dnf -y update \
&& dnf -y install \
git \
- hub \
unzip \
gcc \
python3-devel \
@@ -51,6 +52,7 @@ ENV UV_PYTHON=python3.13
ENV UV_COMPILE_BYTECODE=1
ENV UV_NO_SYNC=1
ENV UV_CACHE_DIR=${APP_DIR}/.cache
+ENV PYTHONUNBUFFERED=1
COPY --from=ghcr.io/astral-sh/uv:latest /uv /uvx ${BIN_DIR}/
RUN uv tool install pre-commit && uv tool install poetry && uv tool install prek
@@ -62,7 +64,13 @@ RUN set -x \
&& chmod +x $BIN_DIR/rosa \
&& rm -rf $BIN_DIR/rosa-linux.tar.gz \
&& curl -L https://github.com/regclient/regclient/releases/latest/download/regctl-linux-amd64 >$BIN_DIR/regctl \
- && chmod +x $BIN_DIR/regctl
+ && chmod +x $BIN_DIR/regctl \
+ && curl -L https://github.com/mislav/hub/releases/download/v2.14.2/hub-linux-amd64-2.14.2.tgz --output ${BIN_DIR}/hub-linux-amd64.tgz \
+ && tmp_dir="$(mktemp -d)" \
+ && tar xvf ${BIN_DIR}/hub-linux-amd64.tgz -C "${tmp_dir}" \
+ && mv "${tmp_dir}"/hub-linux-amd64-2.14.2/bin/hub ${BIN_DIR}/hub \
+ && chmod +x ${BIN_DIR}/hub \
+ && rm -rf "${tmp_dir}" ${BIN_DIR}/hub-linux-amd64.tgz
WORKDIR $APP_DIR
diff --git a/README.md b/README.md
index 7d9fc3c75..e12f23a49 100644
--- a/README.md
+++ b/README.md
@@ -57,6 +57,11 @@ GitHub Events → Webhook Server → Repository Management
└─────────────────────────────────────┘
```
+**Key Architecture Components:**
+
+- **Performance Optimized**: Repository data fetched efficiently to minimize API calls
+- **Type-Safe**: Full mypy strict mode coverage ensuring code reliability
+
## Features
### 🔧 Repository Management
diff --git a/entrypoint.py b/entrypoint.py
index 00051ba81..a44bdf8c2 100644
--- a/entrypoint.py
+++ b/entrypoint.py
@@ -46,6 +46,13 @@ def run_podman_cleanup() -> None:
run_podman_cleanup()
result = asyncio.run(repository_and_webhook_settings(webhook_secret=_webhook_secret))
+
+ # Logging Configuration:
+    # - Uvicorn uses default logging which automatically respects the FORCE_COLOR environment variable
+ # for colored terminal output (useful for Docker logs with color support)
+ # - Application logs use simple-logger with console=True for colored output in Docker logs
+ # - Both logging systems work together: uvicorn handles HTTP request logs,
+ # while simple-logger handles application-level logs with structured formatting
uvicorn.run(
"webhook_server.app:FASTAPI_APP",
host=_ip_bind,
diff --git a/eslint.config.js b/eslint.config.js
new file mode 100644
index 000000000..9f845d8c0
--- /dev/null
+++ b/eslint.config.js
@@ -0,0 +1,92 @@
+module.exports = [
+ // Frontend configuration - for browser-based JavaScript
+ {
+ files: ["webhook_server/web/static/**/*.js"],
+ languageOptions: {
+ ecmaVersion: 2022,
+ sourceType: "script",
+ globals: {
+ // Browser environment globals
+ window: "readonly",
+ document: "readonly",
+ console: "readonly",
+ fetch: "readonly",
+ WebSocket: "readonly",
+ localStorage: "readonly",
+ sessionStorage: "readonly",
+ alert: "readonly",
+ confirm: "readonly",
+ prompt: "readonly",
+ setTimeout: "readonly",
+ clearTimeout: "readonly",
+ setInterval: "readonly",
+ clearInterval: "readonly",
+ URLSearchParams: "readonly",
+ AbortController: "readonly",
+ },
+ },
+ rules: {
+ // ESLint recommended rules (manually specified for broader coverage)
+ "constructor-super": "error",
+ "for-direction": "error",
+ "getter-return": "error",
+ "no-async-promise-executor": "error",
+ "no-case-declarations": "error",
+ "no-class-assign": "error",
+ "no-compare-neg-zero": "error",
+ "no-cond-assign": "error",
+ "no-const-assign": "error",
+ "no-constant-condition": "error",
+ "no-control-regex": "error",
+ "no-debugger": "error",
+ "no-delete-var": "error",
+ "no-dupe-args": "error",
+ "no-dupe-class-members": "error",
+ "no-dupe-else-if": "error",
+ "no-dupe-keys": "error",
+ "no-duplicate-case": "error",
+ "no-empty": "error",
+ "no-empty-character-class": "error",
+ "no-empty-pattern": "error",
+ "no-ex-assign": "error",
+ "no-extra-boolean-cast": "error",
+ "no-fallthrough": "error",
+ "no-func-assign": "error",
+ "no-global-assign": "error",
+ "no-import-assign": "error",
+ "no-inner-declarations": "error",
+ "no-invalid-regexp": "error",
+ "no-irregular-whitespace": "error",
+ "no-loss-of-precision": "error",
+ "no-misleading-character-class": "error",
+ "no-new-symbol": "error",
+ "no-obj-calls": "error",
+ "no-octal": "error",
+ "no-prototype-builtins": "error",
+ "no-redeclare": "error",
+ "no-regex-spaces": "error",
+ "no-self-assign": "error",
+ "no-setter-return": "error",
+ "no-shadow-restricted-names": "error",
+ "no-sparse-arrays": "error",
+ "no-this-before-super": "error",
+ "no-unexpected-multiline": "error",
+ "no-unreachable": "error",
+ "no-unsafe-finally": "error",
+ "no-unsafe-negation": "error",
+ "no-unsafe-optional-chaining": "error",
+ "no-unused-labels": "error",
+ "no-useless-backreference": "error",
+ "no-useless-catch": "error",
+ "no-useless-escape": "error",
+ "no-with": "error",
+ "require-yield": "error",
+ "use-isnan": "error",
+ "valid-typeof": "error",
+ // Project-specific overrides
+ "no-unused-vars": "warn",
+ "no-undef": "error",
+ "no-console": "off",
+ },
+ },
+];
diff --git a/pyproject.toml b/pyproject.toml
index 7b88fd4a1..0fc6a5468 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -14,6 +14,12 @@ line-length = 120
fix = true
output-format = "grouped"
+[tool.ruff.lint]
+select = ["E", "F", "W", "I", "B", "UP", "PLC0415", "ARG"]
+
+[tool.ruff.lint.per-file-ignores]
+"webhook_server/tests/*" = ["ARG"]
+
[tool.ruff.format]
exclude = [".git", ".venv", ".mypy_cache", ".tox", "__pycache__"]
@@ -33,20 +39,11 @@ warn_redundant_casts = true
[tool.hatch.build.targets.wheel]
packages = ["webhook_server"]
-[tool.uv]
-dev-dependencies = [
- "ipdb>=0.13.13",
- "ipython>=8.12.3",
- "types-colorama>=0.4.15.20240311",
- "types-pyyaml>=6.0.12.20250516",
- "types-requests>=2.32.4.20250611",
-]
-
[project]
name = "github-webhook-server"
version = "3.0.7"
requires-python = ">=3.12"
-description = "A webhook server to manage Github reposotories and pull requests."
+description = "A webhook server to manage GitHub repositories and pull requests."
readme = "README.md"
license = "Apache-2.0"
classifiers = [
@@ -101,6 +98,11 @@ requires = ["hatchling"]
build-backend = "hatchling.build"
[dependency-groups]
-tests = [
- "psutil>=7.0.0",
+dev = [
+ "ipdb>=0.13.13",
+ "ipython>=8.12.3",
+ "types-colorama>=0.4.15.20240311",
+ "types-pyyaml>=6.0.12.20250516",
+ "types-requests>=2.32.4.20250611",
]
+tests = ["psutil>=7.0.0", "pytest-asyncio>=0.26.0", "pytest-xdist>=3.7.0"]
diff --git a/scripts/generate_changelog.py b/scripts/generate_changelog.py
index 31902492f..b3a0151ac 100644
--- a/scripts/generate_changelog.py
+++ b/scripts/generate_changelog.py
@@ -10,17 +10,22 @@ def json_line(line: str) -> dict:
Format str line to str that can be parsed with json.
In case line is not formatted for json for example:
- '{"title": "Revert "feat: Use git cliff to generate the change log. (#2322)" (#2324)", "commit": "137331fd", "author": "Meni Yakove", "date": "2025-02-16"}'
- title have `"` inside the external `"` `"Revert "feat: Use git cliff to generate the change log. (#2322)" (#2324)"`
+ '{"title": "Revert "feat: Use git cliff to generate the change log. (#2322)" '
+ '(#2324)", "commit": "137331fd", "author": "Meni Yakove", "date": "2025-02-16"}'
+    title has `"` inside the external `"` `"Revert "feat: Use git cliff to '
+ 'generate the change log. (#2322)" (#2324)"`
"""
try:
return json.loads(line)
except json.JSONDecodeError:
# split line like by `,`
- # '{"title": "Revert "feat: Use git cliff to generate the change log. (#2322)" (#2324)", "commit": "137331fd", "author": "Meni Yakove", "date": "2025-02-16"}'
+ # '{"title": "Revert "feat: Use git cliff to generate the change log. '
+ # '(#2322)" (#2324)", "commit": "137331fd", "author": "Meni Yakove", '
+ # '"date": "2025-02-16"}'
line_split = line.split(",")
- # Pop and save `title key` and `title body` from '{"title": "Revert "feat: Use git cliff to generate the change log. (#2322)" (#2324)"'
+ # Pop and save `title key` and `title body` from '{"title": "Revert '
+ # '"feat: Use git cliff to generate the change log. (#2322)" (#2324)"'
title_key, title_body = line_split.pop(0).split(":", 1)
if title_body.count('"') > 2:
diff --git a/uv.lock b/uv.lock
index ad78ff7d0..1618bb22e 100644
--- a/uv.lock
+++ b/uv.lock
@@ -317,76 +317,76 @@ wheels = [
[[package]]
name = "coverage"
-version = "7.11.0"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/1c/38/ee22495420457259d2f3390309505ea98f98a5eed40901cf62196abad006/coverage-7.11.0.tar.gz", hash = "sha256:167bd504ac1ca2af7ff3b81d245dfea0292c5032ebef9d66cc08a7d28c1b8050", size = 811905, upload-time = "2025-10-15T15:15:08.542Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/c4/db/86f6906a7c7edc1a52b2c6682d6dd9be775d73c0dfe2b84f8923dfea5784/coverage-7.11.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9c49e77811cf9d024b95faf86c3f059b11c0c9be0b0d61bc598f453703bd6fd1", size = 216098, upload-time = "2025-10-15T15:13:02.916Z" },
- { url = "https://files.pythonhosted.org/packages/21/54/e7b26157048c7ba555596aad8569ff903d6cd67867d41b75287323678ede/coverage-7.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a61e37a403a778e2cda2a6a39abcc895f1d984071942a41074b5c7ee31642007", size = 216331, upload-time = "2025-10-15T15:13:04.403Z" },
- { url = "https://files.pythonhosted.org/packages/b9/19/1ce6bf444f858b83a733171306134a0544eaddf1ca8851ede6540a55b2ad/coverage-7.11.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:c79cae102bb3b1801e2ef1511fb50e91ec83a1ce466b2c7c25010d884336de46", size = 247825, upload-time = "2025-10-15T15:13:05.92Z" },
- { url = "https://files.pythonhosted.org/packages/71/0b/d3bcbbc259fcced5fb67c5d78f6e7ee965f49760c14afd931e9e663a83b2/coverage-7.11.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:16ce17ceb5d211f320b62df002fa7016b7442ea0fd260c11cec8ce7730954893", size = 250573, upload-time = "2025-10-15T15:13:07.471Z" },
- { url = "https://files.pythonhosted.org/packages/58/8d/b0ff3641a320abb047258d36ed1c21d16be33beed4152628331a1baf3365/coverage-7.11.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:80027673e9d0bd6aef86134b0771845e2da85755cf686e7c7c59566cf5a89115", size = 251706, upload-time = "2025-10-15T15:13:09.4Z" },
- { url = "https://files.pythonhosted.org/packages/59/c8/5a586fe8c7b0458053d9c687f5cff515a74b66c85931f7fe17a1c958b4ac/coverage-7.11.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:4d3ffa07a08657306cd2215b0da53761c4d73cb54d9143b9303a6481ec0cd415", size = 248221, upload-time = "2025-10-15T15:13:10.964Z" },
- { url = "https://files.pythonhosted.org/packages/d0/ff/3a25e3132804ba44cfa9a778cdf2b73dbbe63ef4b0945e39602fc896ba52/coverage-7.11.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a3b6a5f8b2524fd6c1066bc85bfd97e78709bb5e37b5b94911a6506b65f47186", size = 249624, upload-time = "2025-10-15T15:13:12.5Z" },
- { url = "https://files.pythonhosted.org/packages/c5/12/ff10c8ce3895e1b17a73485ea79ebc1896a9e466a9d0f4aef63e0d17b718/coverage-7.11.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:fcc0a4aa589de34bc56e1a80a740ee0f8c47611bdfb28cd1849de60660f3799d", size = 247744, upload-time = "2025-10-15T15:13:14.554Z" },
- { url = "https://files.pythonhosted.org/packages/16/02/d500b91f5471b2975947e0629b8980e5e90786fe316b6d7299852c1d793d/coverage-7.11.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:dba82204769d78c3fd31b35c3d5f46e06511936c5019c39f98320e05b08f794d", size = 247325, upload-time = "2025-10-15T15:13:16.438Z" },
- { url = "https://files.pythonhosted.org/packages/77/11/dee0284fbbd9cd64cfce806b827452c6df3f100d9e66188e82dfe771d4af/coverage-7.11.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:81b335f03ba67309a95210caf3eb43bd6fe75a4e22ba653ef97b4696c56c7ec2", size = 249180, upload-time = "2025-10-15T15:13:17.959Z" },
- { url = "https://files.pythonhosted.org/packages/59/1b/cdf1def928f0a150a057cab03286774e73e29c2395f0d30ce3d9e9f8e697/coverage-7.11.0-cp312-cp312-win32.whl", hash = "sha256:037b2d064c2f8cc8716fe4d39cb705779af3fbf1ba318dc96a1af858888c7bb5", size = 218479, upload-time = "2025-10-15T15:13:19.608Z" },
- { url = "https://files.pythonhosted.org/packages/ff/55/e5884d55e031da9c15b94b90a23beccc9d6beee65e9835cd6da0a79e4f3a/coverage-7.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:d66c0104aec3b75e5fd897e7940188ea1892ca1d0235316bf89286d6a22568c0", size = 219290, upload-time = "2025-10-15T15:13:21.593Z" },
- { url = "https://files.pythonhosted.org/packages/23/a8/faa930cfc71c1d16bc78f9a19bb73700464f9c331d9e547bfbc1dbd3a108/coverage-7.11.0-cp312-cp312-win_arm64.whl", hash = "sha256:d91ebeac603812a09cf6a886ba6e464f3bbb367411904ae3790dfe28311b15ad", size = 217924, upload-time = "2025-10-15T15:13:23.39Z" },
- { url = "https://files.pythonhosted.org/packages/60/7f/85e4dfe65e400645464b25c036a26ac226cf3a69d4a50c3934c532491cdd/coverage-7.11.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:cc3f49e65ea6e0d5d9bd60368684fe52a704d46f9e7fc413918f18d046ec40e1", size = 216129, upload-time = "2025-10-15T15:13:25.371Z" },
- { url = "https://files.pythonhosted.org/packages/96/5d/dc5fa98fea3c175caf9d360649cb1aa3715e391ab00dc78c4c66fabd7356/coverage-7.11.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f39ae2f63f37472c17b4990f794035c9890418b1b8cca75c01193f3c8d3e01be", size = 216380, upload-time = "2025-10-15T15:13:26.976Z" },
- { url = "https://files.pythonhosted.org/packages/b2/f5/3da9cc9596708273385189289c0e4d8197d37a386bdf17619013554b3447/coverage-7.11.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:7db53b5cdd2917b6eaadd0b1251cf4e7d96f4a8d24e174bdbdf2f65b5ea7994d", size = 247375, upload-time = "2025-10-15T15:13:28.923Z" },
- { url = "https://files.pythonhosted.org/packages/65/6c/f7f59c342359a235559d2bc76b0c73cfc4bac7d61bb0df210965cb1ecffd/coverage-7.11.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:10ad04ac3a122048688387828b4537bc9cf60c0bf4869c1e9989c46e45690b82", size = 249978, upload-time = "2025-10-15T15:13:30.525Z" },
- { url = "https://files.pythonhosted.org/packages/e7/8c/042dede2e23525e863bf1ccd2b92689692a148d8b5fd37c37899ba882645/coverage-7.11.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4036cc9c7983a2b1f2556d574d2eb2154ac6ed55114761685657e38782b23f52", size = 251253, upload-time = "2025-10-15T15:13:32.174Z" },
- { url = "https://files.pythonhosted.org/packages/7b/a9/3c58df67bfa809a7bddd786356d9c5283e45d693edb5f3f55d0986dd905a/coverage-7.11.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:7ab934dd13b1c5e94b692b1e01bd87e4488cb746e3a50f798cb9464fd128374b", size = 247591, upload-time = "2025-10-15T15:13:34.147Z" },
- { url = "https://files.pythonhosted.org/packages/26/5b/c7f32efd862ee0477a18c41e4761305de6ddd2d49cdeda0c1116227570fd/coverage-7.11.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:59a6e5a265f7cfc05f76e3bb53eca2e0dfe90f05e07e849930fecd6abb8f40b4", size = 249411, upload-time = "2025-10-15T15:13:38.425Z" },
- { url = "https://files.pythonhosted.org/packages/76/b5/78cb4f1e86c1611431c990423ec0768122905b03837e1b4c6a6f388a858b/coverage-7.11.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:df01d6c4c81e15a7c88337b795bb7595a8596e92310266b5072c7e301168efbd", size = 247303, upload-time = "2025-10-15T15:13:40.464Z" },
- { url = "https://files.pythonhosted.org/packages/87/c9/23c753a8641a330f45f221286e707c427e46d0ffd1719b080cedc984ec40/coverage-7.11.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:8c934bd088eed6174210942761e38ee81d28c46de0132ebb1801dbe36a390dcc", size = 247157, upload-time = "2025-10-15T15:13:42.087Z" },
- { url = "https://files.pythonhosted.org/packages/c5/42/6e0cc71dc8a464486e944a4fa0d85bdec031cc2969e98ed41532a98336b9/coverage-7.11.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5a03eaf7ec24078ad64a07f02e30060aaf22b91dedf31a6b24d0d98d2bba7f48", size = 248921, upload-time = "2025-10-15T15:13:43.715Z" },
- { url = "https://files.pythonhosted.org/packages/e8/1c/743c2ef665e6858cccb0f84377dfe3a4c25add51e8c7ef19249be92465b6/coverage-7.11.0-cp313-cp313-win32.whl", hash = "sha256:695340f698a5f56f795b2836abe6fb576e7c53d48cd155ad2f80fd24bc63a040", size = 218526, upload-time = "2025-10-15T15:13:45.336Z" },
- { url = "https://files.pythonhosted.org/packages/ff/d5/226daadfd1bf8ddbccefbd3aa3547d7b960fb48e1bdac124e2dd13a2b71a/coverage-7.11.0-cp313-cp313-win_amd64.whl", hash = "sha256:2727d47fce3ee2bac648528e41455d1b0c46395a087a229deac75e9f88ba5a05", size = 219317, upload-time = "2025-10-15T15:13:47.401Z" },
- { url = "https://files.pythonhosted.org/packages/97/54/47db81dcbe571a48a298f206183ba8a7ba79200a37cd0d9f4788fcd2af4a/coverage-7.11.0-cp313-cp313-win_arm64.whl", hash = "sha256:0efa742f431529699712b92ecdf22de8ff198df41e43aeaaadf69973eb93f17a", size = 217948, upload-time = "2025-10-15T15:13:49.096Z" },
- { url = "https://files.pythonhosted.org/packages/e5/8b/cb68425420154e7e2a82fd779a8cc01549b6fa83c2ad3679cd6c088ebd07/coverage-7.11.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:587c38849b853b157706407e9ebdca8fd12f45869edb56defbef2daa5fb0812b", size = 216837, upload-time = "2025-10-15T15:13:51.09Z" },
- { url = "https://files.pythonhosted.org/packages/33/55/9d61b5765a025685e14659c8d07037247de6383c0385757544ffe4606475/coverage-7.11.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b971bdefdd75096163dd4261c74be813c4508477e39ff7b92191dea19f24cd37", size = 217061, upload-time = "2025-10-15T15:13:52.747Z" },
- { url = "https://files.pythonhosted.org/packages/52/85/292459c9186d70dcec6538f06ea251bc968046922497377bf4a1dc9a71de/coverage-7.11.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:269bfe913b7d5be12ab13a95f3a76da23cf147be7fa043933320ba5625f0a8de", size = 258398, upload-time = "2025-10-15T15:13:54.45Z" },
- { url = "https://files.pythonhosted.org/packages/1f/e2/46edd73fb8bf51446c41148d81944c54ed224854812b6ca549be25113ee0/coverage-7.11.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:dadbcce51a10c07b7c72b0ce4a25e4b6dcb0c0372846afb8e5b6307a121eb99f", size = 260574, upload-time = "2025-10-15T15:13:56.145Z" },
- { url = "https://files.pythonhosted.org/packages/07/5e/1df469a19007ff82e2ca8fe509822820a31e251f80ee7344c34f6cd2ec43/coverage-7.11.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9ed43fa22c6436f7957df036331f8fe4efa7af132054e1844918866cd228af6c", size = 262797, upload-time = "2025-10-15T15:13:58.635Z" },
- { url = "https://files.pythonhosted.org/packages/f9/50/de216b31a1434b94d9b34a964c09943c6be45069ec704bfc379d8d89a649/coverage-7.11.0-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:9516add7256b6713ec08359b7b05aeff8850c98d357784c7205b2e60aa2513fa", size = 257361, upload-time = "2025-10-15T15:14:00.409Z" },
- { url = "https://files.pythonhosted.org/packages/82/1e/3f9f8344a48111e152e0fd495b6fff13cc743e771a6050abf1627a7ba918/coverage-7.11.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:eb92e47c92fcbcdc692f428da67db33337fa213756f7adb6a011f7b5a7a20740", size = 260349, upload-time = "2025-10-15T15:14:02.188Z" },
- { url = "https://files.pythonhosted.org/packages/65/9b/3f52741f9e7d82124272f3070bbe316006a7de1bad1093f88d59bfc6c548/coverage-7.11.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:d06f4fc7acf3cabd6d74941d53329e06bab00a8fe10e4df2714f0b134bfc64ef", size = 258114, upload-time = "2025-10-15T15:14:03.907Z" },
- { url = "https://files.pythonhosted.org/packages/0b/8b/918f0e15f0365d50d3986bbd3338ca01178717ac5678301f3f547b6619e6/coverage-7.11.0-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:6fbcee1a8f056af07ecd344482f711f563a9eb1c2cad192e87df00338ec3cdb0", size = 256723, upload-time = "2025-10-15T15:14:06.324Z" },
- { url = "https://files.pythonhosted.org/packages/44/9e/7776829f82d3cf630878a7965a7d70cc6ca94f22c7d20ec4944f7148cb46/coverage-7.11.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:dbbf012be5f32533a490709ad597ad8a8ff80c582a95adc8d62af664e532f9ca", size = 259238, upload-time = "2025-10-15T15:14:08.002Z" },
- { url = "https://files.pythonhosted.org/packages/9a/b8/49cf253e1e7a3bedb85199b201862dd7ca4859f75b6cf25ffa7298aa0760/coverage-7.11.0-cp313-cp313t-win32.whl", hash = "sha256:cee6291bb4fed184f1c2b663606a115c743df98a537c969c3c64b49989da96c2", size = 219180, upload-time = "2025-10-15T15:14:09.786Z" },
- { url = "https://files.pythonhosted.org/packages/ac/e1/1a541703826be7ae2125a0fb7f821af5729d56bb71e946e7b933cc7a89a4/coverage-7.11.0-cp313-cp313t-win_amd64.whl", hash = "sha256:a386c1061bf98e7ea4758e4313c0ab5ecf57af341ef0f43a0bf26c2477b5c268", size = 220241, upload-time = "2025-10-15T15:14:11.471Z" },
- { url = "https://files.pythonhosted.org/packages/d5/d1/5ee0e0a08621140fd418ec4020f595b4d52d7eb429ae6a0c6542b4ba6f14/coverage-7.11.0-cp313-cp313t-win_arm64.whl", hash = "sha256:f9ea02ef40bb83823b2b04964459d281688fe173e20643870bb5d2edf68bc836", size = 218510, upload-time = "2025-10-15T15:14:13.46Z" },
- { url = "https://files.pythonhosted.org/packages/f4/06/e923830c1985ce808e40a3fa3eb46c13350b3224b7da59757d37b6ce12b8/coverage-7.11.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:c770885b28fb399aaf2a65bbd1c12bf6f307ffd112d6a76c5231a94276f0c497", size = 216110, upload-time = "2025-10-15T15:14:15.157Z" },
- { url = "https://files.pythonhosted.org/packages/42/82/cdeed03bfead45203fb651ed756dfb5266028f5f939e7f06efac4041dad5/coverage-7.11.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:a3d0e2087dba64c86a6b254f43e12d264b636a39e88c5cc0a01a7c71bcfdab7e", size = 216395, upload-time = "2025-10-15T15:14:16.863Z" },
- { url = "https://files.pythonhosted.org/packages/fc/ba/e1c80caffc3199aa699813f73ff097bc2df7b31642bdbc7493600a8f1de5/coverage-7.11.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:73feb83bb41c32811973b8565f3705caf01d928d972b72042b44e97c71fd70d1", size = 247433, upload-time = "2025-10-15T15:14:18.589Z" },
- { url = "https://files.pythonhosted.org/packages/80/c0/5b259b029694ce0a5bbc1548834c7ba3db41d3efd3474489d7efce4ceb18/coverage-7.11.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:c6f31f281012235ad08f9a560976cc2fc9c95c17604ff3ab20120fe480169bca", size = 249970, upload-time = "2025-10-15T15:14:20.307Z" },
- { url = "https://files.pythonhosted.org/packages/8c/86/171b2b5e1aac7e2fd9b43f7158b987dbeb95f06d1fbecad54ad8163ae3e8/coverage-7.11.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e9570ad567f880ef675673992222746a124b9595506826b210fbe0ce3f0499cd", size = 251324, upload-time = "2025-10-15T15:14:22.419Z" },
- { url = "https://files.pythonhosted.org/packages/1a/7e/7e10414d343385b92024af3932a27a1caf75c6e27ee88ba211221ff1a145/coverage-7.11.0-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:8badf70446042553a773547a61fecaa734b55dc738cacf20c56ab04b77425e43", size = 247445, upload-time = "2025-10-15T15:14:24.205Z" },
- { url = "https://files.pythonhosted.org/packages/c4/3b/e4f966b21f5be8c4bf86ad75ae94efa0de4c99c7bbb8114476323102e345/coverage-7.11.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:a09c1211959903a479e389685b7feb8a17f59ec5a4ef9afde7650bd5eabc2777", size = 249324, upload-time = "2025-10-15T15:14:26.234Z" },
- { url = "https://files.pythonhosted.org/packages/00/a2/8479325576dfcd909244d0df215f077f47437ab852ab778cfa2f8bf4d954/coverage-7.11.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:5ef83b107f50db3f9ae40f69e34b3bd9337456c5a7fe3461c7abf8b75dd666a2", size = 247261, upload-time = "2025-10-15T15:14:28.42Z" },
- { url = "https://files.pythonhosted.org/packages/7b/d8/3a9e2db19d94d65771d0f2e21a9ea587d11b831332a73622f901157cc24b/coverage-7.11.0-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:f91f927a3215b8907e214af77200250bb6aae36eca3f760f89780d13e495388d", size = 247092, upload-time = "2025-10-15T15:14:30.784Z" },
- { url = "https://files.pythonhosted.org/packages/b3/b1/bbca3c472544f9e2ad2d5116b2379732957048be4b93a9c543fcd0207e5f/coverage-7.11.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:cdbcd376716d6b7fbfeedd687a6c4be019c5a5671b35f804ba76a4c0a778cba4", size = 248755, upload-time = "2025-10-15T15:14:32.585Z" },
- { url = "https://files.pythonhosted.org/packages/89/49/638d5a45a6a0f00af53d6b637c87007eb2297042186334e9923a61aa8854/coverage-7.11.0-cp314-cp314-win32.whl", hash = "sha256:bab7ec4bb501743edc63609320aaec8cd9188b396354f482f4de4d40a9d10721", size = 218793, upload-time = "2025-10-15T15:14:34.972Z" },
- { url = "https://files.pythonhosted.org/packages/30/cc/b675a51f2d068adb3cdf3799212c662239b0ca27f4691d1fff81b92ea850/coverage-7.11.0-cp314-cp314-win_amd64.whl", hash = "sha256:3d4ba9a449e9364a936a27322b20d32d8b166553bfe63059bd21527e681e2fad", size = 219587, upload-time = "2025-10-15T15:14:37.047Z" },
- { url = "https://files.pythonhosted.org/packages/93/98/5ac886876026de04f00820e5094fe22166b98dcb8b426bf6827aaf67048c/coverage-7.11.0-cp314-cp314-win_arm64.whl", hash = "sha256:ce37f215223af94ef0f75ac68ea096f9f8e8c8ec7d6e8c346ee45c0d363f0479", size = 218168, upload-time = "2025-10-15T15:14:38.861Z" },
- { url = "https://files.pythonhosted.org/packages/14/d1/b4145d35b3e3ecf4d917e97fc8895bcf027d854879ba401d9ff0f533f997/coverage-7.11.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:f413ce6e07e0d0dc9c433228727b619871532674b45165abafe201f200cc215f", size = 216850, upload-time = "2025-10-15T15:14:40.651Z" },
- { url = "https://files.pythonhosted.org/packages/ca/d1/7f645fc2eccd318369a8a9948acc447bb7c1ade2911e31d3c5620544c22b/coverage-7.11.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:05791e528a18f7072bf5998ba772fe29db4da1234c45c2087866b5ba4dea710e", size = 217071, upload-time = "2025-10-15T15:14:42.755Z" },
- { url = "https://files.pythonhosted.org/packages/54/7d/64d124649db2737ceced1dfcbdcb79898d5868d311730f622f8ecae84250/coverage-7.11.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:cacb29f420cfeb9283b803263c3b9a068924474ff19ca126ba9103e1278dfa44", size = 258570, upload-time = "2025-10-15T15:14:44.542Z" },
- { url = "https://files.pythonhosted.org/packages/6c/3f/6f5922f80dc6f2d8b2c6f974835c43f53eb4257a7797727e6ca5b7b2ec1f/coverage-7.11.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:314c24e700d7027ae3ab0d95fbf8d53544fca1f20345fd30cd219b737c6e58d3", size = 260738, upload-time = "2025-10-15T15:14:46.436Z" },
- { url = "https://files.pythonhosted.org/packages/0e/5f/9e883523c4647c860b3812b417a2017e361eca5b635ee658387dc11b13c1/coverage-7.11.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:630d0bd7a293ad2fc8b4b94e5758c8b2536fdf36c05f1681270203e463cbfa9b", size = 262994, upload-time = "2025-10-15T15:14:48.3Z" },
- { url = "https://files.pythonhosted.org/packages/07/bb/43b5a8e94c09c8bf51743ffc65c4c841a4ca5d3ed191d0a6919c379a1b83/coverage-7.11.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e89641f5175d65e2dbb44db15fe4ea48fade5d5bbb9868fdc2b4fce22f4a469d", size = 257282, upload-time = "2025-10-15T15:14:50.236Z" },
- { url = "https://files.pythonhosted.org/packages/aa/e5/0ead8af411411330b928733e1d201384b39251a5f043c1612970310e8283/coverage-7.11.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:c9f08ea03114a637dab06cedb2e914da9dc67fa52c6015c018ff43fdde25b9c2", size = 260430, upload-time = "2025-10-15T15:14:52.413Z" },
- { url = "https://files.pythonhosted.org/packages/ae/66/03dd8bb0ba5b971620dcaac145461950f6d8204953e535d2b20c6b65d729/coverage-7.11.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:ce9f3bde4e9b031eaf1eb61df95c1401427029ea1bfddb8621c1161dcb0fa02e", size = 258190, upload-time = "2025-10-15T15:14:54.268Z" },
- { url = "https://files.pythonhosted.org/packages/45/ae/28a9cce40bf3174426cb2f7e71ee172d98e7f6446dff936a7ccecee34b14/coverage-7.11.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:e4dc07e95495923d6fd4d6c27bf70769425b71c89053083843fd78f378558996", size = 256658, upload-time = "2025-10-15T15:14:56.436Z" },
- { url = "https://files.pythonhosted.org/packages/5c/7c/3a44234a8599513684bfc8684878fd7b126c2760f79712bb78c56f19efc4/coverage-7.11.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:424538266794db2861db4922b05d729ade0940ee69dcf0591ce8f69784db0e11", size = 259342, upload-time = "2025-10-15T15:14:58.538Z" },
- { url = "https://files.pythonhosted.org/packages/e1/e6/0108519cba871af0351725ebdb8660fd7a0fe2ba3850d56d32490c7d9b4b/coverage-7.11.0-cp314-cp314t-win32.whl", hash = "sha256:4c1eeb3fb8eb9e0190bebafd0462936f75717687117339f708f395fe455acc73", size = 219568, upload-time = "2025-10-15T15:15:00.382Z" },
- { url = "https://files.pythonhosted.org/packages/c9/76/44ba876e0942b4e62fdde23ccb029ddb16d19ba1bef081edd00857ba0b16/coverage-7.11.0-cp314-cp314t-win_amd64.whl", hash = "sha256:b56efee146c98dbf2cf5cffc61b9829d1e94442df4d7398b26892a53992d3547", size = 220687, upload-time = "2025-10-15T15:15:02.322Z" },
- { url = "https://files.pythonhosted.org/packages/b9/0c/0df55ecb20d0d0ed5c322e10a441775e1a3a5d78c60f0c4e1abfe6fcf949/coverage-7.11.0-cp314-cp314t-win_arm64.whl", hash = "sha256:b5c2705afa83f49bd91962a4094b6b082f94aef7626365ab3f8f4bd159c5acf3", size = 218711, upload-time = "2025-10-15T15:15:04.575Z" },
- { url = "https://files.pythonhosted.org/packages/5f/04/642c1d8a448ae5ea1369eac8495740a79eb4e581a9fb0cbdce56bbf56da1/coverage-7.11.0-py3-none-any.whl", hash = "sha256:4b7589765348d78fb4e5fb6ea35d07564e387da2fc5efff62e0222971f155f68", size = 207761, upload-time = "2025-10-15T15:15:06.439Z" },
+version = "7.11.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/89/12/3e2d2ec71796e0913178478e693a06af6a3bc9f7f9cb899bf85a426d8370/coverage-7.11.1.tar.gz", hash = "sha256:b4b3a072559578129a9e863082a2972a2abd8975bc0e2ec57da96afcd6580a8a", size = 814037, upload-time = "2025-11-07T10:52:41.067Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/0f/31/04af7e42fdb3681e4d73d37bf3f375f0488aa38d1001ee746c7dbfe09643/coverage-7.11.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:421e2d237dcecdefa9b77cae1aa0dfff5c495f29e053e776172457e289976311", size = 216896, upload-time = "2025-11-07T10:50:31.429Z" },
+ { url = "https://files.pythonhosted.org/packages/f5/e9/1c3628a1225bdea66295a117cd2bb1d324d9c433c40078b24d50f55448a7/coverage-7.11.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:08ef89c812072ecd52a862b46e131f75596475d23cc7f5a75410394341d4332f", size = 217261, upload-time = "2025-11-07T10:50:33.008Z" },
+ { url = "https://files.pythonhosted.org/packages/2b/80/4d4f943da23c432b2bba8664f4eada9b19911081852e8cc89776c61d0b94/coverage-7.11.1-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:bc6e0b2d6ed317810b4e435ffabc31b2d517d6ceb4183dfd6af4748c52d170eb", size = 248742, upload-time = "2025-11-07T10:50:34.634Z" },
+ { url = "https://files.pythonhosted.org/packages/e3/e1/c4b42f02fbb6ce08e05d7a2b26bcf5df11d3e67a3806e40415f7ab9511e7/coverage-7.11.1-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b59736704df8b1f8b1dafb36b16f2ef8a952e4410465634442459426bd2319ae", size = 251503, upload-time = "2025-11-07T10:50:36.501Z" },
+ { url = "https://files.pythonhosted.org/packages/31/a8/3df60e88f1dabccae4994c6df4a2f23d4cd0eee27fc3ae8f0bb2e78cb538/coverage-7.11.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:843816452d8bfc4c2be72546b3b382850cb91150feaa963ec7d2b665ec9d4768", size = 252590, upload-time = "2025-11-07T10:50:38.059Z" },
+ { url = "https://files.pythonhosted.org/packages/06/1c/2b9fae11361b0348c2d3612a8179d2cc8b6b245e8b14d5479c75b9f18613/coverage-7.11.1-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:19363046125d4a423c25d3d7c90bab3a0230932c16014198f87a6b3960c1b187", size = 249133, upload-time = "2025-11-07T10:50:39.648Z" },
+ { url = "https://files.pythonhosted.org/packages/b8/2b/e33712a8eede02762a536bdc2f89e736e0ad87bd13b35d724306585aeb54/coverage-7.11.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4e37486aed7045c280ebdc207026bdef9267730177d929a5e25250e1f33cc125", size = 250524, upload-time = "2025-11-07T10:50:41.59Z" },
+ { url = "https://files.pythonhosted.org/packages/84/c9/6181877977a0f6e46b9c93a8382b8c671769fb12df8a15be8d6091541b77/coverage-7.11.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:7c68180e67b4843674bfb1d3ec928ffcfc94081b5da959e616405eca51c23356", size = 248673, upload-time = "2025-11-07T10:50:43.153Z" },
+ { url = "https://files.pythonhosted.org/packages/9b/d6/ff26c2eb57d4dcd46c6ed136d6b04aceb7f58f48dcc500c77f7194711a6f/coverage-7.11.1-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:cf825b60f94d1706c22d4887310db26cc3117d545ac6ad4229b4a0d718afcf9a", size = 248251, upload-time = "2025-11-07T10:50:45.069Z" },
+ { url = "https://files.pythonhosted.org/packages/f9/ff/411803f1fcb9efe00afbc96442564cc691f537541a8bde377cf1ac04e695/coverage-7.11.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:437149272ff0440df66044bd6ee87cbc252463754ca43cafa496cfb2f57f56dd", size = 250111, upload-time = "2025-11-07T10:50:46.701Z" },
+ { url = "https://files.pythonhosted.org/packages/c1/9f/781c045e1e5f8930f8266f224318040413b60837749d2ed11883b7478c81/coverage-7.11.1-cp312-cp312-win32.whl", hash = "sha256:98ea0b8d1addfc333494c2248af367e8ecb27724a99804a18376b801f876da58", size = 219407, upload-time = "2025-11-07T10:50:48.862Z" },
+ { url = "https://files.pythonhosted.org/packages/26/59/813d8eedc96a781e8a6f9c37f6ecb4326ebbffdafe2e1154ed2def468b76/coverage-7.11.1-cp312-cp312-win_amd64.whl", hash = "sha256:7d49a473799e55a465bcadd19525977ab80031b8b86baaa622241808df4585cd", size = 220220, upload-time = "2025-11-07T10:50:51.576Z" },
+ { url = "https://files.pythonhosted.org/packages/63/5f/c0905d9159d38194943a21d7d013f1c2f0c43e7d63f680ed56269728418a/coverage-7.11.1-cp312-cp312-win_arm64.whl", hash = "sha256:0c77e5951ab176a6ccb70c6f688fca2a7ac834753ba82ee4eb741be655f30b43", size = 218856, upload-time = "2025-11-07T10:50:53.591Z" },
+ { url = "https://files.pythonhosted.org/packages/f4/01/0c50c318f5e8f1a482da05d788d0ff06137803ed8fface4a1ba51e04b3ad/coverage-7.11.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:da9930594ca99d66eb6f613d7beba850db2f8dfa86810ee35ae24e4d5f2bb97d", size = 216920, upload-time = "2025-11-07T10:50:55.992Z" },
+ { url = "https://files.pythonhosted.org/packages/20/11/9f038e6c2baea968c377ab355b0d1d0a46b5f38985691bf51164e1b78c1f/coverage-7.11.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cc47a280dc014220b0fc6e5f55082a3f51854faf08fd9635b8a4f341c46c77d3", size = 217301, upload-time = "2025-11-07T10:50:57.609Z" },
+ { url = "https://files.pythonhosted.org/packages/68/cd/9dcf93d81d0cddaa0bba90c3b4580e6f1ddf833918b816930d250cc553a4/coverage-7.11.1-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:74003324321bbf130939146886eddf92e48e616b5910215e79dea6edeb8ee7c8", size = 248277, upload-time = "2025-11-07T10:50:59.442Z" },
+ { url = "https://files.pythonhosted.org/packages/11/f5/b2c7c494046c9c783d3cac4c812fc24d6104dd36a7a598e7dd6fea3e7927/coverage-7.11.1-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:211f7996265daab60a8249af4ca6641b3080769cbedcffc42cc4841118f3a305", size = 250871, upload-time = "2025-11-07T10:51:01.094Z" },
+ { url = "https://files.pythonhosted.org/packages/a5/5a/b359649566954498aa17d7c98093182576d9e435ceb4ea917b3b48d56f86/coverage-7.11.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:70619d194d8fea0cb028cb6bb9c85b519c7509c1d1feef1eea635183bc8ecd27", size = 252115, upload-time = "2025-11-07T10:51:03.087Z" },
+ { url = "https://files.pythonhosted.org/packages/f3/17/3cef1ede3739622950f0737605353b797ec564e70c9d254521b10f4b03ba/coverage-7.11.1-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e0208bb59d441cfa3321569040f8e455f9261256e0df776c5462a1e5a9b31e13", size = 248442, upload-time = "2025-11-07T10:51:04.888Z" },
+ { url = "https://files.pythonhosted.org/packages/5f/63/d5854c47ae42d9d18855329db6bc528f5b7f4f874257edb00cf8b483f9f8/coverage-7.11.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:545714d8765bda1c51f8b1c96e0b497886a054471c68211e76ef49dd1468587d", size = 250253, upload-time = "2025-11-07T10:51:06.515Z" },
+ { url = "https://files.pythonhosted.org/packages/48/e8/c7706f8a5358a59c18b489e7e19e83d6161b7c8bc60771f95920570c94a8/coverage-7.11.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d0a2b02c1e20158dd405054bcca87f91fd5b7605626aee87150819ea616edd67", size = 248217, upload-time = "2025-11-07T10:51:08.405Z" },
+ { url = "https://files.pythonhosted.org/packages/5b/c9/a2136dfb168eb09e2f6d9d6b6c986243fdc0b3866a9376adb263d3c3378b/coverage-7.11.1-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:e0f4aa986a4308a458e0fb572faa3eb3db2ea7ce294604064b25ab32b435a468", size = 248040, upload-time = "2025-11-07T10:51:10.626Z" },
+ { url = "https://files.pythonhosted.org/packages/18/9a/a63991c0608ddc6adf65e6f43124951aaf36bd79f41937b028120b8268ea/coverage-7.11.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d51cc6687e8bbfd1e041f52baed0f979cd592242cf50bf18399a7e03afc82d88", size = 249801, upload-time = "2025-11-07T10:51:12.63Z" },
+ { url = "https://files.pythonhosted.org/packages/84/19/947acf7c0c6e90e4ec3abf474133ed36d94407d07e36eafdfd3acb59fee9/coverage-7.11.1-cp313-cp313-win32.whl", hash = "sha256:1b3067db3afe6deeca2b2c9f0ec23820d5f1bd152827acfadf24de145dfc5f66", size = 219430, upload-time = "2025-11-07T10:51:14.329Z" },
+ { url = "https://files.pythonhosted.org/packages/35/54/36fef7afb3884450c7b6d494fcabe2fab7c669d547c800ca30f41c1dc212/coverage-7.11.1-cp313-cp313-win_amd64.whl", hash = "sha256:39a4c44b0cd40e3c9d89b2b7303ebd6ab9ae8a63f9e9a8c4d65a181a0b33aebe", size = 220239, upload-time = "2025-11-07T10:51:16.418Z" },
+ { url = "https://files.pythonhosted.org/packages/d3/dc/7d38bb99e8e69200b7dd5de15507226bd90eac102dfc7cc891b9934cdc76/coverage-7.11.1-cp313-cp313-win_arm64.whl", hash = "sha256:a2e3560bf82fa8169a577e054cbbc29888699526063fee26ea59ea2627fd6e73", size = 218868, upload-time = "2025-11-07T10:51:18.186Z" },
+ { url = "https://files.pythonhosted.org/packages/36/c6/d1ff54fbd6bcad42dbcfd13b417e636ef84aae194353b1ef3361700f2525/coverage-7.11.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:47a4f362a10285897ab3aa7a4b37d28213a4f2626823923613d6d7a3584dd79a", size = 217615, upload-time = "2025-11-07T10:51:21.065Z" },
+ { url = "https://files.pythonhosted.org/packages/73/f9/6ed59e7cf1488d6f975e5b14ef836f5e537913523e92175135f8518a83ce/coverage-7.11.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:0df35fa7419ef571db9dacd50b0517bc54dbfe37eb94043b5fc3540bff276acd", size = 217960, upload-time = "2025-11-07T10:51:22.797Z" },
+ { url = "https://files.pythonhosted.org/packages/c4/74/2dab1dc2ebe16f074f80ae483b0f45faf278d102be703ac01b32cd85b6c3/coverage-7.11.1-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:e1a2c621d341c9d56f7917e56fbb56be4f73fe0d0e8dae28352fb095060fd467", size = 259262, upload-time = "2025-11-07T10:51:24.467Z" },
+ { url = "https://files.pythonhosted.org/packages/15/49/eccfe039663e29a50a54b0c2c8d076acd174d7ac50d018ef8a5b1c37c8dc/coverage-7.11.1-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:6c354b111be9b2234d9573d75dd30ca4e414b7659c730e477e89be4f620b3fb5", size = 261326, upload-time = "2025-11-07T10:51:26.232Z" },
+ { url = "https://files.pythonhosted.org/packages/f0/bb/2b829aa23fd5ee8318e33cc02a606eb09900921291497963adc3f06af8bb/coverage-7.11.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4589bd44698728f600233fb2881014c9b8ec86637ef454c00939e779661dbe7e", size = 263758, upload-time = "2025-11-07T10:51:27.912Z" },
+ { url = "https://files.pythonhosted.org/packages/ac/03/d44c3d70e5da275caf2cad2071da6b425412fbcb1d1d5a81f1f89b45e3f1/coverage-7.11.1-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c6956fc8754f2309131230272a7213a483a32ecbe29e2b9316d808a28f2f8ea1", size = 258444, upload-time = "2025-11-07T10:51:30.107Z" },
+ { url = "https://files.pythonhosted.org/packages/a9/c1/cf61d9f46ae088774c65dd3387a15dfbc72de90c1f6e105025e9eda19b42/coverage-7.11.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:63926a97ed89dc6a087369b92dcb8b9a94cead46c08b33a7f1f4818cd8b6a3c3", size = 261335, upload-time = "2025-11-07T10:51:31.814Z" },
+ { url = "https://files.pythonhosted.org/packages/95/9a/b3299bb14f11f2364d78a2b9704491b15395e757af6116694731ce4e5834/coverage-7.11.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:f5311ba00c53a7fb2b293fdc1f478b7286fe2a845a7ba9cda053f6e98178f0b4", size = 258951, upload-time = "2025-11-07T10:51:33.925Z" },
+ { url = "https://files.pythonhosted.org/packages/3f/a3/73cb2763e59f14ba6d8d6444b1f640a9be2242bfb59b7e50581c695db7ff/coverage-7.11.1-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:31bf5ffad84c974f9e72ac53493350f36b6fa396109159ec704210698f12860b", size = 257840, upload-time = "2025-11-07T10:51:36.092Z" },
+ { url = "https://files.pythonhosted.org/packages/85/db/482e72589a952027e238ffa3a15f192c552e0685fd0c5220ad05b5f17d56/coverage-7.11.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:227ee59fbc4a8c57a7383a1d7af6ca94a78ae3beee4045f38684548a8479a65b", size = 260040, upload-time = "2025-11-07T10:51:38.277Z" },
+ { url = "https://files.pythonhosted.org/packages/18/a1/b931d3ee099c2dca8e9ea56c07ae84c0f91562f7bbbcccab8c91b3474ef1/coverage-7.11.1-cp313-cp313t-win32.whl", hash = "sha256:a447d97b3ce680bb1da2e6bd822ebb71be6a1fb77ce2c2ad2fe4bd8aacec3058", size = 220102, upload-time = "2025-11-07T10:51:40.017Z" },
+ { url = "https://files.pythonhosted.org/packages/9a/53/b553b7bfa6207def4918f0cb72884c844fa4c3f1566e58fbb4f34e54cdc5/coverage-7.11.1-cp313-cp313t-win_amd64.whl", hash = "sha256:d6d11180437c67bde2248563a42b8e5bbf85c8df78fae13bf818ad17bfb15f02", size = 221166, upload-time = "2025-11-07T10:51:41.921Z" },
+ { url = "https://files.pythonhosted.org/packages/6b/45/1c1d58b3ed585598764bd2fe41fcf60ccafe15973ad621c322ba52e22d32/coverage-7.11.1-cp313-cp313t-win_arm64.whl", hash = "sha256:1e19a4c43d612760c6f7190411fb157e2d8a6dde00c91b941d43203bd3b17f6f", size = 219439, upload-time = "2025-11-07T10:51:43.753Z" },
+ { url = "https://files.pythonhosted.org/packages/d9/c2/ac2c3417eaa4de1361036ebbc7da664242b274b2e00c4b4a1cfc7b29920b/coverage-7.11.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:0305463c45c5f21f0396cd5028de92b1f1387e2e0756a85dd3147daa49f7a674", size = 216967, upload-time = "2025-11-07T10:51:45.55Z" },
+ { url = "https://files.pythonhosted.org/packages/5e/a3/afef455d03c468ee303f9df9a6f407e8bea64cd576fca914ff888faf52ca/coverage-7.11.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:fa4d468d5efa1eb6e3062be8bd5f45cbf28257a37b71b969a8c1da2652dfec77", size = 217298, upload-time = "2025-11-07T10:51:47.31Z" },
+ { url = "https://files.pythonhosted.org/packages/9d/59/6e2fb3fb58637001132dc32228b4fb5b332d75d12f1353cb00fe084ee0ba/coverage-7.11.1-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d2b2f5fc8fe383cbf2d5c77d6c4b2632ede553bc0afd0cdc910fa5390046c290", size = 248337, upload-time = "2025-11-07T10:51:49.48Z" },
+ { url = "https://files.pythonhosted.org/packages/1d/5e/ce442bab963e3388658da8bde6ddbd0a15beda230afafaa25e3c487dc391/coverage-7.11.1-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bde6488c1ad509f4fb1a4f9960fd003d5a94adef61e226246f9699befbab3276", size = 250853, upload-time = "2025-11-07T10:51:51.215Z" },
+ { url = "https://files.pythonhosted.org/packages/d1/2f/43f94557924ca9b64e09f1c3876da4eec44a05a41e27b8a639d899716c0e/coverage-7.11.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a69e0d6fa0b920fe6706a898c52955ec5bcfa7e45868215159f45fd87ea6da7c", size = 252190, upload-time = "2025-11-07T10:51:53.262Z" },
+ { url = "https://files.pythonhosted.org/packages/8c/fa/a04e769b92bc5628d4bd909dcc3c8219efe5e49f462e29adc43e198ecfde/coverage-7.11.1-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:976e51e4a549b80e4639eda3a53e95013a14ff6ad69bb58ed604d34deb0e774c", size = 248335, upload-time = "2025-11-07T10:51:55.388Z" },
+ { url = "https://files.pythonhosted.org/packages/99/d0/b98ab5d2abe425c71117a7c690ead697a0b32b83256bf0f566c726b7f77b/coverage-7.11.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:d61fcc4d384c82971a3d9cf00d0872881f9ded19404c714d6079b7a4547e2955", size = 250209, upload-time = "2025-11-07T10:51:57.263Z" },
+ { url = "https://files.pythonhosted.org/packages/9c/3f/b9c4fbd2e6d1b64098f99fb68df7f7c1b3e0a0968d24025adb24f359cdec/coverage-7.11.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:284c5df762b533fae3ebd764e3b81c20c1c9648d93ef34469759cb4e3dfe13d0", size = 248163, upload-time = "2025-11-07T10:51:59.014Z" },
+ { url = "https://files.pythonhosted.org/packages/08/fc/3e4d54fb6368b0628019eefd897fc271badbd025410fd5421a65fb58758f/coverage-7.11.1-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:bab32cb1d4ad2ac6dcc4e17eee5fa136c2a1d14ae914e4bce6c8b78273aece3c", size = 247983, upload-time = "2025-11-07T10:52:01.027Z" },
+ { url = "https://files.pythonhosted.org/packages/b9/4a/a5700764a12e932b35afdddb2f59adbca289c1689455d06437f609f3ef35/coverage-7.11.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:36f2fed9ce392ca450fb4e283900d0b41f05c8c5db674d200f471498be3ce747", size = 249646, upload-time = "2025-11-07T10:52:02.856Z" },
+ { url = "https://files.pythonhosted.org/packages/0e/2c/45ed33d9e80a1cc9b44b4bd535d44c154d3204671c65abd90ec1e99522a2/coverage-7.11.1-cp314-cp314-win32.whl", hash = "sha256:853136cecb92a5ba1cc8f61ec6ffa62ca3c88b4b386a6c835f8b833924f9a8c5", size = 219700, upload-time = "2025-11-07T10:52:05.05Z" },
+ { url = "https://files.pythonhosted.org/packages/90/d7/5845597360f6434af1290118ebe114642865f45ce47e7e822d9c07b371be/coverage-7.11.1-cp314-cp314-win_amd64.whl", hash = "sha256:77443d39143e20927259a61da0c95d55ffc31cf43086b8f0f11a92da5260d592", size = 220516, upload-time = "2025-11-07T10:52:07.259Z" },
+ { url = "https://files.pythonhosted.org/packages/ae/d0/d311a06f9cf7a48a98ffcfd0c57db0dcab6da46e75c439286a50dc648161/coverage-7.11.1-cp314-cp314-win_arm64.whl", hash = "sha256:829acb88fa47591a64bf5197e96a931ce9d4b3634c7f81a224ba3319623cdf6c", size = 219091, upload-time = "2025-11-07T10:52:09.216Z" },
+ { url = "https://files.pythonhosted.org/packages/a7/3d/c6a84da4fa9b840933045b19dd19d17b892f3f2dd1612903260291416dba/coverage-7.11.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:2ad1fe321d9522ea14399de83e75a11fb6a8887930c3679feb383301c28070d9", size = 217700, upload-time = "2025-11-07T10:52:11.348Z" },
+ { url = "https://files.pythonhosted.org/packages/94/10/a4fc5022017dd7ac682dc423849c241dfbdad31734b8f96060d84e70b587/coverage-7.11.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f69c332f0c3d1357c74decc9b1843fcd428cf9221bf196a20ad22aa1db3e1b6c", size = 217968, upload-time = "2025-11-07T10:52:13.203Z" },
+ { url = "https://files.pythonhosted.org/packages/59/2d/a554cd98924d296de5816413280ac3b09e42a05fb248d66f8d474d321938/coverage-7.11.1-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:576baeea4eebde684bf6c91c01e97171c8015765c8b2cfd4022a42b899897811", size = 259334, upload-time = "2025-11-07T10:52:15.079Z" },
+ { url = "https://files.pythonhosted.org/packages/05/98/d484cb659ec33958ca96b6f03438f56edc23b239d1ad0417b7a97fc1848a/coverage-7.11.1-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:28ad84c694fa86084cfd3c1eab4149844b8cb95bd8e5cbfc4a647f3ee2cce2b3", size = 261445, upload-time = "2025-11-07T10:52:17.134Z" },
+ { url = "https://files.pythonhosted.org/packages/f3/fa/920cba122cc28f4557c0507f8bd7c6e527ebcc537d0309186f66464a8fd9/coverage-7.11.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b1043ff958f09fc3f552c014d599f3c6b7088ba97d7bc1bd1cce8603cd75b520", size = 263858, upload-time = "2025-11-07T10:52:19.836Z" },
+ { url = "https://files.pythonhosted.org/packages/2a/a0/036397bdbee0f3bd46c2e26fdfbb1a61b2140bf9059240c37b61149047fa/coverage-7.11.1-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c6681add5060c2742dafcf29826dff1ff8eef889a3b03390daeed84361c428bd", size = 258381, upload-time = "2025-11-07T10:52:21.687Z" },
+ { url = "https://files.pythonhosted.org/packages/b6/61/2533926eb8990f182eb287f4873216c8ca530cc47241144aabf46fe80abe/coverage-7.11.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:773419b225ec9a75caa1e941dd0c83a91b92c2b525269e44e6ee3e4c630607db", size = 261321, upload-time = "2025-11-07T10:52:23.612Z" },
+ { url = "https://files.pythonhosted.org/packages/32/6e/618f7e203a998e4f6b8a0fa395744a416ad2adbcdc3735bc19466456718a/coverage-7.11.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:a9cb272a0e0157dbb9b2fd0b201b759bd378a1a6138a16536c025c2ce4f7643b", size = 258933, upload-time = "2025-11-07T10:52:25.514Z" },
+ { url = "https://files.pythonhosted.org/packages/22/40/6b1c27f772cb08a14a338647ead1254a57ee9dabbb4cacbc15df7f278741/coverage-7.11.1-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:e09adb2a7811dc75998eef68f47599cf699e2b62eed09c9fefaeb290b3920f34", size = 257756, upload-time = "2025-11-07T10:52:27.845Z" },
+ { url = "https://files.pythonhosted.org/packages/73/07/f9cd12f71307a785ea15b009c8d8cc2543e4a867bd04b8673843970b6b43/coverage-7.11.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:1335fa8c2a2fea49924d97e1e3500cfe8d7c849f5369f26bb7559ad4259ccfab", size = 260086, upload-time = "2025-11-07T10:52:29.776Z" },
+ { url = "https://files.pythonhosted.org/packages/34/02/31c5394f6f5d72a466966bcfdb61ce5a19862d452816d6ffcbb44add16ee/coverage-7.11.1-cp314-cp314t-win32.whl", hash = "sha256:4782d71d2a4fa7cef95e853b7097c8bbead4dbd0e6f9c7152a6b11a194b794db", size = 220483, upload-time = "2025-11-07T10:52:31.752Z" },
+ { url = "https://files.pythonhosted.org/packages/7f/96/81e1ef5fbfd5090113a96e823dbe055e4c58d96ca73b1fb0ad9d26f9ec36/coverage-7.11.1-cp314-cp314t-win_amd64.whl", hash = "sha256:939f45e66eceb63c75e8eb8fc58bb7077c00f1a41b0e15c6ef02334a933cfe93", size = 221592, upload-time = "2025-11-07T10:52:33.724Z" },
+ { url = "https://files.pythonhosted.org/packages/38/7a/a5d050de44951ac453a2046a0f3fb5471a4a557f0c914d00db27d543d94c/coverage-7.11.1-cp314-cp314t-win_arm64.whl", hash = "sha256:01c575bdbef35e3f023b50a146e9a75c53816e4f2569109458155cd2315f87d9", size = 219627, upload-time = "2025-11-07T10:52:36.285Z" },
+ { url = "https://files.pythonhosted.org/packages/76/32/bd9f48c28e23b2f08946f8e83983617b00619f5538dbd7e1045fa7e88c00/coverage-7.11.1-py3-none-any.whl", hash = "sha256:0fa848acb5f1da24765cee840e1afe9232ac98a8f9431c6112c15b34e880b9e8", size = 208689, upload-time = "2025-11-07T10:52:38.646Z" },
]
[[package]]
@@ -474,7 +474,7 @@ wheels = [
[[package]]
name = "fastapi"
-version = "0.120.1"
+version = "0.121.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "annotated-doc" },
@@ -482,9 +482,9 @@ dependencies = [
{ name = "starlette" },
{ name = "typing-extensions" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/40/cc/28aff6e246ee85bd571b26e4a793b84d42700e3bdc3008c3d747eda7b06d/fastapi-0.120.1.tar.gz", hash = "sha256:b5c6217e9ddca6dfcf54c97986180d4a1955e10c693d74943fc5327700178bff", size = 337616, upload-time = "2025-10-27T17:53:42.954Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/8c/e3/77a2df0946703973b9905fd0cde6172c15e0781984320123b4f5079e7113/fastapi-0.121.0.tar.gz", hash = "sha256:06663356a0b1ee93e875bbf05a31fb22314f5bed455afaaad2b2dad7f26e98fa", size = 342412, upload-time = "2025-11-03T10:25:54.818Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/7e/bb/1a74dbe87e9a595bf63052c886dfef965dc5b91d149456a8301eb3d41ce2/fastapi-0.120.1-py3-none-any.whl", hash = "sha256:0e8a2c328e96c117272d8c794d3a97d205f753cc2e69dd7ee387b7488a75601f", size = 108254, upload-time = "2025-10-27T17:53:40.076Z" },
+ { url = "https://files.pythonhosted.org/packages/dd/2c/42277afc1ba1a18f8358561eee40785d27becab8f80a1f945c0a3051c6eb/fastapi-0.121.0-py3-none-any.whl", hash = "sha256:8bdf1b15a55f4e4b0d6201033da9109ea15632cb76cf156e7b8b4019f2172106", size = 109183, upload-time = "2025-11-03T10:25:53.27Z" },
]
[[package]]
@@ -555,6 +555,8 @@ dev = [
]
tests = [
{ name = "psutil" },
+ { name = "pytest-asyncio" },
+ { name = "pytest-xdist" },
]
[package.metadata]
@@ -596,7 +598,11 @@ dev = [
{ name = "types-pyyaml", specifier = ">=6.0.12.20250516" },
{ name = "types-requests", specifier = ">=2.32.4.20250611" },
]
-tests = [{ name = "psutil", specifier = ">=7.0.0" }]
+tests = [
+ { name = "psutil", specifier = ">=7.0.0" },
+ { name = "pytest-asyncio", specifier = ">=0.26.0" },
+ { name = "pytest-xdist", specifier = ">=3.7.0" },
+]
[[package]]
name = "h11"
@@ -715,7 +721,7 @@ wheels = [
[[package]]
name = "ipython"
-version = "9.6.0"
+version = "9.7.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "colorama", marker = "sys_platform == 'win32'" },
@@ -729,9 +735,9 @@ dependencies = [
{ name = "stack-data" },
{ name = "traitlets" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/2a/34/29b18c62e39ee2f7a6a3bba7efd952729d8aadd45ca17efc34453b717665/ipython-9.6.0.tar.gz", hash = "sha256:5603d6d5d356378be5043e69441a072b50a5b33b4503428c77b04cb8ce7bc731", size = 4396932, upload-time = "2025-09-29T10:55:53.948Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/29/e6/48c74d54039241a456add616464ea28c6ebf782e4110d419411b83dae06f/ipython-9.7.0.tar.gz", hash = "sha256:5f6de88c905a566c6a9d6c400a8fed54a638e1f7543d17aae2551133216b1e4e", size = 4422115, upload-time = "2025-11-05T12:18:54.646Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/48/c5/d5e07995077e48220269c28a221e168c91123ad5ceee44d548f54a057fc0/ipython-9.6.0-py3-none-any.whl", hash = "sha256:5f77efafc886d2f023442479b8149e7d86547ad0a979e9da9f045d252f648196", size = 616170, upload-time = "2025-09-29T10:55:47.676Z" },
+ { url = "https://files.pythonhosted.org/packages/05/aa/62893d6a591d337aa59dcc4c6f6c842f1fe20cd72c8c5c1f980255243252/ipython-9.7.0-py3-none-any.whl", hash = "sha256:bce8ac85eb9521adc94e1845b4c03d88365fd6ac2f4908ec4ed1eb1b0a065f9f", size = 618911, upload-time = "2025-11-05T12:18:52.484Z" },
]
[[package]]
@@ -811,7 +817,7 @@ wheels = [
[[package]]
name = "mcp"
-version = "1.19.0"
+version = "1.21.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "anyio" },
@@ -820,15 +826,16 @@ dependencies = [
{ name = "jsonschema" },
{ name = "pydantic" },
{ name = "pydantic-settings" },
+ { name = "pyjwt", extra = ["crypto"] },
{ name = "python-multipart" },
{ name = "pywin32", marker = "sys_platform == 'win32'" },
{ name = "sse-starlette" },
{ name = "starlette" },
{ name = "uvicorn", marker = "sys_platform != 'emscripten'" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/69/2b/916852a5668f45d8787378461eaa1244876d77575ffef024483c94c0649c/mcp-1.19.0.tar.gz", hash = "sha256:213de0d3cd63f71bc08ffe9cc8d4409cc87acffd383f6195d2ce0457c021b5c1", size = 444163, upload-time = "2025-10-24T01:11:15.839Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/33/54/dd2330ef4611c27ae59124820863c34e1d3edb1133c58e6375e2d938c9c5/mcp-1.21.0.tar.gz", hash = "sha256:bab0a38e8f8c48080d787233343f8d301b0e1e95846ae7dead251b2421d99855", size = 452697, upload-time = "2025-11-06T23:19:58.432Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/ce/a3/3e71a875a08b6a830b88c40bc413bff01f1650f1efe8a054b5e90a9d4f56/mcp-1.19.0-py3-none-any.whl", hash = "sha256:f5907fe1c0167255f916718f376d05f09a830a215327a3ccdd5ec8a519f2e572", size = 170105, upload-time = "2025-10-24T01:11:14.151Z" },
+ { url = "https://files.pythonhosted.org/packages/39/47/850b6edc96c03bd44b00de9a0ca3c1cc71e0ba1cd5822955bc9e4eb3fad3/mcp-1.21.0-py3-none-any.whl", hash = "sha256:598619e53eb0b7a6513db38c426b28a4bdf57496fed04332100d2c56acade98b", size = 173672, upload-time = "2025-11-06T23:19:56.508Z" },
]
[[package]]
@@ -917,28 +924,28 @@ wheels = [
[[package]]
name = "psutil"
-version = "7.1.2"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/cd/ec/7b8e6b9b1d22708138630ef34c53ab2b61032c04f16adfdbb96791c8c70c/psutil-7.1.2.tar.gz", hash = "sha256:aa225cdde1335ff9684708ee8c72650f6598d5ed2114b9a7c5802030b1785018", size = 487424, upload-time = "2025-10-25T10:46:34.931Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/b8/d9/b56cc9f883140ac10021a8c9b0f4e16eed1ba675c22513cdcbce3ba64014/psutil-7.1.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0cc5c6889b9871f231ed5455a9a02149e388fffcb30b607fb7a8896a6d95f22e", size = 238575, upload-time = "2025-10-25T10:46:38.728Z" },
- { url = "https://files.pythonhosted.org/packages/36/eb/28d22de383888deb252c818622196e709da98816e296ef95afda33f1c0a2/psutil-7.1.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:8e9e77a977208d84aa363a4a12e0f72189d58bbf4e46b49aae29a2c6e93ef206", size = 239297, upload-time = "2025-10-25T10:46:41.347Z" },
- { url = "https://files.pythonhosted.org/packages/89/5d/220039e2f28cc129626e54d63892ab05c0d56a29818bfe7268dcb5008932/psutil-7.1.2-cp313-cp313t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7d9623a5e4164d2220ecceb071f4b333b3c78866141e8887c072129185f41278", size = 280420, upload-time = "2025-10-25T10:46:44.122Z" },
- { url = "https://files.pythonhosted.org/packages/ba/7a/286f0e1c167445b2ef4a6cbdfc8c59fdb45a5a493788950cf8467201dc73/psutil-7.1.2-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:364b1c10fe4ed59c89ec49e5f1a70da353b27986fa8233b4b999df4742a5ee2f", size = 283049, upload-time = "2025-10-25T10:46:47.095Z" },
- { url = "https://files.pythonhosted.org/packages/aa/cc/7eb93260794a42e39b976f3a4dde89725800b9f573b014fac142002a5c98/psutil-7.1.2-cp313-cp313t-win_amd64.whl", hash = "sha256:f101ef84de7e05d41310e3ccbdd65a6dd1d9eed85e8aaf0758405d022308e204", size = 248713, upload-time = "2025-10-25T10:46:49.573Z" },
- { url = "https://files.pythonhosted.org/packages/ab/1a/0681a92b53366e01f0a099f5237d0c8a2f79d322ac589cccde5e30c8a4e2/psutil-7.1.2-cp313-cp313t-win_arm64.whl", hash = "sha256:20c00824048a95de67f00afedc7b08b282aa08638585b0206a9fb51f28f1a165", size = 244644, upload-time = "2025-10-25T10:46:51.924Z" },
- { url = "https://files.pythonhosted.org/packages/56/9e/f1c5c746b4ed5320952acd3002d3962fe36f30524c00ea79fdf954cc6779/psutil-7.1.2-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:e09cfe92aa8e22b1ec5e2d394820cf86c5dff6367ac3242366485dfa874d43bc", size = 238640, upload-time = "2025-10-25T10:46:54.089Z" },
- { url = "https://files.pythonhosted.org/packages/32/ee/fd26216a735395cc25c3899634e34aeb41fb1f3dbb44acc67d9e594be562/psutil-7.1.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:fa6342cf859c48b19df3e4aa170e4cfb64aadc50b11e06bb569c6c777b089c9e", size = 239303, upload-time = "2025-10-25T10:46:56.932Z" },
- { url = "https://files.pythonhosted.org/packages/3c/cd/7d96eaec4ef7742b845a9ce2759a2769ecce4ab7a99133da24abacbc9e41/psutil-7.1.2-cp314-cp314t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:625977443498ee7d6c1e63e93bacca893fd759a66c5f635d05e05811d23fb5ee", size = 281717, upload-time = "2025-10-25T10:46:59.116Z" },
- { url = "https://files.pythonhosted.org/packages/bc/1a/7f0b84bdb067d35fe7fade5fff888408688caf989806ce2d6dae08c72dd5/psutil-7.1.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4a24bcd7b7f2918d934af0fb91859f621b873d6aa81267575e3655cd387572a7", size = 284575, upload-time = "2025-10-25T10:47:00.944Z" },
- { url = "https://files.pythonhosted.org/packages/de/05/7820ef8f7b275268917e0c750eada5834581206d9024ca88edce93c4b762/psutil-7.1.2-cp314-cp314t-win_amd64.whl", hash = "sha256:329f05610da6380982e6078b9d0881d9ab1e9a7eb7c02d833bfb7340aa634e31", size = 249491, upload-time = "2025-10-25T10:47:03.174Z" },
- { url = "https://files.pythonhosted.org/packages/db/9a/58de399c7cb58489f08498459ff096cd76b3f1ddc4f224ec2c5ef729c7d0/psutil-7.1.2-cp314-cp314t-win_arm64.whl", hash = "sha256:7b04c29e3c0c888e83ed4762b70f31e65c42673ea956cefa8ced0e31e185f582", size = 244880, upload-time = "2025-10-25T10:47:05.228Z" },
- { url = "https://files.pythonhosted.org/packages/ae/89/b9f8d47ddbc52d7301fc868e8224e5f44ed3c7f55e6d0f54ecaf5dd9ff5e/psutil-7.1.2-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:c9ba5c19f2d46203ee8c152c7b01df6eec87d883cfd8ee1af2ef2727f6b0f814", size = 237244, upload-time = "2025-10-25T10:47:07.086Z" },
- { url = "https://files.pythonhosted.org/packages/c8/7a/8628c2f6b240680a67d73d8742bb9ff39b1820a693740e43096d5dcb01e5/psutil-7.1.2-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:2a486030d2fe81bec023f703d3d155f4823a10a47c36784c84f1cc7f8d39bedb", size = 238101, upload-time = "2025-10-25T10:47:09.523Z" },
- { url = "https://files.pythonhosted.org/packages/30/28/5e27f4d5a0e347f8e3cc16cd7d35533dbce086c95807f1f0e9cd77e26c10/psutil-7.1.2-cp36-abi3-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3efd8fc791492e7808a51cb2b94889db7578bfaea22df931424f874468e389e3", size = 258675, upload-time = "2025-10-25T10:47:11.082Z" },
- { url = "https://files.pythonhosted.org/packages/e5/5c/79cf60c9acf36d087f0db0f82066fca4a780e97e5b3a2e4c38209c03d170/psutil-7.1.2-cp36-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e2aeb9b64f481b8eabfc633bd39e0016d4d8bbcd590d984af764d80bf0851b8a", size = 260203, upload-time = "2025-10-25T10:47:13.226Z" },
- { url = "https://files.pythonhosted.org/packages/f7/03/0a464404c51685dcb9329fdd660b1721e076ccd7b3d97dee066bcc9ffb15/psutil-7.1.2-cp37-abi3-win_amd64.whl", hash = "sha256:8e17852114c4e7996fe9da4745c2bdef001ebbf2f260dec406290e66628bdb91", size = 246714, upload-time = "2025-10-25T10:47:15.093Z" },
- { url = "https://files.pythonhosted.org/packages/6a/32/97ca2090f2f1b45b01b6aa7ae161cfe50671de097311975ca6eea3e7aabc/psutil-7.1.2-cp37-abi3-win_arm64.whl", hash = "sha256:3e988455e61c240cc879cb62a008c2699231bf3e3d061d7fce4234463fd2abb4", size = 243742, upload-time = "2025-10-25T10:47:17.302Z" },
+version = "7.1.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/e1/88/bdd0a41e5857d5d703287598cbf08dad90aed56774ea52ae071bae9071b6/psutil-7.1.3.tar.gz", hash = "sha256:6c86281738d77335af7aec228328e944b30930899ea760ecf33a4dba66be5e74", size = 489059, upload-time = "2025-11-02T12:25:54.619Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/bd/93/0c49e776b8734fef56ec9c5c57f923922f2cf0497d62e0f419465f28f3d0/psutil-7.1.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0005da714eee687b4b8decd3d6cc7c6db36215c9e74e5ad2264b90c3df7d92dc", size = 239751, upload-time = "2025-11-02T12:25:58.161Z" },
+ { url = "https://files.pythonhosted.org/packages/6f/8d/b31e39c769e70780f007969815195a55c81a63efebdd4dbe9e7a113adb2f/psutil-7.1.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:19644c85dcb987e35eeeaefdc3915d059dac7bd1167cdcdbf27e0ce2df0c08c0", size = 240368, upload-time = "2025-11-02T12:26:00.491Z" },
+ { url = "https://files.pythonhosted.org/packages/62/61/23fd4acc3c9eebbf6b6c78bcd89e5d020cfde4acf0a9233e9d4e3fa698b4/psutil-7.1.3-cp313-cp313t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:95ef04cf2e5ba0ab9eaafc4a11eaae91b44f4ef5541acd2ee91d9108d00d59a7", size = 287134, upload-time = "2025-11-02T12:26:02.613Z" },
+ { url = "https://files.pythonhosted.org/packages/30/1c/f921a009ea9ceb51aa355cb0cc118f68d354db36eae18174bab63affb3e6/psutil-7.1.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1068c303be3a72f8e18e412c5b2a8f6d31750fb152f9cb106b54090296c9d251", size = 289904, upload-time = "2025-11-02T12:26:05.207Z" },
+ { url = "https://files.pythonhosted.org/packages/a6/82/62d68066e13e46a5116df187d319d1724b3f437ddd0f958756fc052677f4/psutil-7.1.3-cp313-cp313t-win_amd64.whl", hash = "sha256:18349c5c24b06ac5612c0428ec2a0331c26443d259e2a0144a9b24b4395b58fa", size = 249642, upload-time = "2025-11-02T12:26:07.447Z" },
+ { url = "https://files.pythonhosted.org/packages/df/ad/c1cd5fe965c14a0392112f68362cfceb5230819dbb5b1888950d18a11d9f/psutil-7.1.3-cp313-cp313t-win_arm64.whl", hash = "sha256:c525ffa774fe4496282fb0b1187725793de3e7c6b29e41562733cae9ada151ee", size = 245518, upload-time = "2025-11-02T12:26:09.719Z" },
+ { url = "https://files.pythonhosted.org/packages/2e/bb/6670bded3e3236eb4287c7bcdc167e9fae6e1e9286e437f7111caed2f909/psutil-7.1.3-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:b403da1df4d6d43973dc004d19cee3b848e998ae3154cc8097d139b77156c353", size = 239843, upload-time = "2025-11-02T12:26:11.968Z" },
+ { url = "https://files.pythonhosted.org/packages/b8/66/853d50e75a38c9a7370ddbeefabdd3d3116b9c31ef94dc92c6729bc36bec/psutil-7.1.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:ad81425efc5e75da3f39b3e636293360ad8d0b49bed7df824c79764fb4ba9b8b", size = 240369, upload-time = "2025-11-02T12:26:14.358Z" },
+ { url = "https://files.pythonhosted.org/packages/41/bd/313aba97cb5bfb26916dc29cf0646cbe4dd6a89ca69e8c6edce654876d39/psutil-7.1.3-cp314-cp314t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8f33a3702e167783a9213db10ad29650ebf383946e91bc77f28a5eb083496bc9", size = 288210, upload-time = "2025-11-02T12:26:16.699Z" },
+ { url = "https://files.pythonhosted.org/packages/c2/fa/76e3c06e760927a0cfb5705eb38164254de34e9bd86db656d4dbaa228b04/psutil-7.1.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fac9cd332c67f4422504297889da5ab7e05fd11e3c4392140f7370f4208ded1f", size = 291182, upload-time = "2025-11-02T12:26:18.848Z" },
+ { url = "https://files.pythonhosted.org/packages/0f/1d/5774a91607035ee5078b8fd747686ebec28a962f178712de100d00b78a32/psutil-7.1.3-cp314-cp314t-win_amd64.whl", hash = "sha256:3792983e23b69843aea49c8f5b8f115572c5ab64c153bada5270086a2123c7e7", size = 250466, upload-time = "2025-11-02T12:26:21.183Z" },
+ { url = "https://files.pythonhosted.org/packages/00/ca/e426584bacb43a5cb1ac91fae1937f478cd8fbe5e4ff96574e698a2c77cd/psutil-7.1.3-cp314-cp314t-win_arm64.whl", hash = "sha256:31d77fcedb7529f27bb3a0472bea9334349f9a04160e8e6e5020f22c59893264", size = 245756, upload-time = "2025-11-02T12:26:23.148Z" },
+ { url = "https://files.pythonhosted.org/packages/ef/94/46b9154a800253e7ecff5aaacdf8ebf43db99de4a2dfa18575b02548654e/psutil-7.1.3-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:2bdbcd0e58ca14996a42adf3621a6244f1bb2e2e528886959c72cf1e326677ab", size = 238359, upload-time = "2025-11-02T12:26:25.284Z" },
+ { url = "https://files.pythonhosted.org/packages/68/3a/9f93cff5c025029a36d9a92fef47220ab4692ee7f2be0fba9f92813d0cb8/psutil-7.1.3-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:bc31fa00f1fbc3c3802141eede66f3a2d51d89716a194bf2cd6fc68310a19880", size = 239171, upload-time = "2025-11-02T12:26:27.23Z" },
+ { url = "https://files.pythonhosted.org/packages/ce/b1/5f49af514f76431ba4eea935b8ad3725cdeb397e9245ab919dbc1d1dc20f/psutil-7.1.3-cp36-abi3-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3bb428f9f05c1225a558f53e30ccbad9930b11c3fc206836242de1091d3e7dd3", size = 263261, upload-time = "2025-11-02T12:26:29.48Z" },
+ { url = "https://files.pythonhosted.org/packages/e0/95/992c8816a74016eb095e73585d747e0a8ea21a061ed3689474fabb29a395/psutil-7.1.3-cp36-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:56d974e02ca2c8eb4812c3f76c30e28836fffc311d55d979f1465c1feeb2b68b", size = 264635, upload-time = "2025-11-02T12:26:31.74Z" },
+ { url = "https://files.pythonhosted.org/packages/55/4c/c3ed1a622b6ae2fd3c945a366e64eb35247a31e4db16cf5095e269e8eb3c/psutil-7.1.3-cp37-abi3-win_amd64.whl", hash = "sha256:f39c2c19fe824b47484b96f9692932248a54c43799a84282cfe58d05a6449efd", size = 247633, upload-time = "2025-11-02T12:26:33.887Z" },
+ { url = "https://files.pythonhosted.org/packages/c9/ad/33b2ccec09bf96c2b2ef3f9a6f66baac8253d7565d8839e024a6b905d45d/psutil-7.1.3-cp37-abi3-win_arm64.whl", hash = "sha256:bd0d69cee829226a761e92f28140bec9a5ee9d5b4fb4b0cc589068dbfff559b1", size = 244608, upload-time = "2025-11-02T12:26:36.136Z" },
]
[[package]]
@@ -970,7 +977,7 @@ wheels = [
[[package]]
name = "pydantic"
-version = "2.12.3"
+version = "2.12.4"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "annotated-types" },
@@ -978,76 +985,80 @@ dependencies = [
{ name = "typing-extensions" },
{ name = "typing-inspection" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/f3/1e/4f0a3233767010308f2fd6bd0814597e3f63f1dc98304a9112b8759df4ff/pydantic-2.12.3.tar.gz", hash = "sha256:1da1c82b0fc140bb0103bc1441ffe062154c8d38491189751ee00fd8ca65ce74", size = 819383, upload-time = "2025-10-17T15:04:21.222Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/96/ad/a17bc283d7d81837c061c49e3eaa27a45991759a1b7eae1031921c6bd924/pydantic-2.12.4.tar.gz", hash = "sha256:0f8cb9555000a4b5b617f66bfd2566264c4984b27589d3b845685983e8ea85ac", size = 821038, upload-time = "2025-11-05T10:50:08.59Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/a1/6b/83661fa77dcefa195ad5f8cd9af3d1a7450fd57cc883ad04d65446ac2029/pydantic-2.12.3-py3-none-any.whl", hash = "sha256:6986454a854bc3bc6e5443e1369e06a3a456af9d339eda45510f517d9ea5c6bf", size = 462431, upload-time = "2025-10-17T15:04:19.346Z" },
+ { url = "https://files.pythonhosted.org/packages/82/2f/e68750da9b04856e2a7ec56fc6f034a5a79775e9b9a81882252789873798/pydantic-2.12.4-py3-none-any.whl", hash = "sha256:92d3d202a745d46f9be6df459ac5a064fdaa3c1c4cd8adcfa332ccf3c05f871e", size = 463400, upload-time = "2025-11-05T10:50:06.732Z" },
]
[[package]]
name = "pydantic-core"
-version = "2.41.4"
+version = "2.41.5"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "typing-extensions" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/df/18/d0944e8eaaa3efd0a91b0f1fc537d3be55ad35091b6a87638211ba691964/pydantic_core-2.41.4.tar.gz", hash = "sha256:70e47929a9d4a1905a67e4b687d5946026390568a8e952b92824118063cee4d5", size = 457557, upload-time = "2025-10-14T10:23:47.909Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/e9/81/d3b3e95929c4369d30b2a66a91db63c8ed0a98381ae55a45da2cd1cc1288/pydantic_core-2.41.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:ab06d77e053d660a6faaf04894446df7b0a7e7aba70c2797465a0a1af00fc887", size = 2099043, upload-time = "2025-10-14T10:20:28.561Z" },
- { url = "https://files.pythonhosted.org/packages/58/da/46fdac49e6717e3a94fc9201403e08d9d61aa7a770fab6190b8740749047/pydantic_core-2.41.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c53ff33e603a9c1179a9364b0a24694f183717b2e0da2b5ad43c316c956901b2", size = 1910699, upload-time = "2025-10-14T10:20:30.217Z" },
- { url = "https://files.pythonhosted.org/packages/1e/63/4d948f1b9dd8e991a5a98b77dd66c74641f5f2e5225fee37994b2e07d391/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:304c54176af2c143bd181d82e77c15c41cbacea8872a2225dd37e6544dce9999", size = 1952121, upload-time = "2025-10-14T10:20:32.246Z" },
- { url = "https://files.pythonhosted.org/packages/b2/a7/e5fc60a6f781fc634ecaa9ecc3c20171d238794cef69ae0af79ac11b89d7/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:025ba34a4cf4fb32f917d5d188ab5e702223d3ba603be4d8aca2f82bede432a4", size = 2041590, upload-time = "2025-10-14T10:20:34.332Z" },
- { url = "https://files.pythonhosted.org/packages/70/69/dce747b1d21d59e85af433428978a1893c6f8a7068fa2bb4a927fba7a5ff/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b9f5f30c402ed58f90c70e12eff65547d3ab74685ffe8283c719e6bead8ef53f", size = 2219869, upload-time = "2025-10-14T10:20:35.965Z" },
- { url = "https://files.pythonhosted.org/packages/83/6a/c070e30e295403bf29c4df1cb781317b6a9bac7cd07b8d3acc94d501a63c/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd96e5d15385d301733113bcaa324c8bcf111275b7675a9c6e88bfb19fc05e3b", size = 2345169, upload-time = "2025-10-14T10:20:37.627Z" },
- { url = "https://files.pythonhosted.org/packages/f0/83/06d001f8043c336baea7fd202a9ac7ad71f87e1c55d8112c50b745c40324/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98f348cbb44fae6e9653c1055db7e29de67ea6a9ca03a5fa2c2e11a47cff0e47", size = 2070165, upload-time = "2025-10-14T10:20:39.246Z" },
- { url = "https://files.pythonhosted.org/packages/14/0a/e567c2883588dd12bcbc110232d892cf385356f7c8a9910311ac997ab715/pydantic_core-2.41.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ec22626a2d14620a83ca583c6f5a4080fa3155282718b6055c2ea48d3ef35970", size = 2189067, upload-time = "2025-10-14T10:20:41.015Z" },
- { url = "https://files.pythonhosted.org/packages/f4/1d/3d9fca34273ba03c9b1c5289f7618bc4bd09c3ad2289b5420481aa051a99/pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3a95d4590b1f1a43bf33ca6d647b990a88f4a3824a8c4572c708f0b45a5290ed", size = 2132997, upload-time = "2025-10-14T10:20:43.106Z" },
- { url = "https://files.pythonhosted.org/packages/52/70/d702ef7a6cd41a8afc61f3554922b3ed8d19dd54c3bd4bdbfe332e610827/pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:f9672ab4d398e1b602feadcffcdd3af44d5f5e6ddc15bc7d15d376d47e8e19f8", size = 2307187, upload-time = "2025-10-14T10:20:44.849Z" },
- { url = "https://files.pythonhosted.org/packages/68/4c/c06be6e27545d08b802127914156f38d10ca287a9e8489342793de8aae3c/pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:84d8854db5f55fead3b579f04bda9a36461dab0730c5d570e1526483e7bb8431", size = 2305204, upload-time = "2025-10-14T10:20:46.781Z" },
- { url = "https://files.pythonhosted.org/packages/b0/e5/35ae4919bcd9f18603419e23c5eaf32750224a89d41a8df1a3704b69f77e/pydantic_core-2.41.4-cp312-cp312-win32.whl", hash = "sha256:9be1c01adb2ecc4e464392c36d17f97e9110fbbc906bcbe1c943b5b87a74aabd", size = 1972536, upload-time = "2025-10-14T10:20:48.39Z" },
- { url = "https://files.pythonhosted.org/packages/1e/c2/49c5bb6d2a49eb2ee3647a93e3dae7080c6409a8a7558b075027644e879c/pydantic_core-2.41.4-cp312-cp312-win_amd64.whl", hash = "sha256:d682cf1d22bab22a5be08539dca3d1593488a99998f9f412137bc323179067ff", size = 2031132, upload-time = "2025-10-14T10:20:50.421Z" },
- { url = "https://files.pythonhosted.org/packages/06/23/936343dbcba6eec93f73e95eb346810fc732f71ba27967b287b66f7b7097/pydantic_core-2.41.4-cp312-cp312-win_arm64.whl", hash = "sha256:833eebfd75a26d17470b58768c1834dfc90141b7afc6eb0429c21fc5a21dcfb8", size = 1969483, upload-time = "2025-10-14T10:20:52.35Z" },
- { url = "https://files.pythonhosted.org/packages/13/d0/c20adabd181a029a970738dfe23710b52a31f1258f591874fcdec7359845/pydantic_core-2.41.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:85e050ad9e5f6fe1004eec65c914332e52f429bc0ae12d6fa2092407a462c746", size = 2105688, upload-time = "2025-10-14T10:20:54.448Z" },
- { url = "https://files.pythonhosted.org/packages/00/b6/0ce5c03cec5ae94cca220dfecddc453c077d71363b98a4bbdb3c0b22c783/pydantic_core-2.41.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e7393f1d64792763a48924ba31d1e44c2cfbc05e3b1c2c9abb4ceeadd912cced", size = 1910807, upload-time = "2025-10-14T10:20:56.115Z" },
- { url = "https://files.pythonhosted.org/packages/68/3e/800d3d02c8beb0b5c069c870cbb83799d085debf43499c897bb4b4aaff0d/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94dab0940b0d1fb28bcab847adf887c66a27a40291eedf0b473be58761c9799a", size = 1956669, upload-time = "2025-10-14T10:20:57.874Z" },
- { url = "https://files.pythonhosted.org/packages/60/a4/24271cc71a17f64589be49ab8bd0751f6a0a03046c690df60989f2f95c2c/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:de7c42f897e689ee6f9e93c4bec72b99ae3b32a2ade1c7e4798e690ff5246e02", size = 2051629, upload-time = "2025-10-14T10:21:00.006Z" },
- { url = "https://files.pythonhosted.org/packages/68/de/45af3ca2f175d91b96bfb62e1f2d2f1f9f3b14a734afe0bfeff079f78181/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:664b3199193262277b8b3cd1e754fb07f2c6023289c815a1e1e8fb415cb247b1", size = 2224049, upload-time = "2025-10-14T10:21:01.801Z" },
- { url = "https://files.pythonhosted.org/packages/af/8f/ae4e1ff84672bf869d0a77af24fd78387850e9497753c432875066b5d622/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d95b253b88f7d308b1c0b417c4624f44553ba4762816f94e6986819b9c273fb2", size = 2342409, upload-time = "2025-10-14T10:21:03.556Z" },
- { url = "https://files.pythonhosted.org/packages/18/62/273dd70b0026a085c7b74b000394e1ef95719ea579c76ea2f0cc8893736d/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1351f5bbdbbabc689727cb91649a00cb9ee7203e0a6e54e9f5ba9e22e384b84", size = 2069635, upload-time = "2025-10-14T10:21:05.385Z" },
- { url = "https://files.pythonhosted.org/packages/30/03/cf485fff699b4cdaea469bc481719d3e49f023241b4abb656f8d422189fc/pydantic_core-2.41.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1affa4798520b148d7182da0615d648e752de4ab1a9566b7471bc803d88a062d", size = 2194284, upload-time = "2025-10-14T10:21:07.122Z" },
- { url = "https://files.pythonhosted.org/packages/f9/7e/c8e713db32405dfd97211f2fc0a15d6bf8adb7640f3d18544c1f39526619/pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7b74e18052fea4aa8dea2fb7dbc23d15439695da6cbe6cfc1b694af1115df09d", size = 2137566, upload-time = "2025-10-14T10:21:08.981Z" },
- { url = "https://files.pythonhosted.org/packages/04/f7/db71fd4cdccc8b75990f79ccafbbd66757e19f6d5ee724a6252414483fb4/pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:285b643d75c0e30abda9dc1077395624f314a37e3c09ca402d4015ef5979f1a2", size = 2316809, upload-time = "2025-10-14T10:21:10.805Z" },
- { url = "https://files.pythonhosted.org/packages/76/63/a54973ddb945f1bca56742b48b144d85c9fc22f819ddeb9f861c249d5464/pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:f52679ff4218d713b3b33f88c89ccbf3a5c2c12ba665fb80ccc4192b4608dbab", size = 2311119, upload-time = "2025-10-14T10:21:12.583Z" },
- { url = "https://files.pythonhosted.org/packages/f8/03/5d12891e93c19218af74843a27e32b94922195ded2386f7b55382f904d2f/pydantic_core-2.41.4-cp313-cp313-win32.whl", hash = "sha256:ecde6dedd6fff127c273c76821bb754d793be1024bc33314a120f83a3c69460c", size = 1981398, upload-time = "2025-10-14T10:21:14.584Z" },
- { url = "https://files.pythonhosted.org/packages/be/d8/fd0de71f39db91135b7a26996160de71c073d8635edfce8b3c3681be0d6d/pydantic_core-2.41.4-cp313-cp313-win_amd64.whl", hash = "sha256:d081a1f3800f05409ed868ebb2d74ac39dd0c1ff6c035b5162356d76030736d4", size = 2030735, upload-time = "2025-10-14T10:21:16.432Z" },
- { url = "https://files.pythonhosted.org/packages/72/86/c99921c1cf6650023c08bfab6fe2d7057a5142628ef7ccfa9921f2dda1d5/pydantic_core-2.41.4-cp313-cp313-win_arm64.whl", hash = "sha256:f8e49c9c364a7edcbe2a310f12733aad95b022495ef2a8d653f645e5d20c1564", size = 1973209, upload-time = "2025-10-14T10:21:18.213Z" },
- { url = "https://files.pythonhosted.org/packages/36/0d/b5706cacb70a8414396efdda3d72ae0542e050b591119e458e2490baf035/pydantic_core-2.41.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:ed97fd56a561f5eb5706cebe94f1ad7c13b84d98312a05546f2ad036bafe87f4", size = 1877324, upload-time = "2025-10-14T10:21:20.363Z" },
- { url = "https://files.pythonhosted.org/packages/de/2d/cba1fa02cfdea72dfb3a9babb067c83b9dff0bbcb198368e000a6b756ea7/pydantic_core-2.41.4-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a870c307bf1ee91fc58a9a61338ff780d01bfae45922624816878dce784095d2", size = 1884515, upload-time = "2025-10-14T10:21:22.339Z" },
- { url = "https://files.pythonhosted.org/packages/07/ea/3df927c4384ed9b503c9cc2d076cf983b4f2adb0c754578dfb1245c51e46/pydantic_core-2.41.4-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d25e97bc1f5f8f7985bdc2335ef9e73843bb561eb1fa6831fdfc295c1c2061cf", size = 2042819, upload-time = "2025-10-14T10:21:26.683Z" },
- { url = "https://files.pythonhosted.org/packages/6a/ee/df8e871f07074250270a3b1b82aad4cd0026b588acd5d7d3eb2fcb1471a3/pydantic_core-2.41.4-cp313-cp313t-win_amd64.whl", hash = "sha256:d405d14bea042f166512add3091c1af40437c2e7f86988f3915fabd27b1e9cd2", size = 1995866, upload-time = "2025-10-14T10:21:28.951Z" },
- { url = "https://files.pythonhosted.org/packages/fc/de/b20f4ab954d6d399499c33ec4fafc46d9551e11dc1858fb7f5dca0748ceb/pydantic_core-2.41.4-cp313-cp313t-win_arm64.whl", hash = "sha256:19f3684868309db5263a11bace3c45d93f6f24afa2ffe75a647583df22a2ff89", size = 1970034, upload-time = "2025-10-14T10:21:30.869Z" },
- { url = "https://files.pythonhosted.org/packages/54/28/d3325da57d413b9819365546eb9a6e8b7cbd9373d9380efd5f74326143e6/pydantic_core-2.41.4-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:e9205d97ed08a82ebb9a307e92914bb30e18cdf6f6b12ca4bedadb1588a0bfe1", size = 2102022, upload-time = "2025-10-14T10:21:32.809Z" },
- { url = "https://files.pythonhosted.org/packages/9e/24/b58a1bc0d834bf1acc4361e61233ee217169a42efbdc15a60296e13ce438/pydantic_core-2.41.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:82df1f432b37d832709fbcc0e24394bba04a01b6ecf1ee87578145c19cde12ac", size = 1905495, upload-time = "2025-10-14T10:21:34.812Z" },
- { url = "https://files.pythonhosted.org/packages/fb/a4/71f759cc41b7043e8ecdaab81b985a9b6cad7cec077e0b92cff8b71ecf6b/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3b4cc4539e055cfa39a3763c939f9d409eb40e85813257dcd761985a108554", size = 1956131, upload-time = "2025-10-14T10:21:36.924Z" },
- { url = "https://files.pythonhosted.org/packages/b0/64/1e79ac7aa51f1eec7c4cda8cbe456d5d09f05fdd68b32776d72168d54275/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b1eb1754fce47c63d2ff57fdb88c351a6c0150995890088b33767a10218eaa4e", size = 2052236, upload-time = "2025-10-14T10:21:38.927Z" },
- { url = "https://files.pythonhosted.org/packages/e9/e3/a3ffc363bd4287b80f1d43dc1c28ba64831f8dfc237d6fec8f2661138d48/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e6ab5ab30ef325b443f379ddb575a34969c333004fca5a1daa0133a6ffaad616", size = 2223573, upload-time = "2025-10-14T10:21:41.574Z" },
- { url = "https://files.pythonhosted.org/packages/28/27/78814089b4d2e684a9088ede3790763c64693c3d1408ddc0a248bc789126/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:31a41030b1d9ca497634092b46481b937ff9397a86f9f51bd41c4767b6fc04af", size = 2342467, upload-time = "2025-10-14T10:21:44.018Z" },
- { url = "https://files.pythonhosted.org/packages/92/97/4de0e2a1159cb85ad737e03306717637842c88c7fd6d97973172fb183149/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a44ac1738591472c3d020f61c6df1e4015180d6262ebd39bf2aeb52571b60f12", size = 2063754, upload-time = "2025-10-14T10:21:46.466Z" },
- { url = "https://files.pythonhosted.org/packages/0f/50/8cb90ce4b9efcf7ae78130afeb99fd1c86125ccdf9906ef64b9d42f37c25/pydantic_core-2.41.4-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d72f2b5e6e82ab8f94ea7d0d42f83c487dc159c5240d8f83beae684472864e2d", size = 2196754, upload-time = "2025-10-14T10:21:48.486Z" },
- { url = "https://files.pythonhosted.org/packages/34/3b/ccdc77af9cd5082723574a1cc1bcae7a6acacc829d7c0a06201f7886a109/pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:c4d1e854aaf044487d31143f541f7aafe7b482ae72a022c664b2de2e466ed0ad", size = 2137115, upload-time = "2025-10-14T10:21:50.63Z" },
- { url = "https://files.pythonhosted.org/packages/ca/ba/e7c7a02651a8f7c52dc2cff2b64a30c313e3b57c7d93703cecea76c09b71/pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:b568af94267729d76e6ee5ececda4e283d07bbb28e8148bb17adad93d025d25a", size = 2317400, upload-time = "2025-10-14T10:21:52.959Z" },
- { url = "https://files.pythonhosted.org/packages/2c/ba/6c533a4ee8aec6b812c643c49bb3bd88d3f01e3cebe451bb85512d37f00f/pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:6d55fb8b1e8929b341cc313a81a26e0d48aa3b519c1dbaadec3a6a2b4fcad025", size = 2312070, upload-time = "2025-10-14T10:21:55.419Z" },
- { url = "https://files.pythonhosted.org/packages/22/ae/f10524fcc0ab8d7f96cf9a74c880243576fd3e72bd8ce4f81e43d22bcab7/pydantic_core-2.41.4-cp314-cp314-win32.whl", hash = "sha256:5b66584e549e2e32a1398df11da2e0a7eff45d5c2d9db9d5667c5e6ac764d77e", size = 1982277, upload-time = "2025-10-14T10:21:57.474Z" },
- { url = "https://files.pythonhosted.org/packages/b4/dc/e5aa27aea1ad4638f0c3fb41132f7eb583bd7420ee63204e2d4333a3bbf9/pydantic_core-2.41.4-cp314-cp314-win_amd64.whl", hash = "sha256:557a0aab88664cc552285316809cab897716a372afaf8efdbef756f8b890e894", size = 2024608, upload-time = "2025-10-14T10:21:59.557Z" },
- { url = "https://files.pythonhosted.org/packages/3e/61/51d89cc2612bd147198e120a13f150afbf0bcb4615cddb049ab10b81b79e/pydantic_core-2.41.4-cp314-cp314-win_arm64.whl", hash = "sha256:3f1ea6f48a045745d0d9f325989d8abd3f1eaf47dd00485912d1a3a63c623a8d", size = 1967614, upload-time = "2025-10-14T10:22:01.847Z" },
- { url = "https://files.pythonhosted.org/packages/0d/c2/472f2e31b95eff099961fa050c376ab7156a81da194f9edb9f710f68787b/pydantic_core-2.41.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6c1fe4c5404c448b13188dd8bd2ebc2bdd7e6727fa61ff481bcc2cca894018da", size = 1876904, upload-time = "2025-10-14T10:22:04.062Z" },
- { url = "https://files.pythonhosted.org/packages/4a/07/ea8eeb91173807ecdae4f4a5f4b150a520085b35454350fc219ba79e66a3/pydantic_core-2.41.4-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:523e7da4d43b113bf8e7b49fa4ec0c35bf4fe66b2230bfc5c13cc498f12c6c3e", size = 1882538, upload-time = "2025-10-14T10:22:06.39Z" },
- { url = "https://files.pythonhosted.org/packages/1e/29/b53a9ca6cd366bfc928823679c6a76c7a4c69f8201c0ba7903ad18ebae2f/pydantic_core-2.41.4-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5729225de81fb65b70fdb1907fcf08c75d498f4a6f15af005aabb1fdadc19dfa", size = 2041183, upload-time = "2025-10-14T10:22:08.812Z" },
- { url = "https://files.pythonhosted.org/packages/c7/3d/f8c1a371ceebcaf94d6dd2d77c6cf4b1c078e13a5837aee83f760b4f7cfd/pydantic_core-2.41.4-cp314-cp314t-win_amd64.whl", hash = "sha256:de2cfbb09e88f0f795fd90cf955858fc2c691df65b1f21f0aa00b99f3fbc661d", size = 1993542, upload-time = "2025-10-14T10:22:11.332Z" },
- { url = "https://files.pythonhosted.org/packages/8a/ac/9fc61b4f9d079482a290afe8d206b8f490e9fd32d4fc03ed4fc698214e01/pydantic_core-2.41.4-cp314-cp314t-win_arm64.whl", hash = "sha256:d34f950ae05a83e0ede899c595f312ca976023ea1db100cd5aa188f7005e3ab0", size = 1973897, upload-time = "2025-10-14T10:22:13.444Z" },
- { url = "https://files.pythonhosted.org/packages/c4/48/ae937e5a831b7c0dc646b2ef788c27cd003894882415300ed21927c21efa/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:4f5d640aeebb438517150fdeec097739614421900e4a08db4a3ef38898798537", size = 2112087, upload-time = "2025-10-14T10:22:56.818Z" },
- { url = "https://files.pythonhosted.org/packages/5e/db/6db8073e3d32dae017da7e0d16a9ecb897d0a4d92e00634916e486097961/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:4a9ab037b71927babc6d9e7fc01aea9e66dc2a4a34dff06ef0724a4049629f94", size = 1920387, upload-time = "2025-10-14T10:22:59.342Z" },
- { url = "https://files.pythonhosted.org/packages/0d/c1/dd3542d072fcc336030d66834872f0328727e3b8de289c662faa04aa270e/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4dab9484ec605c3016df9ad4fd4f9a390bc5d816a3b10c6550f8424bb80b18c", size = 1951495, upload-time = "2025-10-14T10:23:02.089Z" },
- { url = "https://files.pythonhosted.org/packages/2b/c6/db8d13a1f8ab3f1eb08c88bd00fd62d44311e3456d1e85c0e59e0a0376e7/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8a5028425820731d8c6c098ab642d7b8b999758e24acae03ed38a66eca8335", size = 2139008, upload-time = "2025-10-14T10:23:04.539Z" },
+sdist = { url = "https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = "2025-11-04T13:43:49.098Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/5f/5d/5f6c63eebb5afee93bcaae4ce9a898f3373ca23df3ccaef086d0233a35a7/pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7", size = 2110990, upload-time = "2025-11-04T13:39:58.079Z" },
+ { url = "https://files.pythonhosted.org/packages/aa/32/9c2e8ccb57c01111e0fd091f236c7b371c1bccea0fa85247ac55b1e2b6b6/pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0", size = 1896003, upload-time = "2025-11-04T13:39:59.956Z" },
+ { url = "https://files.pythonhosted.org/packages/68/b8/a01b53cb0e59139fbc9e4fda3e9724ede8de279097179be4ff31f1abb65a/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69", size = 1919200, upload-time = "2025-11-04T13:40:02.241Z" },
+ { url = "https://files.pythonhosted.org/packages/38/de/8c36b5198a29bdaade07b5985e80a233a5ac27137846f3bc2d3b40a47360/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75", size = 2052578, upload-time = "2025-11-04T13:40:04.401Z" },
+ { url = "https://files.pythonhosted.org/packages/00/b5/0e8e4b5b081eac6cb3dbb7e60a65907549a1ce035a724368c330112adfdd/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05", size = 2208504, upload-time = "2025-11-04T13:40:06.072Z" },
+ { url = "https://files.pythonhosted.org/packages/77/56/87a61aad59c7c5b9dc8caad5a41a5545cba3810c3e828708b3d7404f6cef/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc", size = 2335816, upload-time = "2025-11-04T13:40:07.835Z" },
+ { url = "https://files.pythonhosted.org/packages/0d/76/941cc9f73529988688a665a5c0ecff1112b3d95ab48f81db5f7606f522d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c", size = 2075366, upload-time = "2025-11-04T13:40:09.804Z" },
+ { url = "https://files.pythonhosted.org/packages/d3/43/ebef01f69baa07a482844faaa0a591bad1ef129253ffd0cdaa9d8a7f72d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5", size = 2171698, upload-time = "2025-11-04T13:40:12.004Z" },
+ { url = "https://files.pythonhosted.org/packages/b1/87/41f3202e4193e3bacfc2c065fab7706ebe81af46a83d3e27605029c1f5a6/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c", size = 2132603, upload-time = "2025-11-04T13:40:13.868Z" },
+ { url = "https://files.pythonhosted.org/packages/49/7d/4c00df99cb12070b6bccdef4a195255e6020a550d572768d92cc54dba91a/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294", size = 2329591, upload-time = "2025-11-04T13:40:15.672Z" },
+ { url = "https://files.pythonhosted.org/packages/cc/6a/ebf4b1d65d458f3cda6a7335d141305dfa19bdc61140a884d165a8a1bbc7/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1", size = 2319068, upload-time = "2025-11-04T13:40:17.532Z" },
+ { url = "https://files.pythonhosted.org/packages/49/3b/774f2b5cd4192d5ab75870ce4381fd89cf218af999515baf07e7206753f0/pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d", size = 1985908, upload-time = "2025-11-04T13:40:19.309Z" },
+ { url = "https://files.pythonhosted.org/packages/86/45/00173a033c801cacf67c190fef088789394feaf88a98a7035b0e40d53dc9/pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815", size = 2020145, upload-time = "2025-11-04T13:40:21.548Z" },
+ { url = "https://files.pythonhosted.org/packages/f9/22/91fbc821fa6d261b376a3f73809f907cec5ca6025642c463d3488aad22fb/pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3", size = 1976179, upload-time = "2025-11-04T13:40:23.393Z" },
+ { url = "https://files.pythonhosted.org/packages/87/06/8806241ff1f70d9939f9af039c6c35f2360cf16e93c2ca76f184e76b1564/pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9", size = 2120403, upload-time = "2025-11-04T13:40:25.248Z" },
+ { url = "https://files.pythonhosted.org/packages/94/02/abfa0e0bda67faa65fef1c84971c7e45928e108fe24333c81f3bfe35d5f5/pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34", size = 1896206, upload-time = "2025-11-04T13:40:27.099Z" },
+ { url = "https://files.pythonhosted.org/packages/15/df/a4c740c0943e93e6500f9eb23f4ca7ec9bf71b19e608ae5b579678c8d02f/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0", size = 1919307, upload-time = "2025-11-04T13:40:29.806Z" },
+ { url = "https://files.pythonhosted.org/packages/9a/e3/6324802931ae1d123528988e0e86587c2072ac2e5394b4bc2bc34b61ff6e/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33", size = 2063258, upload-time = "2025-11-04T13:40:33.544Z" },
+ { url = "https://files.pythonhosted.org/packages/c9/d4/2230d7151d4957dd79c3044ea26346c148c98fbf0ee6ebd41056f2d62ab5/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e", size = 2214917, upload-time = "2025-11-04T13:40:35.479Z" },
+ { url = "https://files.pythonhosted.org/packages/e6/9f/eaac5df17a3672fef0081b6c1bb0b82b33ee89aa5cec0d7b05f52fd4a1fa/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2", size = 2332186, upload-time = "2025-11-04T13:40:37.436Z" },
+ { url = "https://files.pythonhosted.org/packages/cf/4e/35a80cae583a37cf15604b44240e45c05e04e86f9cfd766623149297e971/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586", size = 2073164, upload-time = "2025-11-04T13:40:40.289Z" },
+ { url = "https://files.pythonhosted.org/packages/bf/e3/f6e262673c6140dd3305d144d032f7bd5f7497d3871c1428521f19f9efa2/pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d", size = 2179146, upload-time = "2025-11-04T13:40:42.809Z" },
+ { url = "https://files.pythonhosted.org/packages/75/c7/20bd7fc05f0c6ea2056a4565c6f36f8968c0924f19b7d97bbfea55780e73/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740", size = 2137788, upload-time = "2025-11-04T13:40:44.752Z" },
+ { url = "https://files.pythonhosted.org/packages/3a/8d/34318ef985c45196e004bc46c6eab2eda437e744c124ef0dbe1ff2c9d06b/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e", size = 2340133, upload-time = "2025-11-04T13:40:46.66Z" },
+ { url = "https://files.pythonhosted.org/packages/9c/59/013626bf8c78a5a5d9350d12e7697d3d4de951a75565496abd40ccd46bee/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858", size = 2324852, upload-time = "2025-11-04T13:40:48.575Z" },
+ { url = "https://files.pythonhosted.org/packages/1a/d9/c248c103856f807ef70c18a4f986693a46a8ffe1602e5d361485da502d20/pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36", size = 1994679, upload-time = "2025-11-04T13:40:50.619Z" },
+ { url = "https://files.pythonhosted.org/packages/9e/8b/341991b158ddab181cff136acd2552c9f35bd30380422a639c0671e99a91/pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11", size = 2019766, upload-time = "2025-11-04T13:40:52.631Z" },
+ { url = "https://files.pythonhosted.org/packages/73/7d/f2f9db34af103bea3e09735bb40b021788a5e834c81eedb541991badf8f5/pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd", size = 1981005, upload-time = "2025-11-04T13:40:54.734Z" },
+ { url = "https://files.pythonhosted.org/packages/ea/28/46b7c5c9635ae96ea0fbb779e271a38129df2550f763937659ee6c5dbc65/pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a", size = 2119622, upload-time = "2025-11-04T13:40:56.68Z" },
+ { url = "https://files.pythonhosted.org/packages/74/1a/145646e5687e8d9a1e8d09acb278c8535ebe9e972e1f162ed338a622f193/pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14", size = 1891725, upload-time = "2025-11-04T13:40:58.807Z" },
+ { url = "https://files.pythonhosted.org/packages/23/04/e89c29e267b8060b40dca97bfc64a19b2a3cf99018167ea1677d96368273/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1", size = 1915040, upload-time = "2025-11-04T13:41:00.853Z" },
+ { url = "https://files.pythonhosted.org/packages/84/a3/15a82ac7bd97992a82257f777b3583d3e84bdb06ba6858f745daa2ec8a85/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66", size = 2063691, upload-time = "2025-11-04T13:41:03.504Z" },
+ { url = "https://files.pythonhosted.org/packages/74/9b/0046701313c6ef08c0c1cf0e028c67c770a4e1275ca73131563c5f2a310a/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869", size = 2213897, upload-time = "2025-11-04T13:41:05.804Z" },
+ { url = "https://files.pythonhosted.org/packages/8a/cd/6bac76ecd1b27e75a95ca3a9a559c643b3afcd2dd62086d4b7a32a18b169/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2", size = 2333302, upload-time = "2025-11-04T13:41:07.809Z" },
+ { url = "https://files.pythonhosted.org/packages/4c/d2/ef2074dc020dd6e109611a8be4449b98cd25e1b9b8a303c2f0fca2f2bcf7/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375", size = 2064877, upload-time = "2025-11-04T13:41:09.827Z" },
+ { url = "https://files.pythonhosted.org/packages/18/66/e9db17a9a763d72f03de903883c057b2592c09509ccfe468187f2a2eef29/pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553", size = 2180680, upload-time = "2025-11-04T13:41:12.379Z" },
+ { url = "https://files.pythonhosted.org/packages/d3/9e/3ce66cebb929f3ced22be85d4c2399b8e85b622db77dad36b73c5387f8f8/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90", size = 2138960, upload-time = "2025-11-04T13:41:14.627Z" },
+ { url = "https://files.pythonhosted.org/packages/a6/62/205a998f4327d2079326b01abee48e502ea739d174f0a89295c481a2272e/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07", size = 2339102, upload-time = "2025-11-04T13:41:16.868Z" },
+ { url = "https://files.pythonhosted.org/packages/3c/0d/f05e79471e889d74d3d88f5bd20d0ed189ad94c2423d81ff8d0000aab4ff/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb", size = 2326039, upload-time = "2025-11-04T13:41:18.934Z" },
+ { url = "https://files.pythonhosted.org/packages/ec/e1/e08a6208bb100da7e0c4b288eed624a703f4d129bde2da475721a80cab32/pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23", size = 1995126, upload-time = "2025-11-04T13:41:21.418Z" },
+ { url = "https://files.pythonhosted.org/packages/48/5d/56ba7b24e9557f99c9237e29f5c09913c81eeb2f3217e40e922353668092/pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf", size = 2015489, upload-time = "2025-11-04T13:41:24.076Z" },
+ { url = "https://files.pythonhosted.org/packages/4e/bb/f7a190991ec9e3e0ba22e4993d8755bbc4a32925c0b5b42775c03e8148f9/pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0", size = 1977288, upload-time = "2025-11-04T13:41:26.33Z" },
+ { url = "https://files.pythonhosted.org/packages/92/ed/77542d0c51538e32e15afe7899d79efce4b81eee631d99850edc2f5e9349/pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a", size = 2120255, upload-time = "2025-11-04T13:41:28.569Z" },
+ { url = "https://files.pythonhosted.org/packages/bb/3d/6913dde84d5be21e284439676168b28d8bbba5600d838b9dca99de0fad71/pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3", size = 1863760, upload-time = "2025-11-04T13:41:31.055Z" },
+ { url = "https://files.pythonhosted.org/packages/5a/f0/e5e6b99d4191da102f2b0eb9687aaa7f5bea5d9964071a84effc3e40f997/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c", size = 1878092, upload-time = "2025-11-04T13:41:33.21Z" },
+ { url = "https://files.pythonhosted.org/packages/71/48/36fb760642d568925953bcc8116455513d6e34c4beaa37544118c36aba6d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612", size = 2053385, upload-time = "2025-11-04T13:41:35.508Z" },
+ { url = "https://files.pythonhosted.org/packages/20/25/92dc684dd8eb75a234bc1c764b4210cf2646479d54b47bf46061657292a8/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d", size = 2218832, upload-time = "2025-11-04T13:41:37.732Z" },
+ { url = "https://files.pythonhosted.org/packages/e2/09/f53e0b05023d3e30357d82eb35835d0f6340ca344720a4599cd663dca599/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9", size = 2327585, upload-time = "2025-11-04T13:41:40Z" },
+ { url = "https://files.pythonhosted.org/packages/aa/4e/2ae1aa85d6af35a39b236b1b1641de73f5a6ac4d5a7509f77b814885760c/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660", size = 2041078, upload-time = "2025-11-04T13:41:42.323Z" },
+ { url = "https://files.pythonhosted.org/packages/cd/13/2e215f17f0ef326fc72afe94776edb77525142c693767fc347ed6288728d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9", size = 2173914, upload-time = "2025-11-04T13:41:45.221Z" },
+ { url = "https://files.pythonhosted.org/packages/02/7a/f999a6dcbcd0e5660bc348a3991c8915ce6599f4f2c6ac22f01d7a10816c/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3", size = 2129560, upload-time = "2025-11-04T13:41:47.474Z" },
+ { url = "https://files.pythonhosted.org/packages/3a/b1/6c990ac65e3b4c079a4fb9f5b05f5b013afa0f4ed6780a3dd236d2cbdc64/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf", size = 2329244, upload-time = "2025-11-04T13:41:49.992Z" },
+ { url = "https://files.pythonhosted.org/packages/d9/02/3c562f3a51afd4d88fff8dffb1771b30cfdfd79befd9883ee094f5b6c0d8/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470", size = 2331955, upload-time = "2025-11-04T13:41:54.079Z" },
+ { url = "https://files.pythonhosted.org/packages/5c/96/5fb7d8c3c17bc8c62fdb031c47d77a1af698f1d7a406b0f79aaa1338f9ad/pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa", size = 1988906, upload-time = "2025-11-04T13:41:56.606Z" },
+ { url = "https://files.pythonhosted.org/packages/22/ed/182129d83032702912c2e2d8bbe33c036f342cc735737064668585dac28f/pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c", size = 1981607, upload-time = "2025-11-04T13:41:58.889Z" },
+ { url = "https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769, upload-time = "2025-11-04T13:42:01.186Z" },
+ { url = "https://files.pythonhosted.org/packages/09/32/59b0c7e63e277fa7911c2fc70ccfb45ce4b98991e7ef37110663437005af/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd", size = 2110495, upload-time = "2025-11-04T13:42:49.689Z" },
+ { url = "https://files.pythonhosted.org/packages/aa/81/05e400037eaf55ad400bcd318c05bb345b57e708887f07ddb2d20e3f0e98/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc", size = 1915388, upload-time = "2025-11-04T13:42:52.215Z" },
+ { url = "https://files.pythonhosted.org/packages/6e/0d/e3549b2399f71d56476b77dbf3cf8937cec5cd70536bdc0e374a421d0599/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56", size = 1942879, upload-time = "2025-11-04T13:42:56.483Z" },
+ { url = "https://files.pythonhosted.org/packages/f7/07/34573da085946b6a313d7c42f82f16e8920bfd730665de2d11c0c37a74b5/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b", size = 2139017, upload-time = "2025-11-04T13:42:59.471Z" },
]
[[package]]
@@ -1458,28 +1469,28 @@ wheels = [
[[package]]
name = "ruff"
-version = "0.14.2"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/ee/34/8218a19b2055b80601e8fd201ec723c74c7fe1ca06d525a43ed07b6d8e85/ruff-0.14.2.tar.gz", hash = "sha256:98da787668f239313d9c902ca7c523fe11b8ec3f39345553a51b25abc4629c96", size = 5539663, upload-time = "2025-10-23T19:37:00.956Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/16/dd/23eb2db5ad9acae7c845700493b72d3ae214dce0b226f27df89216110f2b/ruff-0.14.2-py3-none-linux_armv6l.whl", hash = "sha256:7cbe4e593505bdec5884c2d0a4d791a90301bc23e49a6b1eb642dd85ef9c64f1", size = 12533390, upload-time = "2025-10-23T19:36:18.044Z" },
- { url = "https://files.pythonhosted.org/packages/5a/8c/5f9acff43ddcf3f85130d0146d0477e28ccecc495f9f684f8f7119b74c0d/ruff-0.14.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:8d54b561729cee92f8d89c316ad7a3f9705533f5903b042399b6ae0ddfc62e11", size = 12887187, upload-time = "2025-10-23T19:36:22.664Z" },
- { url = "https://files.pythonhosted.org/packages/99/fa/047646491479074029665022e9f3dc6f0515797f40a4b6014ea8474c539d/ruff-0.14.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:5c8753dfa44ebb2cde10ce5b4d2ef55a41fb9d9b16732a2c5df64620dbda44a3", size = 11925177, upload-time = "2025-10-23T19:36:24.778Z" },
- { url = "https://files.pythonhosted.org/packages/15/8b/c44cf7fe6e59ab24a9d939493a11030b503bdc2a16622cede8b7b1df0114/ruff-0.14.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d0bbeffb8d9f4fccf7b5198d566d0bad99a9cb622f1fc3467af96cb8773c9e3", size = 12358285, upload-time = "2025-10-23T19:36:26.979Z" },
- { url = "https://files.pythonhosted.org/packages/45/01/47701b26254267ef40369aea3acb62a7b23e921c27372d127e0f3af48092/ruff-0.14.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7047f0c5a713a401e43a88d36843d9c83a19c584e63d664474675620aaa634a8", size = 12303832, upload-time = "2025-10-23T19:36:29.192Z" },
- { url = "https://files.pythonhosted.org/packages/2d/5c/ae7244ca4fbdf2bee9d6405dcd5bc6ae51ee1df66eb7a9884b77b8af856d/ruff-0.14.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3bf8d2f9aa1602599217d82e8e0af7fd33e5878c4d98f37906b7c93f46f9a839", size = 13036995, upload-time = "2025-10-23T19:36:31.861Z" },
- { url = "https://files.pythonhosted.org/packages/27/4c/0860a79ce6fd4c709ac01173f76f929d53f59748d0dcdd662519835dae43/ruff-0.14.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:1c505b389e19c57a317cf4b42db824e2fca96ffb3d86766c1c9f8b96d32048a7", size = 14512649, upload-time = "2025-10-23T19:36:33.915Z" },
- { url = "https://files.pythonhosted.org/packages/7f/7f/d365de998069720a3abfc250ddd876fc4b81a403a766c74ff9bde15b5378/ruff-0.14.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a307fc45ebd887b3f26b36d9326bb70bf69b01561950cdcc6c0bdf7bb8e0f7cc", size = 14088182, upload-time = "2025-10-23T19:36:36.983Z" },
- { url = "https://files.pythonhosted.org/packages/6c/ea/d8e3e6b209162000a7be1faa41b0a0c16a133010311edc3329753cc6596a/ruff-0.14.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:61ae91a32c853172f832c2f40bd05fd69f491db7289fb85a9b941ebdd549781a", size = 13599516, upload-time = "2025-10-23T19:36:39.208Z" },
- { url = "https://files.pythonhosted.org/packages/fa/ea/c7810322086db68989fb20a8d5221dd3b79e49e396b01badca07b433ab45/ruff-0.14.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1967e40286f63ee23c615e8e7e98098dedc7301568bd88991f6e544d8ae096", size = 13272690, upload-time = "2025-10-23T19:36:41.453Z" },
- { url = "https://files.pythonhosted.org/packages/a9/39/10b05acf8c45786ef501d454e00937e1b97964f846bf28883d1f9619928a/ruff-0.14.2-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:2877f02119cdebf52a632d743a2e302dea422bfae152ebe2f193d3285a3a65df", size = 13496497, upload-time = "2025-10-23T19:36:43.61Z" },
- { url = "https://files.pythonhosted.org/packages/59/a1/1f25f8301e13751c30895092485fada29076e5e14264bdacc37202e85d24/ruff-0.14.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e681c5bc777de5af898decdcb6ba3321d0d466f4cb43c3e7cc2c3b4e7b843a05", size = 12266116, upload-time = "2025-10-23T19:36:45.625Z" },
- { url = "https://files.pythonhosted.org/packages/5c/fa/0029bfc9ce16ae78164e6923ef392e5f173b793b26cc39aa1d8b366cf9dc/ruff-0.14.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:e21be42d72e224736f0c992cdb9959a2fa53c7e943b97ef5d081e13170e3ffc5", size = 12281345, upload-time = "2025-10-23T19:36:47.618Z" },
- { url = "https://files.pythonhosted.org/packages/a5/ab/ece7baa3c0f29b7683be868c024f0838770c16607bea6852e46b202f1ff6/ruff-0.14.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:b8264016f6f209fac16262882dbebf3f8be1629777cf0f37e7aff071b3e9b92e", size = 12629296, upload-time = "2025-10-23T19:36:49.789Z" },
- { url = "https://files.pythonhosted.org/packages/a4/7f/638f54b43f3d4e48c6a68062794e5b367ddac778051806b9e235dfb7aa81/ruff-0.14.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5ca36b4cb4db3067a3b24444463ceea5565ea78b95fe9a07ca7cb7fd16948770", size = 13371610, upload-time = "2025-10-23T19:36:51.882Z" },
- { url = "https://files.pythonhosted.org/packages/8d/35/3654a973ebe5b32e1fd4a08ed2d46755af7267da7ac710d97420d7b8657d/ruff-0.14.2-py3-none-win32.whl", hash = "sha256:41775927d287685e08f48d8eb3f765625ab0b7042cc9377e20e64f4eb0056ee9", size = 12415318, upload-time = "2025-10-23T19:36:53.961Z" },
- { url = "https://files.pythonhosted.org/packages/71/30/3758bcf9e0b6a4193a6f51abf84254aba00887dfa8c20aba18aa366c5f57/ruff-0.14.2-py3-none-win_amd64.whl", hash = "sha256:0df3424aa5c3c08b34ed8ce099df1021e3adaca6e90229273496b839e5a7e1af", size = 13565279, upload-time = "2025-10-23T19:36:56.578Z" },
- { url = "https://files.pythonhosted.org/packages/2e/5d/aa883766f8ef9ffbe6aa24f7192fb71632f31a30e77eb39aa2b0dc4290ac/ruff-0.14.2-py3-none-win_arm64.whl", hash = "sha256:ea9d635e83ba21569fbacda7e78afbfeb94911c9434aff06192d9bc23fd5495a", size = 12554956, upload-time = "2025-10-23T19:36:58.714Z" },
+version = "0.14.4"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/df/55/cccfca45157a2031dcbb5a462a67f7cf27f8b37d4b3b1cd7438f0f5c1df6/ruff-0.14.4.tar.gz", hash = "sha256:f459a49fe1085a749f15414ca76f61595f1a2cc8778ed7c279b6ca2e1fd19df3", size = 5587844, upload-time = "2025-11-06T22:07:45.033Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/17/b9/67240254166ae1eaa38dec32265e9153ac53645a6c6670ed36ad00722af8/ruff-0.14.4-py3-none-linux_armv6l.whl", hash = "sha256:e6604613ffbcf2297cd5dcba0e0ac9bd0c11dc026442dfbb614504e87c349518", size = 12606781, upload-time = "2025-11-06T22:07:01.841Z" },
+ { url = "https://files.pythonhosted.org/packages/46/c8/09b3ab245d8652eafe5256ab59718641429f68681ee713ff06c5c549f156/ruff-0.14.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:d99c0b52b6f0598acede45ee78288e5e9b4409d1ce7f661f0fa36d4cbeadf9a4", size = 12946765, upload-time = "2025-11-06T22:07:05.858Z" },
+ { url = "https://files.pythonhosted.org/packages/14/bb/1564b000219144bf5eed2359edc94c3590dd49d510751dad26202c18a17d/ruff-0.14.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:9358d490ec030f1b51d048a7fd6ead418ed0826daf6149e95e30aa67c168af33", size = 11928120, upload-time = "2025-11-06T22:07:08.023Z" },
+ { url = "https://files.pythonhosted.org/packages/a3/92/d5f1770e9988cc0742fefaa351e840d9aef04ec24ae1be36f333f96d5704/ruff-0.14.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81b40d27924f1f02dfa827b9c0712a13c0e4b108421665322218fc38caf615c2", size = 12370877, upload-time = "2025-11-06T22:07:10.015Z" },
+ { url = "https://files.pythonhosted.org/packages/e2/29/e9282efa55f1973d109faf839a63235575519c8ad278cc87a182a366810e/ruff-0.14.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f5e649052a294fe00818650712083cddc6cc02744afaf37202c65df9ea52efa5", size = 12408538, upload-time = "2025-11-06T22:07:13.085Z" },
+ { url = "https://files.pythonhosted.org/packages/8e/01/930ed6ecfce130144b32d77d8d69f5c610e6d23e6857927150adf5d7379a/ruff-0.14.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa082a8f878deeba955531f975881828fd6afd90dfa757c2b0808aadb437136e", size = 13141942, upload-time = "2025-11-06T22:07:15.386Z" },
+ { url = "https://files.pythonhosted.org/packages/6a/46/a9c89b42b231a9f487233f17a89cbef9d5acd538d9488687a02ad288fa6b/ruff-0.14.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:1043c6811c2419e39011890f14d0a30470f19d47d197c4858b2787dfa698f6c8", size = 14544306, upload-time = "2025-11-06T22:07:17.631Z" },
+ { url = "https://files.pythonhosted.org/packages/78/96/9c6cf86491f2a6d52758b830b89b78c2ae61e8ca66b86bf5a20af73d20e6/ruff-0.14.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a9f3a936ac27fb7c2a93e4f4b943a662775879ac579a433291a6f69428722649", size = 14210427, upload-time = "2025-11-06T22:07:19.832Z" },
+ { url = "https://files.pythonhosted.org/packages/71/f4/0666fe7769a54f63e66404e8ff698de1dcde733e12e2fd1c9c6efb689cb5/ruff-0.14.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:95643ffd209ce78bc113266b88fba3d39e0461f0cbc8b55fb92505030fb4a850", size = 13658488, upload-time = "2025-11-06T22:07:22.32Z" },
+ { url = "https://files.pythonhosted.org/packages/ee/79/6ad4dda2cfd55e41ac9ed6d73ef9ab9475b1eef69f3a85957210c74ba12c/ruff-0.14.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:456daa2fa1021bc86ca857f43fe29d5d8b3f0e55e9f90c58c317c1dcc2afc7b5", size = 13354908, upload-time = "2025-11-06T22:07:24.347Z" },
+ { url = "https://files.pythonhosted.org/packages/b5/60/f0b6990f740bb15c1588601d19d21bcc1bd5de4330a07222041678a8e04f/ruff-0.14.4-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:f911bba769e4a9f51af6e70037bb72b70b45a16db5ce73e1f72aefe6f6d62132", size = 13587803, upload-time = "2025-11-06T22:07:26.327Z" },
+ { url = "https://files.pythonhosted.org/packages/c9/da/eaaada586f80068728338e0ef7f29ab3e4a08a692f92eb901a4f06bbff24/ruff-0.14.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:76158a7369b3979fa878612c623a7e5430c18b2fd1c73b214945c2d06337db67", size = 12279654, upload-time = "2025-11-06T22:07:28.46Z" },
+ { url = "https://files.pythonhosted.org/packages/66/d4/b1d0e82cf9bf8aed10a6d45be47b3f402730aa2c438164424783ac88c0ed/ruff-0.14.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:f3b8f3b442d2b14c246e7aeca2e75915159e06a3540e2f4bed9f50d062d24469", size = 12357520, upload-time = "2025-11-06T22:07:31.468Z" },
+ { url = "https://files.pythonhosted.org/packages/04/f4/53e2b42cc82804617e5c7950b7079d79996c27e99c4652131c6a1100657f/ruff-0.14.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:c62da9a06779deecf4d17ed04939ae8b31b517643b26370c3be1d26f3ef7dbde", size = 12719431, upload-time = "2025-11-06T22:07:33.831Z" },
+ { url = "https://files.pythonhosted.org/packages/a2/94/80e3d74ed9a72d64e94a7b7706b1c1ebaa315ef2076fd33581f6a1cd2f95/ruff-0.14.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5a443a83a1506c684e98acb8cb55abaf3ef725078be40237463dae4463366349", size = 13464394, upload-time = "2025-11-06T22:07:35.905Z" },
+ { url = "https://files.pythonhosted.org/packages/54/1a/a49f071f04c42345c793d22f6cf5e0920095e286119ee53a64a3a3004825/ruff-0.14.4-py3-none-win32.whl", hash = "sha256:643b69cb63cd996f1fc7229da726d07ac307eae442dd8974dbc7cf22c1e18fff", size = 12493429, upload-time = "2025-11-06T22:07:38.43Z" },
+ { url = "https://files.pythonhosted.org/packages/bc/22/e58c43e641145a2b670328fb98bc384e20679b5774258b1e540207580266/ruff-0.14.4-py3-none-win_amd64.whl", hash = "sha256:26673da283b96fe35fa0c939bf8411abec47111644aa9f7cfbd3c573fb125d2c", size = 13635380, upload-time = "2025-11-06T22:07:40.496Z" },
+ { url = "https://files.pythonhosted.org/packages/30/bd/4168a751ddbbf43e86544b4de8b5c3b7be8d7167a2a5cb977d274e04f0a1/ruff-0.14.4-py3-none-win_arm64.whl", hash = "sha256:dd09c292479596b0e6fec8cd95c65c3a6dc68e9ad17b8f2382130f87ff6a75bb", size = 12663065, upload-time = "2025-11-06T22:07:42.603Z" },
]
[[package]]
@@ -1529,14 +1540,14 @@ wheels = [
[[package]]
name = "sse-starlette"
-version = "3.0.2"
+version = "3.0.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "anyio" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/42/6f/22ed6e33f8a9e76ca0a412405f31abb844b779d52c5f96660766edcd737c/sse_starlette-3.0.2.tar.gz", hash = "sha256:ccd60b5765ebb3584d0de2d7a6e4f745672581de4f5005ab31c3a25d10b52b3a", size = 20985, upload-time = "2025-07-27T09:07:44.565Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/db/3c/fa6517610dc641262b77cc7bf994ecd17465812c1b0585fe33e11be758ab/sse_starlette-3.0.3.tar.gz", hash = "sha256:88cfb08747e16200ea990c8ca876b03910a23b547ab3bd764c0d8eb81019b971", size = 21943, upload-time = "2025-10-30T18:44:20.117Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/ef/10/c78f463b4ef22eef8491f218f692be838282cd65480f6e423d7730dfd1fb/sse_starlette-3.0.2-py3-none-any.whl", hash = "sha256:16b7cbfddbcd4eaca11f7b586f3b8a080f1afe952c15813455b162edea619e5a", size = 11297, upload-time = "2025-07-27T09:07:43.268Z" },
+ { url = "https://files.pythonhosted.org/packages/23/a0/984525d19ca5c8a6c33911a0c164b11490dd0f90ff7fd689f704f84e9a11/sse_starlette-3.0.3-py3-none-any.whl", hash = "sha256:af5bf5a6f3933df1d9c7f8539633dc8444ca6a97ab2e2a7cd3b6e431ac03a431", size = 11765, upload-time = "2025-10-30T18:44:18.834Z" },
]
[[package]]
@@ -1555,15 +1566,15 @@ wheels = [
[[package]]
name = "starlette"
-version = "0.49.0"
+version = "0.49.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "anyio" },
{ name = "typing-extensions", marker = "python_full_version < '3.13'" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/e1/36/70878596a60339cd9f78ee58b9ca8548f235a02d21be649ca73c35ccdd5a/starlette-0.49.0.tar.gz", hash = "sha256:cb75dfe3267b99caf9036db355601f8c6092c4d50d132b3724cb373766cb2ab1", size = 2654292, upload-time = "2025-10-28T08:11:42.379Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/de/1a/608df0b10b53b0beb96a37854ee05864d182ddd4b1156a22f1ad3860425a/starlette-0.49.3.tar.gz", hash = "sha256:1c14546f299b5901a1ea0e34410575bc33bbd741377a10484a54445588d00284", size = 2655031, upload-time = "2025-11-01T15:12:26.13Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/98/c4/da4392a59f3f1c9da2fa7bafd697ed14cf5f2ee2102c5cd4a4a1c5985e42/starlette-0.49.0-py3-none-any.whl", hash = "sha256:6f3988f55dda3fd36e5640aeaceee5b691063e0ebc4d7851e1c3a6695fbdc8ba", size = 74057, upload-time = "2025-10-28T08:11:40.895Z" },
+ { url = "https://files.pythonhosted.org/packages/a3/e0/021c772d6a662f43b63044ab481dc6ac7592447605b5b35a957785363122/starlette-0.49.3-py3-none-any.whl", hash = "sha256:b579b99715fdc2980cf88c8ec96d3bf1ce16f5a8051a7c2b84ef9b1cdecaea2f", size = 74340, upload-time = "2025-11-01T15:12:24.387Z" },
]
[[package]]
@@ -1861,11 +1872,11 @@ wheels = [
[[package]]
name = "webcolors"
-version = "24.11.1"
+version = "25.10.0"
source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/7b/29/061ec845fb58521848f3739e466efd8250b4b7b98c1b6c5bf4d40b419b7e/webcolors-24.11.1.tar.gz", hash = "sha256:ecb3d768f32202af770477b8b65f318fa4f566c22948673a977b00d589dd80f6", size = 45064, upload-time = "2024-11-11T07:43:24.224Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/1d/7a/eb316761ec35664ea5174709a68bbd3389de60d4a1ebab8808bfc264ed67/webcolors-25.10.0.tar.gz", hash = "sha256:62abae86504f66d0f6364c2a8520de4a0c47b80c03fc3a5f1815fedbef7c19bf", size = 53491, upload-time = "2025-10-31T07:51:03.977Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/60/e8/c0e05e4684d13459f93d312077a9a2efbe04d59c393bc2b8802248c908d4/webcolors-24.11.1-py3-none-any.whl", hash = "sha256:515291393b4cdf0eb19c155749a096f779f7d909f7cceea072791cb9095b92e9", size = 14934, upload-time = "2024-11-11T07:43:22.529Z" },
+ { url = "https://files.pythonhosted.org/packages/e2/cc/e097523dd85c9cf5d354f78310927f1656c422bd7b2613b2db3e3f9a0f2c/webcolors-25.10.0-py3-none-any.whl", hash = "sha256:032c727334856fc0b968f63daa252a1ac93d33db2f5267756623c210e57a4f1d", size = 14905, upload-time = "2025-10-31T07:51:01.778Z" },
]
[[package]]
diff --git a/webhook_server/app.py b/webhook_server/app.py
index d1e0461ac..beac2a2c5 100644
--- a/webhook_server/app.py
+++ b/webhook_server/app.py
@@ -1,28 +1,32 @@
+import asyncio
import ipaddress
import json
-import logging
import os
-import sys
+from collections.abc import AsyncGenerator
from contextlib import asynccontextmanager
-from typing import Any, AsyncGenerator
+from typing import Any
import httpx
import requests
import urllib3
from fastapi import (
- BackgroundTasks,
Depends,
FastAPI,
HTTPException,
+ Query,
Request,
+ Response,
WebSocket,
status,
)
-from fastapi.responses import HTMLResponse, StreamingResponse
+from fastapi.responses import HTMLResponse, JSONResponse, StreamingResponse
from fastapi.staticfiles import StaticFiles
# Import for MCP integration
from fastapi_mcp import FastApiMCP
+from fastapi_mcp.transport.http import FastApiHttpSessionManager
+from mcp.server.streamable_http_manager import StreamableHTTPSessionManager
+from starlette.datastructures import Headers
from webhook_server.libs.config import Config
from webhook_server.libs.exceptions import RepositoryNotFoundInConfigError
@@ -40,12 +44,14 @@
# Constants
APP_URL_ROOT_PATH: str = "/webhook_server"
+LOG_SERVER_ENABLED: bool = os.environ.get("ENABLE_LOG_SERVER") == "true"
# Global variables
ALLOWED_IPS: tuple[ipaddress._BaseNetwork, ...] = ()
LOGGER = get_logger_with_params()
_lifespan_http_client: httpx.AsyncClient | None = None
+_background_tasks: set[asyncio.Task] = set()
# Helper function to wrap the imported gate_by_allowlist_ips with ALLOWED_IPS
@@ -54,8 +60,17 @@ async def gate_by_allowlist_ips_dependency(request: Request) -> None:
await gate_by_allowlist_ips(request, ALLOWED_IPS)
+def require_log_server_enabled() -> None:
+ """Dependency to ensure log server is enabled before accessing log viewer APIs."""
+ if not LOG_SERVER_ENABLED:
+ raise HTTPException(
+ status_code=status.HTTP_404_NOT_FOUND,
+ detail="Log server is disabled. Set ENABLE_LOG_SERVER=true to enable.",
+ )
+
+
@asynccontextmanager
-async def lifespan(app: FastAPI) -> AsyncGenerator[None, None]:
+async def lifespan(_app: FastAPI) -> AsyncGenerator[None, None]:
global _lifespan_http_client
_lifespan_http_client = httpx.AsyncClient(timeout=HTTP_TIMEOUT_SECONDS)
@@ -80,8 +95,8 @@ async def lifespan(app: FastAPI) -> AsyncGenerator[None, None]:
config = Config(logger=LOGGER)
root_config = config.root_data
- verify_github_ips = root_config.get("verify-github-ips")
- verify_cloudflare_ips = root_config.get("verify-cloudflare-ips")
+ verify_github_ips = root_config.get("verify-github-ips", False)
+ verify_cloudflare_ips = root_config.get("verify-cloudflare-ips", False)
disable_ssl_warnings = root_config.get("disable-ssl-warnings", False)
# Conditionally disable urllib3 warnings based on config
@@ -124,7 +139,13 @@ async def lifespan(app: FastAPI) -> AsyncGenerator[None, None]:
ALLOWED_IPS = tuple(networks)
LOGGER.info(f"IP allowlist initialized successfully with {len(ALLOWED_IPS)} networks.")
elif verify_github_ips or verify_cloudflare_ips:
- LOGGER.warning("IP verification enabled but no valid IPs loaded - webhook will accept from any IP")
+ # Fail-close: If IP verification is enabled but no networks loaded, reject all requests
+ LOGGER.error("IP verification enabled but no valid IPs loaded - failing closed for security")
+ raise RuntimeError(
+ "IP verification enabled but no allowlist loaded. "
+ "Cannot start server in insecure state. "
+ "Check network connectivity to GitHub/Cloudflare API endpoints."
+ )
yield
@@ -143,6 +164,20 @@ async def lifespan(app: FastAPI) -> AsyncGenerator[None, None]:
await _lifespan_http_client.aclose()
LOGGER.debug("HTTP client closed")
+ # Optionally wait for pending background tasks for graceful shutdown
+ global _background_tasks
+ if _background_tasks:
+ LOGGER.info(f"Waiting for {len(_background_tasks)} pending background task(s) to complete...")
+ # Wait up to 30 seconds for tasks to complete
+ done, pending = await asyncio.wait(_background_tasks, timeout=30.0, return_when=asyncio.ALL_COMPLETED)
+ if pending:
+ LOGGER.warning(f"{len(pending)} background task(s) did not complete within timeout, cancelling...")
+ for task in pending:
+ task.cancel()
+ # Wait briefly for cancellations to propagate
+ await asyncio.wait(pending, timeout=5.0)
+ LOGGER.debug(f"Background tasks cleanup complete: {len(done)} completed, {len(pending)} cancelled")
+
LOGGER.info("Application shutdown complete.")
@@ -164,23 +199,73 @@ def healthcheck() -> dict[str, Any]:
dependencies=[Depends(gate_by_allowlist_ips_dependency)],
tags=["mcp_exclude"],
)
-async def process_webhook(request: Request, background_tasks: BackgroundTasks) -> dict[str, Any]:
- # Extract headers early for logging
- delivery_id = request.headers.get("X-GitHub-Delivery", "unknown-delivery")
- event_type = request.headers.get("X-GitHub-Event", "unknown-event")
+async def process_webhook(request: Request) -> JSONResponse:
+ """Process GitHub webhooks with immediate 200 OK response and background processing.
+
+ **Critical Design Pattern:**
+ This endpoint returns 200 OK immediately after validating that we have enough
+ data to process the webhook. This design prevents GitHub webhook timeouts (10
+ second limit) while allowing long-running operations to complete asynchronously.
+
+ **Synchronous Validation (must pass to return 200):**
+ 1. Read request body
+ 2. Verify signature (if webhook-secret configured)
+ 3. Parse JSON payload
+ 4. Validate required fields: repository.name, repository.full_name, X-GitHub-Event
+
+ **Background Processing (errors logged only):**
+ - Config loading, repository validation, API initialization
+ - All API calls
+ - All handler processing
+ - All errors (missing repos, API failures, etc.) are caught and logged
+
+ **Why Background Processing:**
+ - GitHub webhook timeout: 10 seconds
+ - Typical processing time: 5-30 seconds (API calls, builds, notifications)
+ - Without background processing: Frequent timeouts, webhook retries, duplicates
+ - With background processing: Instant 200 OK, reliable webhook delivery
+
+ **Implications:**
+ - HTTP 200 OK means webhook payload was valid and queued for processing
+ - HTTP 200 OK does NOT mean webhook was processed successfully
+ - Check logs with delivery_id to verify actual processing results
+
+ Args:
+ request: FastAPI Request object containing webhook payload and headers
+
+ Returns:
+ JSONResponse: 200 OK response with delivery_id and event_type for tracking
- # Use standardized log prefix format (will get repository info after parsing payload)
- log_context = prepare_log_prefix(event_type, delivery_id)
+ Raises:
+ HTTPException 400: Missing required fields (X-GitHub-Event, repository.name,
+ repository.full_name) or invalid JSON payload
+ HTTPException 401: Signature verification failed (if webhook-secret configured)
+ HTTPException 500: Configuration errors during signature verification setup
+
+ Note:
+ All processing errors (missing repos, API failures, etc.)
+ happen in background and are logged only. They do NOT affect the HTTP response.
+ """
+ # Extract headers for validation and logging
+ delivery_id = request.headers.get("X-GitHub-Delivery", "unknown-delivery")
+ event_type = request.headers.get("X-GitHub-Event")
+ log_context = prepare_log_prefix(event_type or "unknown-event", delivery_id)
LOGGER.info(f"{log_context} Processing webhook")
+ # Validate X-GitHub-Event header (required by GithubWebhook.__init__)
+ if not event_type:
+ LOGGER.error(f"{log_context} Missing X-GitHub-Event header")
+ raise HTTPException(status_code=400, detail="Missing X-GitHub-Event header")
+
+ # Read request body
try:
payload_body = await request.body()
except Exception as e:
LOGGER.error(f"{log_context} Failed to read request body: {e}")
- raise HTTPException(status_code=400, detail="Failed to read request body")
+ raise HTTPException(status_code=400, detail="Failed to read request body") from e
- # Load config and verify signature
+ # Verify signature if configured
try:
config = Config(logger=LOGGER)
root_config = config.root_data
@@ -194,62 +279,90 @@ async def process_webhook(request: Request, background_tasks: BackgroundTasks) -
raise
except Exception as e:
LOGGER.error(f"{log_context} Configuration error: {e}")
- raise HTTPException(status_code=500, detail="Configuration error")
+ raise HTTPException(status_code=500, detail="Configuration error") from e
# Parse JSON payload
try:
hook_data: dict[Any, Any] = json.loads(payload_body)
- if "repository" not in hook_data or "name" not in hook_data["repository"]:
- raise ValueError("Missing repository information in payload")
- except json.JSONDecodeError as e:
- LOGGER.error(f"{log_context} Invalid JSON payload: {e}")
- raise HTTPException(status_code=400, detail="Invalid JSON payload")
- except ValueError as e:
- LOGGER.error(f"{log_context} Invalid payload structure: {e}")
- raise HTTPException(status_code=400, detail=str(e))
-
- # Create repository-specific logger
- repository_name = hook_data["repository"]["name"]
- logger = get_logger_with_params(repository_name=repository_name)
- logger.info(f"{log_context} Processing webhook for repository: {repository_name}")
-
- async def process_with_error_handling(_api: GithubWebhook, _logger: logging.Logger) -> None:
+ except json.JSONDecodeError:
+ LOGGER.exception(f"{log_context} Invalid JSON payload")
+ raise HTTPException(status_code=400, detail="Invalid JSON payload") from None
+
+ # Validate required fields for GithubWebhook.__init__()
+ if "repository" not in hook_data:
+ LOGGER.error(f"{log_context} Missing repository in payload")
+ raise HTTPException(status_code=400, detail="Missing repository in payload")
+ if "name" not in hook_data["repository"]:
+ LOGGER.error(f"{log_context} Missing repository.name in payload")
+ raise HTTPException(status_code=400, detail="Missing repository.name in payload")
+ if "full_name" not in hook_data["repository"]:
+ LOGGER.error(f"{log_context} Missing repository.full_name in payload")
+ raise HTTPException(status_code=400, detail="Missing repository.full_name in payload")
+
+ # Return 200 immediately - all validation passed, we can process this webhook
+ LOGGER.info(f"{log_context} Webhook validation passed, queuing for background processing")
+
+ async def process_with_error_handling(
+ _hook_data: dict[Any, Any], _headers: Headers, _delivery_id: str, _event_type: str
+ ) -> None:
+ """Process webhook in background with granular error handling.
+
+ This function runs in a background task after the webhook endpoint has already
+ returned 200 OK to GitHub. Exceptions here do NOT affect the HTTP response,
+ preventing webhook timeouts while still logging all errors for debugging.
+
+ Args:
+ _hook_data: Webhook payload data dictionary
+ _headers: Starlette Headers object from the incoming request
+ _delivery_id: GitHub delivery ID for logging
+ _event_type: GitHub event type for logging
+ """
+ # Create repository-specific logger in background
+ repository_name = _hook_data.get("repository", {}).get("name", "unknown")
+ _logger = get_logger_with_params(repository_name=repository_name)
+ _log_context = prepare_log_prefix(
+ event_type=_event_type, delivery_id=_delivery_id, repository_name=repository_name
+ )
+ _logger.info(f"{_log_context} Processing webhook")
+
try:
+ # Initialize GithubWebhook inside background task to avoid blocking webhook response
+ _api: GithubWebhook = GithubWebhook(hook_data=_hook_data, headers=_headers, logger=_logger)
await _api.process()
- _logger.success(f"{log_context} Webhook processing completed successfully") # type: ignore
- except Exception as e:
- _logger.exception(f"{log_context} Error in background task: {e}")
-
- try:
- api: GithubWebhook = GithubWebhook(hook_data=hook_data, headers=request.headers, logger=logger)
- background_tasks.add_task(process_with_error_handling, _api=api, _logger=logger)
+ except RepositoryNotFoundInConfigError:
+ # Repository-specific error - not exceptional, log as error not exception
+ _logger.error(f"{_log_context} Repository not found in configuration")
+ except (httpx.ConnectError, httpx.RequestError, requests.exceptions.ConnectionError):
+ # Network/connection errors - can be transient
+ _logger.exception(f"{_log_context} API connection error - check network connectivity")
+ except Exception:
+ # Catch-all for unexpected errors
+ _logger.exception(f"{_log_context} Unexpected error in background webhook processing")
+
+ # Start background task immediately using asyncio.create_task
+ # This ensures the HTTP response is sent immediately without waiting
+ # Store task reference for observability and graceful shutdown
+ task = asyncio.create_task(
+ process_with_error_handling(
+ _hook_data=hook_data,
+ _headers=request.headers,
+ _delivery_id=delivery_id,
+ _event_type=event_type,
+ )
+ )
+ _background_tasks.add(task)
+ task.add_done_callback(_background_tasks.discard)
- LOGGER.info(f"{log_context} Webhook queued for background processing")
- return {
+ # Return 200 immediately with JSONResponse for fastest serialization
+ return JSONResponse(
+ status_code=status.HTTP_200_OK,
+ content={
"status": status.HTTP_200_OK,
"message": "Webhook queued for processing",
"delivery_id": delivery_id,
"event_type": event_type,
- }
-
- except RepositoryNotFoundInConfigError as e:
- logger.error(f"{log_context} Repository not found: {e}")
- raise HTTPException(status_code=404, detail=str(e))
-
- except ConnectionError as e:
- logger.error(f"{log_context} API connection error: {e}")
- raise HTTPException(status_code=503, detail=f"API Connection Error: {e}")
-
- except HTTPException:
- raise
-
- except Exception as e:
- logger.exception(f"{log_context} Unexpected error during processing: {e}")
- exc_type, _, exc_tb = sys.exc_info()
- line_no = exc_tb.tb_lineno if exc_tb else "unknown"
- file_name = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1] if exc_tb else "unknown"
- error_details = f"Error type: {exc_type.__name__ if exc_type else ''}, File: {file_name}, Line: {line_no}"
- raise HTTPException(status_code=500, detail=f"Internal Server Error: {error_details}")
+ },
+ )
# Module-level singleton instance
@@ -318,7 +431,11 @@ async def _get_log_entries_core(
)
-@FASTAPI_APP.get("/logs/api/entries", operation_id="get_log_entries")
+@FASTAPI_APP.get(
+ "/logs/api/entries",
+ operation_id="get_log_entries",
+ dependencies=[Depends(require_log_server_enabled)],
+)
async def get_log_entries(
hook_id: str | None = None,
pr_number: int | None = None,
@@ -329,8 +446,8 @@ async def get_log_entries(
start_time: str | None = None,
end_time: str | None = None,
search: str | None = None,
- limit: int = 100,
- offset: int = 0,
+ limit: int = Query(default=100, ge=1, le=10000, description="Maximum entries to return (1-10000)"),
+ offset: int = Query(default=0, ge=0, description="Number of entries to skip for pagination"),
controller: LogViewerController = controller_dependency,
) -> dict[str, Any]:
"""Retrieve and filter webhook processing logs with advanced pagination and search capabilities.
@@ -469,9 +586,17 @@ async def _export_logs_core(
)
-@FASTAPI_APP.get("/logs/api/export", operation_id="export_logs")
+@FASTAPI_APP.get(
+ "/logs/api/export",
+ operation_id="export_logs",
+ dependencies=[Depends(require_log_server_enabled)],
+)
async def export_logs(
- format_type: str,
+ format_type: str = Query(
+ default="json",
+ pattern="^json$",
+ description="Export format (currently only 'json' supported)",
+ ),
hook_id: str | None = None,
pr_number: int | None = None,
repository: str | None = None,
@@ -481,7 +606,7 @@ async def export_logs(
start_time: str | None = None,
end_time: str | None = None,
search: str | None = None,
- limit: int = 10000,
+ limit: int = Query(default=10000, ge=1, le=100000, description="Maximum entries to export (1-100000)"),
controller: LogViewerController = controller_dependency,
) -> StreamingResponse:
"""Export filtered webhook logs to downloadable files for offline analysis and reporting.
@@ -615,7 +740,11 @@ async def _get_pr_flow_data_core(
return controller.get_pr_flow_data(hook_id)
-@FASTAPI_APP.get("/logs/api/pr-flow/{hook_id}", operation_id="get_pr_flow_data")
+@FASTAPI_APP.get(
+ "/logs/api/pr-flow/{hook_id}",
+ operation_id="get_pr_flow_data",
+ dependencies=[Depends(require_log_server_enabled)],
+)
async def get_pr_flow_data(hook_id: str, controller: LogViewerController = controller_dependency) -> dict[str, Any]:
"""Get PR workflow visualization data for process analysis and debugging.
@@ -656,7 +785,11 @@ async def _get_workflow_steps_core(
return controller.get_workflow_steps(hook_id)
-@FASTAPI_APP.get("/logs/api/workflow-steps/{hook_id}", operation_id="get_workflow_steps")
+@FASTAPI_APP.get(
+ "/logs/api/workflow-steps/{hook_id}",
+ operation_id="get_workflow_steps",
+ dependencies=[Depends(require_log_server_enabled)],
+)
async def get_workflow_steps(hook_id: str, controller: LogViewerController = controller_dependency) -> dict[str, Any]:
"""Retrieve detailed timeline and execution data for individual workflow steps within a webhook processing flow.
@@ -901,6 +1034,11 @@ async def websocket_log_stream(
level: str | None = None,
) -> None:
"""Handle WebSocket connection for real-time log streaming."""
+    # Check if log server is enabled (manual check since WebSocket routes don't support FastAPI dependencies the same way)
+ if not LOG_SERVER_ENABLED:
+ await websocket.close(code=status.WS_1008_POLICY_VIOLATION, reason="Log server is disabled")
+ return
+
controller = get_log_viewer_controller()
await controller.handle_websocket(
websocket=websocket,
@@ -914,7 +1052,43 @@ async def websocket_log_stream(
# Create MCP instance with the main app
+# NOTE: No authentication configured - MCP server runs without auth
mcp = FastApiMCP(FASTAPI_APP, exclude_tags=["mcp_exclude"])
-mcp.mount_http()
-LOGGER.info("MCP integration initialized successfully")
+# Create stateless HTTP transport to avoid session management issues
+# Override with stateless session manager
+http_transport = FastApiHttpSessionManager(
+ mcp_server=mcp.server,
+ event_store=None, # No event store needed for stateless mode
+ json_response=True,
+)
+# Manually patch to use stateless mode
+http_transport._session_manager = None # Force recreation with stateless=True
+
+
+# Register the HTTP endpoint manually
+@FASTAPI_APP.api_route("/mcp", methods=["GET", "POST", "DELETE"], include_in_schema=False, operation_id="mcp_http")
+async def handle_mcp_streamable_http(request: Request) -> Response:
+ # Ensure session manager is created with stateless=True
+ if http_transport._session_manager is None:
+ http_transport._session_manager = StreamableHTTPSessionManager(
+ app=mcp.server,
+ event_store=http_transport.event_store,
+ json_response=True,
+ stateless=True, # Enable stateless mode - no session management required
+ )
+ # Start the session manager
+
+ async def run_manager() -> None:
+ async with http_transport._session_manager.run():
+ await asyncio.Event().wait()
+
+ http_transport._manager_task = asyncio.create_task(run_manager())
+ http_transport._manager_started = True
+ await asyncio.sleep(0.1) # Give it time to initialize
+
+ return await http_transport.handle_fastapi_request(request)
+
+
+LOGGER.info("MCP integration initialized successfully (no authentication configured)")
+LOGGER.debug("MCP HTTP endpoint mounted at: /mcp")
diff --git a/webhook_server/libs/config.py b/webhook_server/libs/config.py
index 6f8a83452..4686a0541 100644
--- a/webhook_server/libs/config.py
+++ b/webhook_server/libs/config.py
@@ -33,22 +33,49 @@ def repositories_exists(self) -> None:
def root_data(self) -> dict[str, Any]:
try:
with open(self.config_path) as fd:
- return yaml.safe_load(fd)
+ return yaml.safe_load(fd) or {}
+ except FileNotFoundError:
+ # Since existence is validated in __init__, this indicates a race condition.
+ # Re-raise to propagate the error rather than returning empty dict.
+ self.logger.exception(f"Config file not found: {self.config_path}")
+ raise
+ except yaml.YAMLError:
+ self.logger.exception(f"Config file has invalid YAML syntax: {self.config_path}")
+ raise
+ except PermissionError:
+ self.logger.exception(f"Permission denied reading config file: {self.config_path}")
+ raise
except Exception:
- self.logger.error(f"Config file is empty: {self.config_path}")
- return {}
+ self.logger.exception(f"Failed to load config file {self.config_path}")
+ raise
@property
def repository_data(self) -> dict[str, Any]:
return self.root_data["repositories"].get(self.repository, {})
def repository_local_data(self, github_api: github.Github, repository_full_name: str) -> dict[str, Any]:
- if self.repository and repository_full_name:
- # Import here to avoid cyclic imports
- from webhook_server.utils.helpers import get_github_repo_api
+ """
+ Get repository-specific configuration from .github-webhook-server.yaml file.
+
+ Reads configuration from the repository's .github-webhook-server.yaml file,
+ which takes precedence over global config.yaml settings.
+
+ Args:
+ github_api: PyGithub API instance for repository access
+ repository_full_name: Full repository name (owner/repo-name)
+ Returns:
+ Dictionary containing repository configuration, or empty dict if file not found
+
+ Raises:
+ yaml.YAMLError: If repository config file has invalid YAML syntax
+ """
+ if self.repository and repository_full_name:
try:
- repo = get_github_repo_api(github_app_api=github_api, repository=repository_full_name)
+ # Directly use github_api.get_repo instead of importing get_github_repo_api
+ # to avoid circular dependency with helpers.py
+ self.logger.debug(f"Get GitHub API for repository {repository_full_name}")
+ repo = github_api.get_repo(repository_full_name)
try:
_path = repo.get_contents(".github-webhook-server.yaml")
except UnknownObjectException:
@@ -58,8 +85,12 @@ def repository_local_data(self, github_api: github.Github, repository_full_name:
repo_config = yaml.safe_load(config_file.decoded_content)
return repo_config
- except Exception as ex:
- self.logger.error(f"Repository {repository_full_name} config file not found or error. {ex}")
+ except yaml.YAMLError:
+ self.logger.exception(f"Repository {repository_full_name} config has invalid YAML syntax")
+ raise
+
+ except Exception:
+ self.logger.exception(f"Repository {repository_full_name} config file not found or error")
return {}
self.logger.error("self.repository or self.repository_full_name is not defined")
@@ -69,20 +100,43 @@ def get_value(self, value: str, return_on_none: Any = None, extra_dict: dict[str
"""
Get value from config
+ Supports dot notation for nested values (e.g., "docker.username", "pypi.token")
+
Order of getting value:
1. Local repository file (.github-webhook-server.yaml)
2. Repository level global config file (config.yaml)
3. Root level global config file (config.yaml)
"""
- if extra_dict and extra_dict.get(value):
- value = extra_dict[value]
- if value is not None:
- return value
+ if extra_dict:
+ result = self._get_nested_value(value, extra_dict)
+ if result is not None:
+ return result
for scope in (self.repository_data, self.root_data):
- if value in scope:
- value_data = scope[value]
- if value_data is not None:
- return value_data
+ result = self._get_nested_value(value, scope)
+ if result is not None:
+ return result
return return_on_none
+
+ def _get_nested_value(self, key: str, data: dict[str, Any]) -> Any:
+ """
+ Get value from nested dict using dot notation.
+
+ Args:
+ key: Key with optional dot notation (e.g., "docker.username", "pypi.token")
+ data: Dictionary to search
+
+ Returns:
+ Value if found, None otherwise
+ """
+ keys = key.split(".")
+ current = data
+
+ for k in keys:
+ if isinstance(current, dict) and k in current:
+ current = current[k]
+ else:
+ return None
+
+ return current
diff --git a/webhook_server/libs/exceptions.py b/webhook_server/libs/exceptions.py
index a75dd5c69..b10b62b47 100644
--- a/webhook_server/libs/exceptions.py
+++ b/webhook_server/libs/exceptions.py
@@ -1,12 +1,10 @@
class RepositoryNotFoundInConfigError(Exception):
- pass
-
+ """Raised when a repository is not found in the configuration file."""
-class ProcessGithubWebhookError(Exception):
- def __init__(self, err: dict[str, str]):
- self.err = err
- super().__init__(str(err))
+ pass
class NoApiTokenError(Exception):
+ """Raised when no API token is available for GitHub API operations."""
+
pass
diff --git a/webhook_server/libs/github_api.py b/webhook_server/libs/github_api.py
index b23ce068f..d9381e8bb 100644
--- a/webhook_server/libs/github_api.py
+++ b/webhook_server/libs/github_api.py
@@ -2,11 +2,14 @@
import asyncio
import contextlib
-import json
import logging
import os
+import shlex
+import shutil
+import tempfile
from typing import Any
+import github
import requests
from github import GithubException
from github.Commit import Commit
@@ -14,14 +17,14 @@
from github.Repository import Repository
from starlette.datastructures import Headers
-from webhook_server.libs.check_run_handler import CheckRunHandler
from webhook_server.libs.config import Config
from webhook_server.libs.exceptions import RepositoryNotFoundInConfigError
-from webhook_server.libs.issue_comment_handler import IssueCommentHandler
-from webhook_server.libs.owners_files_handler import OwnersFileHandler
-from webhook_server.libs.pull_request_handler import PullRequestHandler
-from webhook_server.libs.pull_request_review_handler import PullRequestReviewHandler
-from webhook_server.libs.push_handler import PushHandler
+from webhook_server.libs.handlers.check_run_handler import CheckRunHandler
+from webhook_server.libs.handlers.issue_comment_handler import IssueCommentHandler
+from webhook_server.libs.handlers.owners_files_handler import OwnersFileHandler
+from webhook_server.libs.handlers.pull_request_handler import PullRequestHandler
+from webhook_server.libs.handlers.pull_request_review_handler import PullRequestReviewHandler
+from webhook_server.libs.handlers.push_handler import PushHandler
from webhook_server.utils.constants import (
BUILD_CONTAINER_STR,
CAN_BE_MERGED_STR,
@@ -35,7 +38,7 @@
get_repository_github_app_api,
)
from webhook_server.utils.helpers import (
- extract_key_from_dict,
+ format_task_fields,
get_api_with_highest_rate_limit,
get_apis_and_tokes_from_config,
get_github_repo_api,
@@ -50,6 +53,7 @@ def __init__(self, hook_data: dict[Any, Any], headers: Headers, logger: logging.
self.hook_data = hook_data
self.repository_name: str = hook_data["repository"]["name"]
self.repository_full_name: str = hook_data["repository"]["full_name"]
+ self._bg_tasks: set[asyncio.Task] = set()
self.parent_committer: str = ""
self.x_github_delivery: str = headers.get("X-GitHub-Delivery", "")
self.github_event: str = headers["X-GitHub-Event"]
@@ -61,6 +65,8 @@ def __init__(self, hook_data: dict[Any, Any], headers: Headers, logger: logging.
self.token: str
self.api_user: str
self.current_pull_request_supported_retest: list[str] = []
+ self.github_api: github.Github | None = None
+ self.initial_rate_limit_remaining: int | None = None
if not self.config.repository_data:
raise RepositoryNotFoundInConfigError(f"Repository {self.repository_name} not found in config file")
@@ -72,6 +78,14 @@ def __init__(self, hook_data: dict[Any, Any], headers: Headers, logger: logging.
)
if github_api and self.token:
+ self.github_api = github_api
+ # Track initial rate limit for token spend calculation
+ # Note: log_prefix not set yet, so we can't use it in error messages here
+ try:
+ initial_rate_limit = github_api.get_rate_limit()
+ self.initial_rate_limit_remaining = initial_rate_limit.rate.remaining
+ except Exception as ex:
+ self.logger.debug(f"Failed to get initial rate limit: {ex}")
self.repository = get_github_repo_api(github_app_api=github_api, repository=self.repository_full_name)
# Once we have a repository, we can get the config from .github-webhook-server.yaml
local_repository_config = self.config.repository_local_data(
@@ -105,90 +119,245 @@ def __init__(self, hook_data: dict[Any, Any], headers: Headers, logger: logging.
self.logger.error(f"{self.log_prefix} Failed to get repository.")
return
- self.clone_repo_dir: str = os.path.join("/tmp", f"{self.repository.name}")
- self.add_api_users_to_auto_verified_and_merged_users
+ # Create unique temp directory to avoid collisions and security issues
+        # Format: /tmp/github-webhook-{repo_name}-{random} (mkdtemp prefix + random suffix)
+ # This prevents predictable paths and ensures isolation between concurrent webhook handlers
+ self.clone_repo_dir: str = tempfile.mkdtemp(prefix=f"github-webhook-{self.repository_name}-")
+ # Initialize auto-verified users from API users
+ self.add_api_users_to_auto_verified_and_merged_users()
self.current_pull_request_supported_retest = self._current_pull_request_supported_retest
self.issue_url_for_welcome_msg: str = (
"Report bugs in [Issues](https://github.com/myakove/github-webhook-server/issues)"
)
+ async def _get_token_metrics(self) -> str:
+ """Get token metrics (API rate limit consumption) for this webhook.
+
+ Returns:
+ str: Formatted token metrics string for logging, or empty string if unavailable.
+ """
+ if not self.github_api or self.initial_rate_limit_remaining is None:
+ return ""
+
+ try:
+ final_rate_limit = await asyncio.to_thread(self.github_api.get_rate_limit)
+ final_remaining = final_rate_limit.rate.remaining
+
+ # Calculate token spend (handle case where rate limit reset between checks)
+ # If final > initial, rate limit reset occurred, so we can't calculate accurately
+ if final_remaining > self.initial_rate_limit_remaining:
+ # Rate limit reset happened - log as 0 since we can't determine actual spend
+ token_spend = 0
+ return (
+ f"token {self.token[:8]}... {token_spend} API calls "
+ f"(rate limit reset occurred - initial: {self.initial_rate_limit_remaining}, "
+ f"final: {final_remaining})"
+ )
+ else:
+ token_spend = self.initial_rate_limit_remaining - final_remaining
+ # Return token spend with structured format for parsing
+ return (
+ f"token {self.token[:8]}... {token_spend} API calls "
+ f"(initial: {self.initial_rate_limit_remaining}, "
+ f"final: {final_remaining}, remaining: {final_remaining})"
+ )
+ except Exception as ex:
+ self.logger.debug(f"{self.log_prefix} Failed to get token metrics: {ex}")
+ return ""
+
async def process(self) -> Any:
event_log: str = f"Event type: {self.github_event}. event ID: {self.x_github_delivery}"
- self.logger.step(f"{self.log_prefix} Starting webhook processing: {event_log}") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('webhook_processing', 'webhook_routing', 'started')} "
+ f"Starting webhook processing: {event_log}",
+ )
if self.github_event == "ping":
- self.logger.step(f"{self.log_prefix} Processing ping event") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('webhook_processing', 'webhook_routing', 'processing')} "
+ f"Processing ping event",
+ )
self.logger.debug(f"{self.log_prefix} {event_log}")
+ token_metrics = await self._get_token_metrics()
+ self.logger.success( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('webhook_processing', 'webhook_routing', 'completed')} "
+ f"Webhook processing completed successfully: ping - {token_metrics}",
+ )
return {"status": requests.codes.ok, "message": "pong"}
if self.github_event == "push":
- self.logger.step(f"{self.log_prefix} Processing push event") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('webhook_processing', 'webhook_routing', 'processing')} "
+ f"Processing push event",
+ )
self.logger.debug(f"{self.log_prefix} {event_log}")
- return await PushHandler(github_webhook=self).process_push_webhook_data()
+ await PushHandler(github_webhook=self).process_push_webhook_data()
+ token_metrics = await self._get_token_metrics()
+ self.logger.success( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('webhook_processing', 'webhook_routing', 'completed')} "
+ f"Webhook processing completed successfully: push - {token_metrics}",
+ )
+ return None
+
+ pull_request = await self.get_pull_request()
+ if pull_request:
+ # Log how we got the pull request (for workflow tracking)
+ if self.github_event == "pull_request":
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('webhook_processing', 'webhook_routing', 'processing')} "
+ f"Initializing pull request from webhook payload",
+ )
+ else:
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('webhook_processing', 'webhook_routing', 'processing')} "
+ f"Fetched pull request data via API (event: {self.github_event})",
+ )
- if pull_request := await self.get_pull_request():
self.log_prefix = self.prepare_log_prefix(pull_request=pull_request)
- self.logger.step(f"{self.log_prefix} Processing pull request event: {event_log}") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('webhook_processing', 'webhook_routing', 'processing')} "
+ f"Processing pull request event: {event_log}",
+ )
self.logger.debug(f"{self.log_prefix} {event_log}")
- if pull_request.draft:
- self.logger.step(f"{self.log_prefix} Pull request is draft, skipping processing") # type: ignore
+ if await asyncio.to_thread(lambda: pull_request.draft):
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('webhook_processing', 'webhook_routing', 'processing')} "
+ f"Pull request is draft, skipping processing",
+ )
self.logger.debug(f"{self.log_prefix} Pull request is draft, doing nothing")
+ token_metrics = await self._get_token_metrics()
+ self.logger.success( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('webhook_processing', 'webhook_routing', 'completed')} "
+ f"Webhook processing completed successfully: draft PR (skipped) - {token_metrics}",
+ )
return None
- self.logger.step(f"{self.log_prefix} Initializing pull request data") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('webhook_processing', 'webhook_routing', 'processing')} "
+ f"Initializing pull request data",
+ )
self.last_commit = await self._get_last_commit(pull_request=pull_request)
self.parent_committer = pull_request.user.login
self.last_committer = getattr(self.last_commit.committer, "login", self.parent_committer)
if self.github_event == "issue_comment":
- self.logger.step(f"{self.log_prefix} Initializing OWNERS file handler for issue comment") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('webhook_processing', 'webhook_routing', 'processing')} "
+ f"Initializing OWNERS file handler for issue comment",
+ )
owners_file_handler = OwnersFileHandler(github_webhook=self)
owners_file_handler = await owners_file_handler.initialize(pull_request=pull_request)
- self.logger.step(f"{self.log_prefix} Processing issue comment with IssueCommentHandler") # type: ignore
- return await IssueCommentHandler(
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('webhook_processing', 'webhook_routing', 'processing')} "
+ f"Processing issue comment with IssueCommentHandler",
+ )
+ await IssueCommentHandler(
github_webhook=self, owners_file_handler=owners_file_handler
).process_comment_webhook_data(pull_request=pull_request)
+ token_metrics = await self._get_token_metrics()
+ self.logger.success( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('webhook_processing', 'webhook_routing', 'completed')} "
+ f"Webhook processing completed successfully: issue_comment - {token_metrics}",
+ )
+ return None
elif self.github_event == "pull_request":
- self.logger.step(f"{self.log_prefix} Initializing OWNERS file handler for pull request") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('webhook_processing', 'webhook_routing', 'processing')} "
+ f"Initializing OWNERS file handler for pull request",
+ )
owners_file_handler = OwnersFileHandler(github_webhook=self)
owners_file_handler = await owners_file_handler.initialize(pull_request=pull_request)
- self.logger.step(f"{self.log_prefix} Processing pull request with PullRequestHandler") # type: ignore
- return await PullRequestHandler(
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('webhook_processing', 'webhook_routing', 'processing')} "
+ f"Processing pull request with PullRequestHandler",
+ )
+ await PullRequestHandler(
github_webhook=self, owners_file_handler=owners_file_handler
).process_pull_request_webhook_data(pull_request=pull_request)
+ token_metrics = await self._get_token_metrics()
+ self.logger.success( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('webhook_processing', 'webhook_routing', 'completed')} "
+ f"Webhook processing completed successfully: pull_request - {token_metrics}",
+ )
+ return None
elif self.github_event == "pull_request_review":
- self.logger.step(f"{self.log_prefix} Initializing OWNERS file handler for pull request review") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('webhook_processing', 'webhook_routing', 'processing')} "
+ f"Initializing OWNERS file handler for pull request review",
+ )
owners_file_handler = OwnersFileHandler(github_webhook=self)
owners_file_handler = await owners_file_handler.initialize(pull_request=pull_request)
- self.logger.step(f"{self.log_prefix} Processing pull request review with PullRequestReviewHandler") # type: ignore
- return await PullRequestReviewHandler(
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('webhook_processing', 'webhook_routing', 'processing')} "
+ f"Processing pull request review with PullRequestReviewHandler",
+ )
+ await PullRequestReviewHandler(
github_webhook=self, owners_file_handler=owners_file_handler
).process_pull_request_review_webhook_data(
pull_request=pull_request,
)
+ token_metrics = await self._get_token_metrics()
+ self.logger.success( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('webhook_processing', 'webhook_routing', 'completed')} "
+ f"Webhook processing completed successfully: pull_request_review - {token_metrics}",
+ )
+ return None
elif self.github_event == "check_run":
- self.logger.step(f"{self.log_prefix} Initializing OWNERS file handler for check run") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('webhook_processing', 'webhook_routing', 'processing')} "
+ f"Initializing OWNERS file handler for check run",
+ )
owners_file_handler = OwnersFileHandler(github_webhook=self)
owners_file_handler = await owners_file_handler.initialize(pull_request=pull_request)
- self.logger.step(f"{self.log_prefix} Processing check run with CheckRunHandler") # type: ignore
- if await CheckRunHandler(
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('webhook_processing', 'webhook_routing', 'processing')} "
+ f"Processing check run with CheckRunHandler",
+ )
+ handled = await CheckRunHandler(
github_webhook=self, owners_file_handler=owners_file_handler
- ).process_pull_request_check_run_webhook_data(pull_request=pull_request):
+ ).process_pull_request_check_run_webhook_data(pull_request=pull_request)
+ if handled:
if self.hook_data["check_run"]["name"] != CAN_BE_MERGED_STR:
- self.logger.step(f"{self.log_prefix} Checking if pull request can be merged after check run") # type: ignore
- return await PullRequestHandler(
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} "
+ f"{format_task_fields('webhook_processing', 'webhook_routing', 'processing')} "
+ f"Checking if pull request can be merged after check run",
+ )
+ await PullRequestHandler(
github_webhook=self, owners_file_handler=owners_file_handler
).check_if_can_be_merged(pull_request=pull_request)
+ # Log completion regardless of whether check run was processed or skipped
+ token_metrics = await self._get_token_metrics()
+ self.logger.success( # type: ignore[attr-defined]
+ f"{self.log_prefix} "
+ f"{format_task_fields('webhook_processing', 'webhook_routing', 'completed')} "
+ f"Webhook processing completed successfully: check_run - {token_metrics}",
+ )
+ return None
+
+ else:
+ # Log warning when no PR found
+ self.logger.warning(
+ f"{self.log_prefix} "
+ f"{format_task_fields('webhook_processing', 'webhook_routing', 'skipped')} "
+ f"No pull request found for {self.github_event} event - skipping processing"
+ )
+ token_metrics = await self._get_token_metrics()
+ self.logger.success( # type: ignore[attr-defined]
+ f"{self.log_prefix} "
+ f"{format_task_fields('webhook_processing', 'webhook_routing', 'completed')} "
+ f"Webhook processing completed: no PR found - {token_metrics}"
+ )
+ return None
- @property
def add_api_users_to_auto_verified_and_merged_users(self) -> None:
apis_and_tokens = get_apis_and_tokes_from_config(config=self.config)
for _api, _ in apis_and_tokens:
@@ -231,8 +400,19 @@ def _repo_data_from_config(self, repository_config: dict[str, Any]) -> None:
self.container_repository: str = self.build_and_push_container["repository"]
self.dockerfile: str = self.build_and_push_container.get("dockerfile", "Dockerfile")
self.container_tag: str = self.build_and_push_container.get("tag", "latest")
- self.container_build_args: str = self.build_and_push_container.get("build-args", "")
- self.container_command_args: str = self.build_and_push_container.get("args", "")
+ _build_args = self.build_and_push_container.get("build-args", [])
+ _cmd_args = self.build_and_push_container.get("args", [])
+ # Normalize to lists
+ if isinstance(_build_args, str):
+ _build_args = [a for a in shlex.split(_build_args) if a]
+ elif not isinstance(_build_args, list):
+ _build_args = []
+ if isinstance(_cmd_args, str):
+ _cmd_args = [a for a in shlex.split(_cmd_args) if a]
+ elif not isinstance(_cmd_args, list):
+ _cmd_args = []
+ self.container_build_args: list[str] = [str(a) for a in _build_args]
+ self.container_command_args: list[str] = [str(a) for a in _cmd_args]
self.container_release: bool = self.build_and_push_container.get("release", False)
self.pre_commit: bool = self.config.get_value(
@@ -266,29 +446,52 @@ def _repo_data_from_config(self, repository_config: dict[str, Any]) -> None:
async def get_pull_request(self, number: int | None = None) -> PullRequest | None:
if number:
+ self.logger.debug(f"{self.log_prefix} Attempting to get PR by number: {number}")
return await asyncio.to_thread(self.repository.get_pull, number)
- for _number in extract_key_from_dict(key="number", _dict=self.hook_data):
- try:
- return await asyncio.to_thread(self.repository.get_pull, _number)
- except GithubException:
- continue
+ # Try to get PR number from hook_data
+ self.logger.debug(f"{self.log_prefix} Attempting to get PR from webhook payload")
+ pr_data = self.hook_data.get("pull_request") or self.hook_data.get("issue", {})
+ if pr_data and isinstance(pr_data, dict):
+ pr_number = pr_data.get("number")
+ if pr_number:
+ self.logger.debug(f"{self.log_prefix} Found PR number in payload: {pr_number}")
+ try:
+ return await asyncio.to_thread(self.repository.get_pull, pr_number)
+ except GithubException as ex:
+ self.logger.debug(f"{self.log_prefix} Failed to get PR {pr_number} from payload: {ex}")
+ else:
+ self.logger.debug(f"{self.log_prefix} No PR number found in payload")
+ else:
+ self.logger.debug(f"{self.log_prefix} No PR data in webhook payload")
commit: dict[str, Any] = self.hook_data.get("commit", {})
if commit:
+ self.logger.debug(f"{self.log_prefix} Attempting to get PR from commit SHA: {commit.get('sha', 'unknown')}")
commit_obj = await asyncio.to_thread(self.repository.get_commit, commit["sha"])
with contextlib.suppress(Exception):
_pulls = await asyncio.to_thread(commit_obj.get_pulls)
- return _pulls[0]
+ if _pulls:
+ self.logger.debug(f"{self.log_prefix} Found PR from commit SHA: {_pulls[0].number}")
+ return _pulls[0]
+ self.logger.debug(f"{self.log_prefix} No PR found for commit SHA")
+ else:
+ self.logger.debug(f"{self.log_prefix} No commit data in webhook payload")
if self.github_event == "check_run":
+ head_sha = self.hook_data["check_run"]["head_sha"]
+ self.logger.debug(f"{self.log_prefix} Searching open PRs for check_run head SHA: {head_sha}")
for _pull_request in await asyncio.to_thread(self.repository.get_pulls, state="open"):
- if _pull_request.head.sha == self.hook_data["check_run"]["head_sha"]:
+ if _pull_request.head.sha == head_sha:
self.logger.debug(
- f"{self.log_prefix} Found pull request {_pull_request.title} [{_pull_request.number}] for check run {self.hook_data['check_run']['name']}"
+ f"{self.log_prefix} Found pull request {_pull_request.title} "
+ f"[{_pull_request.number}] for check run "
+ f"{self.hook_data['check_run']['name']}"
)
return _pull_request
+ self.logger.debug(f"{self.log_prefix} No open PR found matching check_run head SHA")
+ self.logger.debug(f"{self.log_prefix} All PR lookup strategies exhausted, no PR found")
return None
async def _get_last_commit(self, pull_request: PullRequest) -> Commit:
@@ -328,19 +531,6 @@ def container_repository_and_tag(
self.logger.error(f"{self.log_prefix} container tag not found")
return None
- def send_slack_message(self, message: str, webhook_url: str) -> None:
- slack_data: dict[str, str] = {"text": message}
- self.logger.info(f"{self.log_prefix} Sending message to slack: {message}")
- response: requests.Response = requests.post(
- webhook_url,
- data=json.dumps(slack_data),
- headers={"Content-Type": "application/json"},
- )
- if response.status_code != 200:
- raise ValueError(
- f"Request to slack returned an error {response.status_code} with the following message: {response.text}"
- )
-
@property
def _current_pull_request_supported_retest(self) -> list[str]:
current_pull_request_supported_retest: list[str] = []
@@ -360,3 +550,19 @@ def _current_pull_request_supported_retest(self) -> list[str]:
if self.conventional_title:
current_pull_request_supported_retest.append(CONVENTIONAL_TITLE_STR)
return current_pull_request_supported_retest
+
+ def __del__(self) -> None:
+ """Cleanup temporary clone directory on object destruction.
+
+ This ensures the base temp directory created by tempfile.mkdtemp() is removed
+ when the webhook handler is destroyed, preventing temp directory leaks.
+ The subdirectories (created with a uuid4-based suffix) are cleaned up by the
+ _prepare_cloned_repo_dir context manager in handlers.
+ """
+ if hasattr(self, "clone_repo_dir") and os.path.exists(self.clone_repo_dir):
+ try:
+ shutil.rmtree(self.clone_repo_dir, ignore_errors=True)
+ if hasattr(self, "logger"):
+ self.logger.debug(f"Cleaned up temp directory: {self.clone_repo_dir}")
+ except Exception:
+ pass # Ignore errors during cleanup
diff --git a/webhook_server/libs/handlers/__init__.py b/webhook_server/libs/handlers/__init__.py
new file mode 100644
index 000000000..bb6c967ad
--- /dev/null
+++ b/webhook_server/libs/handlers/__init__.py
@@ -0,0 +1 @@
+"""Handler modules for processing GitHub webhook events."""
diff --git a/webhook_server/libs/check_run_handler.py b/webhook_server/libs/handlers/check_run_handler.py
similarity index 76%
rename from webhook_server/libs/check_run_handler.py
rename to webhook_server/libs/handlers/check_run_handler.py
index a05a14660..a95b1559f 100644
--- a/webhook_server/libs/check_run_handler.py
+++ b/webhook_server/libs/handlers/check_run_handler.py
@@ -5,8 +5,8 @@
from github.PullRequest import PullRequest
from github.Repository import Repository
-from webhook_server.libs.labels_handler import LabelsHandler
-from webhook_server.libs.owners_files_handler import OwnersFileHandler
+from webhook_server.libs.handlers.labels_handler import LabelsHandler
+from webhook_server.libs.handlers.owners_files_handler import OwnersFileHandler
from webhook_server.utils.constants import (
AUTOMERGE_LABEL_STR,
BUILD_CONTAINER_STR,
@@ -22,6 +22,7 @@
TOX_STR,
VERIFIED_LABEL_STR,
)
+from webhook_server.utils.helpers import format_task_fields, strip_ansi_codes
if TYPE_CHECKING:
from webhook_server.libs.github_api import GithubWebhook
@@ -46,18 +47,28 @@ async def process_pull_request_check_run_webhook_data(self, pull_request: PullRe
_check_run: dict[str, Any] = self.hook_data["check_run"]
check_run_name: str = _check_run["name"]
- self.logger.step(f"{self.log_prefix} Processing check run: {check_run_name}") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('check_run', 'ci_check', 'processing')} "
+ f"Processing check run: {check_run_name}",
+ )
if self.hook_data.get("action", "") != "completed":
self.logger.debug(
- f"{self.log_prefix} check run {check_run_name} action is {self.hook_data.get('action', 'N/A')} and not completed, skipping"
+ f"{self.log_prefix} check run {check_run_name} action is "
+ f"{self.hook_data.get('action', 'N/A')} and not completed, skipping"
+ )
+ # Log completion - task_status reflects the result of our action (skipping is acceptable)
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('check_run', 'ci_check', 'completed')} "
+ f"Processing check run: {check_run_name} (action not completed - skipped)",
)
return False
check_run_status: str = _check_run["status"]
check_run_conclusion: str = _check_run["conclusion"]
self.logger.debug(
- f"{self.log_prefix} processing check_run - Name: {check_run_name} Status: {check_run_status} Conclusion: {check_run_conclusion}"
+ f"{self.log_prefix} processing check_run - Name: {check_run_name} "
+ f"Status: {check_run_status} Conclusion: {check_run_conclusion}"
)
if check_run_name == CAN_BE_MERGED_STR:
@@ -66,24 +77,55 @@ async def process_pull_request_check_run_webhook_data(self, pull_request: PullRe
label=AUTOMERGE_LABEL_STR, pull_request=pull_request
):
try:
- self.logger.step(f"{self.log_prefix} Executing auto-merge for PR #{pull_request.number}") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('check_run', 'automerge', 'processing')} "
+ f"Executing auto-merge for PR #{pull_request.number}",
+ )
await asyncio.to_thread(pull_request.merge, merge_method="SQUASH")
- self.logger.step(f"{self.log_prefix} Auto-merge completed successfully") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('check_run', 'automerge', 'completed')} "
+ f"Auto-merge completed successfully",
+ )
self.logger.info(
f"{self.log_prefix} Successfully auto-merged pull request #{pull_request.number}"
)
+ # Log completion for main check_run processing
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('check_run', 'ci_check', 'completed')} "
+ f"Processing check run: {check_run_name} (auto-merged)",
+ )
return False
except Exception as ex:
self.logger.error(
f"{self.log_prefix} Failed to auto-merge pull request #{pull_request.number}: {ex}"
)
+ # Log failure for automerge
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('check_run', 'automerge', 'failed')} "
+ f"Failed to auto-merge PR #{pull_request.number}: {ex}",
+ )
# Continue processing to allow manual intervention
+ # Log completion for main check_run processing (continuing after failed automerge)
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('check_run', 'ci_check', 'completed')} "
+ f"Processing check run: {check_run_name} (auto-merge failed, continuing)",
+ )
return True
else:
self.logger.debug(f"{self.log_prefix} check run is {CAN_BE_MERGED_STR}, skipping")
+ # Log completion - task_status reflects the result of our action
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('check_run', 'ci_check', 'completed')} "
+ f"Processing check run: {check_run_name} (skipped - conditions not met)",
+ )
return False
+ # Log completion - task_status reflects the result of our action
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('check_run', 'ci_check', 'completed')} "
+ f"Processing check run: {check_run_name} (completed)",
+ )
return True
async def set_verify_check_queued(self) -> None:
@@ -218,14 +260,27 @@ async def set_check_run_status(
msg: str = f"{self.log_prefix} check run {check_run} status: {status or conclusion}"
# Log workflow steps for check run status changes
+ # task_status reflects the result of our action, not what we're setting the check to
if status == QUEUED_STR:
- self.logger.step(f"{self.log_prefix} Setting {check_run} check to queued") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('check_run', 'ci_check', 'completed')} "
+ f"Setting {check_run} check to queued",
+ )
elif status == IN_PROGRESS_STR:
- self.logger.step(f"{self.log_prefix} Setting {check_run} check to in-progress") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('check_run', 'ci_check', 'completed')} "
+ f"Setting {check_run} check to in-progress",
+ )
elif conclusion == SUCCESS_STR:
- self.logger.step(f"{self.log_prefix} Setting {check_run} check to success") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('check_run', 'ci_check', 'completed')} "
+ f"Setting {check_run} check to success",
+ )
elif conclusion == FAILURE_STR:
- self.logger.step(f"{self.log_prefix} Setting {check_run} check to failure") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('check_run', 'ci_check', 'failed')} "
+ f"Setting {check_run} check to failure",
+ )
try:
self.logger.debug(f"{self.log_prefix} Set check run status with {kwargs}")
@@ -240,12 +295,16 @@ async def set_check_run_status(
await asyncio.to_thread(self.github_webhook.repository_by_github_app.create_check_run, **kwargs)
def get_check_run_text(self, err: str, out: str) -> str:
- total_len: int = len(err) + len(out)
+ # Strip ANSI escape codes from output to prevent scrambled characters in GitHub UI
+ err_clean = strip_ansi_codes(err)
+ out_clean = strip_ansi_codes(out)
+
+ total_len: int = len(err_clean) + len(out_clean)
if total_len > 65534: # GitHub limit is 65535 characters
- _output = f"```\n{err}\n\n{out}\n```"[:65534]
+ _output = f"```\n{err_clean}\n\n{out_clean}\n```"[:65534]
else:
- _output = f"```\n{err}\n\n{out}\n```"
+ _output = f"```\n{err_clean}\n\n{out_clean}\n```"
_hased_str = "*****"
@@ -302,11 +361,11 @@ async def required_check_failed_or_no_status(
if failed_check_run not in check_runs_in_progress
]
msg += f"Some check runs failed: {', '.join(exclude_in_progress)}\n"
- self.logger.debug(f"failed_check_runs: {failed_check_runs}")
+ self.logger.debug(f"{self.log_prefix} failed_check_runs: {failed_check_runs}")
if no_status_check_runs:
msg += f"Some check runs not started: {', '.join(no_status_check_runs)}\n"
- self.logger.debug(f"no_status_check_runs: {no_status_check_runs}")
+ self.logger.debug(f"{self.log_prefix} no_status_check_runs: {no_status_check_runs}")
return msg
@@ -343,7 +402,7 @@ async def get_branch_required_status_checks(self, pull_request: PullRequest) ->
pull_request_branch = await asyncio.to_thread(self.repository.get_branch, pull_request.base.ref)
branch_protection = await asyncio.to_thread(pull_request_branch.get_protection)
branch_required_status_checks = branch_protection.required_status_checks.contexts
- self.logger.debug(f"branch_required_status_checks: {branch_required_status_checks}")
+ self.logger.debug(f"{self.log_prefix} branch_required_status_checks: {branch_required_status_checks}")
return branch_required_status_checks
async def required_check_in_progress(
diff --git a/webhook_server/libs/issue_comment_handler.py b/webhook_server/libs/handlers/issue_comment_handler.py
similarity index 73%
rename from webhook_server/libs/issue_comment_handler.py
rename to webhook_server/libs/handlers/issue_comment_handler.py
index 2f9877b77..19aa8ba7d 100644
--- a/webhook_server/libs/issue_comment_handler.py
+++ b/webhook_server/libs/handlers/issue_comment_handler.py
@@ -2,16 +2,17 @@
import asyncio
from asyncio import Task
-from typing import TYPE_CHECKING, Any, Callable, Coroutine, Union
+from collections.abc import Callable, Coroutine
+from typing import TYPE_CHECKING, Any
from github.PullRequest import PullRequest
from github.Repository import Repository
-from webhook_server.libs.check_run_handler import CheckRunHandler
-from webhook_server.libs.labels_handler import LabelsHandler
-from webhook_server.libs.owners_files_handler import OwnersFileHandler
-from webhook_server.libs.pull_request_handler import PullRequestHandler
-from webhook_server.libs.runner_handler import RunnerHandler
+from webhook_server.libs.handlers.check_run_handler import CheckRunHandler
+from webhook_server.libs.handlers.labels_handler import LabelsHandler
+from webhook_server.libs.handlers.owners_files_handler import OwnersFileHandler
+from webhook_server.libs.handlers.pull_request_handler import PullRequestHandler
+from webhook_server.libs.handlers.runner_handler import RunnerHandler
from webhook_server.utils.constants import (
AUTOMERGE_LABEL_STR,
BUILD_AND_PUSH_CONTAINER_STR,
@@ -33,13 +34,14 @@
VERIFIED_LABEL_STR,
WIP_STR,
)
+from webhook_server.utils.helpers import format_task_fields
if TYPE_CHECKING:
from webhook_server.libs.github_api import GithubWebhook
class IssueCommentHandler:
- def __init__(self, github_webhook: "GithubWebhook", owners_file_handler: OwnersFileHandler):
+ def __init__(self, github_webhook: GithubWebhook, owners_file_handler: OwnersFileHandler):
self.github_webhook = github_webhook
self.owners_file_handler = owners_file_handler
@@ -60,35 +62,97 @@ def __init__(self, github_webhook: "GithubWebhook", owners_file_handler: OwnersF
async def process_comment_webhook_data(self, pull_request: PullRequest) -> None:
comment_action = self.hook_data["action"]
- self.logger.step(f"{self.log_prefix} Starting issue comment processing: action={comment_action}") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('issue_comment', 'pr_management', 'started')} "
+ f"Starting issue comment processing: action={comment_action}",
+ )
if comment_action in ("edited", "deleted"):
- self.logger.step(f"{self.log_prefix} Skipping comment processing: action is {comment_action}") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('issue_comment', 'pr_management', 'processing')} "
+ f"Skipping comment processing: action is {comment_action}",
+ )
self.logger.debug(f"{self.log_prefix} Not processing comment. action is {comment_action}")
+ # Log completion - task_status reflects the result of our action (skipping is acceptable)
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('issue_comment', 'pr_management', 'completed')} "
+ f"Skipping comment processing: action is {comment_action} (completed)",
+ )
return
- self.logger.step(f"{self.log_prefix} Processing issue comment for issue {self.hook_data['issue']['number']}") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('issue_comment', 'pr_management', 'processing')} "
+ f"Processing issue comment for issue {self.hook_data['issue']['number']}",
+ )
self.logger.info(f"{self.log_prefix} Processing issue {self.hook_data['issue']['number']}")
body: str = self.hook_data["comment"]["body"]
if self.github_webhook.issue_url_for_welcome_msg in body:
self.logger.debug(f"{self.log_prefix} Welcome message found in issue {pull_request.title}. Not processing")
+ # Log completion - task_status reflects the result of our action (skipping welcome message is acceptable)
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('issue_comment', 'pr_management', 'completed')} "
+ f"Processing issue comment for issue {self.hook_data['issue']['number']} (welcome message - skipped)",
+ )
return
_user_commands: list[str] = [_cmd.strip("/") for _cmd in body.strip().splitlines() if _cmd.startswith("/")]
if _user_commands:
- self.logger.step(f"{self.log_prefix} Found {len(_user_commands)} user commands: {_user_commands}") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('issue_comment', 'pr_management', 'processing')} "
+ f"Found {len(_user_commands)} user commands: {_user_commands}",
+ )
user_login: str = self.hook_data["sender"]["login"]
- for user_command in _user_commands:
- self.logger.step(f"{self.log_prefix} Executing user command: /{user_command} by {user_login}") # type: ignore
- await self.user_commands(
- pull_request=pull_request,
- command=user_command,
- reviewed_user=user_login,
- issue_comment_id=self.hook_data["comment"]["id"],
+
+ # Execute all commands in parallel
+ if _user_commands:
+ tasks: list[Coroutine[Any, Any, Any] | Task[Any]] = []
+ for user_command in _user_commands:
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} "
+ f"{format_task_fields('issue_comment', 'pr_management', 'processing')} "
+ f"Executing user command: /{user_command} by {user_login}",
+ )
+ task = asyncio.create_task(
+ self.user_commands(
+ pull_request=pull_request,
+ command=user_command,
+ reviewed_user=user_login,
+ issue_comment_id=self.hook_data["comment"]["id"],
+ )
+ )
+ tasks.append(task)
+
+ # Execute all commands concurrently
+ results = await asyncio.gather(*tasks, return_exceptions=True)
+
+ # Log results and handle exceptions
+ for idx, result in enumerate(results):
+ user_command = _user_commands[idx]
+ if isinstance(result, Exception):
+ self.logger.error(f"{self.log_prefix} Command execution failed: /{user_command} - {result}")
+ else:
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('issue_comment', 'pr_management', 'completed')} "
+ f"Executed user command: /{user_command} by {user_login}",
+ )
+
+ # Log completion for main processing - task_status reflects the result of our action
+ if not _user_commands:
+ # No commands found, log completion
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('issue_comment', 'pr_management', 'completed')} "
+ f"Processing issue comment for issue {self.hook_data['issue']['number']} (no commands found)",
+ )
+ else:
+ # Commands were processed, log completion
+ issue_num = self.hook_data["issue"]["number"]
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('issue_comment', 'pr_management', 'completed')} "
+ f"Processing issue comment for issue {issue_num} (processed {len(_user_commands)} commands)",
)
async def user_commands(
@@ -194,10 +258,12 @@ async def user_commands(
wip_for_title: str = f"{WIP_STR.upper()}:"
if remove:
await self.labels_handler._remove_label(pull_request=pull_request, label=WIP_STR)
- await asyncio.to_thread(pull_request.edit, title=pull_request.title.replace(wip_for_title, ""))
+ pr_title = await asyncio.to_thread(lambda: pull_request.title)
+ await asyncio.to_thread(pull_request.edit, title=pr_title.replace(wip_for_title, ""))
else:
await self.labels_handler._add_label(pull_request=pull_request, label=WIP_STR)
- await asyncio.to_thread(pull_request.edit, title=f"{wip_for_title} {pull_request.title}")
+ pr_title = await asyncio.to_thread(lambda: pull_request.title)
+ await asyncio.to_thread(pull_request.edit, title=f"{wip_for_title} {pr_title}")
elif _command == HOLD_LABEL_STR:
if reviewed_user not in self.owners_file_handler.all_pull_request_approvers:
@@ -240,7 +306,7 @@ async def _add_reviewer_by_user_comment(self, pull_request: PullRequest, reviewe
reviewer = reviewer.strip("@")
self.logger.info(f"{self.log_prefix} Adding reviewer {reviewer} by user comment")
repo_contributors = list(await asyncio.to_thread(self.repository.get_contributors))
- self.logger.debug(f"Repo contributors are: {repo_contributors}")
+ self.logger.debug(f"{self.log_prefix} Repo contributors are: {repo_contributors}")
for contributer in repo_contributors:
if contributer.login == reviewer:
@@ -266,7 +332,8 @@ async def process_cherry_pick_command(
except Exception:
_non_exits_target_branches_msg += f"Target branch `{_target_branch}` does not exist\n"
self.logger.debug(
- f"{self.log_prefix} Found target branches {_exits_target_branches} and not found {_non_exits_target_branches_msg}"
+ f"{self.log_prefix} Found target branches {_exits_target_branches} "
+ f"and not found {_non_exits_target_branches_msg}"
)
if _non_exits_target_branches_msg:
@@ -313,7 +380,7 @@ async def process_retest_command(
PYTHON_MODULE_INSTALL_STR: self.runner_handler.run_install_python_module,
CONVENTIONAL_TITLE_STR: self.runner_handler.run_conventional_title_check,
}
- self.logger.debug(f"Retest map is {_retests_to_func_map}")
+ self.logger.debug(f"{self.log_prefix} Retest map is {_retests_to_func_map}")
if not _target_tests:
msg = "No test defined to retest"
@@ -341,8 +408,8 @@ async def process_retest_command(
else:
_not_supported_retests.append(_test)
- self.logger.debug(f"Supported retests are {_supported_retests}")
- self.logger.debug(f"Not supported retests are {_not_supported_retests}")
+ self.logger.debug(f"{self.log_prefix} Supported retests are {_supported_retests}")
+ self.logger.debug(f"{self.log_prefix} Not supported retests are {_not_supported_retests}")
if _not_supported_retests:
msg = f"No {' '.join(_not_supported_retests)} configured for this repository"
@@ -351,7 +418,7 @@ async def process_retest_command(
await asyncio.to_thread(pull_request.create_issue_comment, msg)
if _supported_retests:
- tasks: list[Union[Coroutine[Any, Any, Any], Task[Any]]] = []
+ tasks: list[Coroutine[Any, Any, Any] | Task[Any]] = []
for _test in _supported_retests:
self.logger.debug(f"{self.log_prefix} running retest {_test}")
task = asyncio.create_task(_retests_to_func_map[_test](pull_request=pull_request))
diff --git a/webhook_server/libs/labels_handler.py b/webhook_server/libs/handlers/labels_handler.py
similarity index 78%
rename from webhook_server/libs/labels_handler.py
rename to webhook_server/libs/handlers/labels_handler.py
index 3493f1d70..f9aa59578 100644
--- a/webhook_server/libs/labels_handler.py
+++ b/webhook_server/libs/handlers/labels_handler.py
@@ -7,7 +7,7 @@
from github.Repository import Repository
from timeout_sampler import TimeoutWatch
-from webhook_server.libs.owners_files_handler import OwnersFileHandler
+from webhook_server.libs.handlers.owners_files_handler import OwnersFileHandler
from webhook_server.utils.constants import (
ADD_STR,
APPROVE_STR,
@@ -23,6 +23,7 @@
STATIC_LABELS_DICT,
WIP_STR,
)
+from webhook_server.utils.helpers import format_task_fields
if TYPE_CHECKING:
from webhook_server.libs.github_api import GithubWebhook
@@ -46,35 +47,79 @@ async def pull_request_labels_names(self, pull_request: PullRequest) -> list[str
return [lb.name for lb in labels]
async def _remove_label(self, pull_request: PullRequest, label: str) -> bool:
- self.logger.step(f"{self.log_prefix} Removing label '{label}' from PR") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('labels', 'pr_management', 'processing')} "
+ f"Removing label '{label}' from PR",
+ )
self.logger.debug(f"{self.log_prefix} Removing label {label}")
try:
if await self.label_exists_in_pull_request(pull_request=pull_request, label=label):
self.logger.info(f"{self.log_prefix} Removing label {label}")
await asyncio.to_thread(pull_request.remove_from_labels, label)
- return await self.wait_for_label(pull_request=pull_request, label=label, exists=False)
+ success = await self.wait_for_label(pull_request=pull_request, label=label, exists=False)
+ # Log completion - task_status reflects the result of our action
+ if success:
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('labels', 'pr_management', 'completed')} "
+ f"Removed label '{label}' from PR",
+ )
+ else:
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('labels', 'pr_management', 'failed')} "
+ f"Failed to remove label '{label}' from PR (timeout waiting for removal)",
+ )
+ return success
except Exception as exp:
self.logger.debug(f"{self.log_prefix} Failed to remove {label} label. Exception: {exp}")
+ # Log failure - task_status reflects the result of our action
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('labels', 'pr_management', 'failed')} "
+ f"Failed to remove label '{label}' from PR (exception: {exp})",
+ )
return False
+ # Label doesn't exist - this is an acceptable outcome (we don't check first to save API calls)
+        # Log completion - the remove attempt finished successfully even though the label was absent
self.logger.debug(f"{self.log_prefix} Label {label} not found and cannot be removed")
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('labels', 'pr_management', 'completed')} "
+ f"Removing label '{label}' from PR (label does not exist - acceptable)",
+ )
return False
async def _add_label(self, pull_request: PullRequest, label: str) -> None:
label = label.strip()
- self.logger.step(f"{self.log_prefix} Adding label '{label}' to PR") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('labels', 'pr_management', 'processing')} "
+ f"Adding label '{label}' to PR",
+ )
self.logger.debug(f"{self.log_prefix} Adding label {label}")
if len(label) > 49:
self.logger.debug(f"{label} is too long, not adding.")
+ # Log completion - task_status reflects the result of our action
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('labels', 'pr_management', 'completed')} "
+ f"Adding label '{label}' to PR (label too long - skipped)",
+ )
return
if await self.label_exists_in_pull_request(pull_request=pull_request, label=label):
self.logger.debug(f"{self.log_prefix} Label {label} already assign")
+ # Log completion - task_status reflects the result of our action (label already exists is acceptable)
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('labels', 'pr_management', 'completed')} "
+ f"Adding label '{label}' to PR (label already exists - acceptable)",
+ )
return
if label in STATIC_LABELS_DICT:
self.logger.info(f"{self.log_prefix} Adding pull request label {label}")
await asyncio.to_thread(pull_request.add_to_labels, label)
+ # Log completion - task_status reflects the result of our action
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('labels', 'pr_management', 'completed')} "
+ f"Added label '{label}' to PR",
+ )
return
color = self._get_label_color(label)
@@ -92,6 +137,11 @@ async def _add_label(self, pull_request: PullRequest, label: str) -> None:
self.logger.info(f"{self.log_prefix} Adding pull request label {label}")
await asyncio.to_thread(pull_request.add_to_labels, label)
await self.wait_for_label(pull_request=pull_request, label=label, exists=True)
+ # Log completion - task_status reflects the result of our action
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('labels', 'pr_management', 'completed')} "
+ f"Added label '{label}' to PR",
+ )
async def wait_for_label(self, pull_request: PullRequest, label: str, exists: bool) -> bool:
self.logger.debug(f"{self.log_prefix} waiting for label {label} to {'exists' if exists else 'not exists'}")
@@ -116,7 +166,7 @@ def _get_label_color(self, label: str) -> str:
size_name = label[len(SIZE_LABEL_PREFIX) :]
thresholds = self._get_custom_pr_size_thresholds()
- for threshold, label_name, color_hex in thresholds:
+ for _threshold, label_name, color_hex in thresholds:
if label_name == size_name:
return color_hex
@@ -211,7 +261,10 @@ def get_size(self, pull_request: PullRequest) -> str:
async def add_size_label(self, pull_request: PullRequest) -> None:
"""Add a size label to the pull request based on its additions and deletions."""
- self.logger.step(f"{self.log_prefix} Calculating and applying PR size label") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('labels', 'pr_management', 'processing')} "
+ f"Calculating and applying PR size label",
+ )
size_label = self.get_size(pull_request=pull_request)
self.logger.debug(f"{self.log_prefix} size label is {size_label}")
if not size_label:
@@ -232,7 +285,10 @@ async def add_size_label(self, pull_request: PullRequest) -> None:
await self._remove_label(pull_request=pull_request, label=exists_size_label[0])
await self._add_label(pull_request=pull_request, label=size_label)
- self.logger.step(f"{self.log_prefix} Applied size label '{size_label}' to PR") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('labels', 'pr_management', 'completed')} "
+ f"Applied size label '{size_label}' to PR",
+ )
async def label_by_user_comment(
self,
@@ -328,7 +384,7 @@ async def manage_reviewed_by_label(
f"{self.log_prefix} PR {pull_request.number} got unsupported review state: {review_state}"
)
- def wip_or_hold_lables_exists(self, labels: list[str]) -> str:
+ def wip_or_hold_labels_exists(self, labels: list[str]) -> str:
failure_output = ""
if HOLD_LABEL_STR in labels:
diff --git a/webhook_server/libs/owners_files_handler.py b/webhook_server/libs/handlers/owners_files_handler.py
similarity index 70%
rename from webhook_server/libs/owners_files_handler.py
rename to webhook_server/libs/handlers/owners_files_handler.py
index d90645c0c..79a37026f 100644
--- a/webhook_server/libs/owners_files_handler.py
+++ b/webhook_server/libs/handlers/owners_files_handler.py
@@ -1,6 +1,7 @@
import asyncio
+from collections.abc import Coroutine
from pathlib import Path
-from typing import TYPE_CHECKING, Any, Coroutine
+from typing import TYPE_CHECKING, Any
import yaml
from asyncstdlib import functools
@@ -8,10 +9,12 @@
from github.GithubException import GithubException
from github.NamedUser import NamedUser
from github.PaginatedList import PaginatedList
+from github.Permissions import Permissions
from github.PullRequest import PullRequest
from github.Repository import Repository
-from webhook_server.utils.constants import COMMAND_ADD_ALLOWED_USER_STR
+from webhook_server.utils.constants import COMMAND_ADD_ALLOWED_USER_STR, ROOT_APPROVERS_KEY
+from webhook_server.utils.helpers import format_task_fields
if TYPE_CHECKING:
from webhook_server.libs.github_api import GithubWebhook
@@ -25,14 +28,29 @@ def __init__(self, github_webhook: "GithubWebhook") -> None:
self.repository: Repository = self.github_webhook.repository
async def initialize(self, pull_request: PullRequest) -> "OwnersFileHandler":
- self.changed_files = await self.list_changed_files(pull_request=pull_request)
- self.all_repository_approvers_and_reviewers = await self.get_all_repository_approvers_and_reviewers(
- pull_request=pull_request
+ """Initialize handler with PR data (optimized with parallel operations).
+
+ Phase 1: Fetch independent data in parallel (changed files + OWNERS data)
+ Phase 2: Process derived data in parallel (approvers + reviewers)
+ """
+ # Phase 1: Parallel data fetching - independent GitHub API operations
+ self.changed_files, self.all_repository_approvers_and_reviewers = await asyncio.gather(
+ self.list_changed_files(pull_request=pull_request),
+ self.get_all_repository_approvers_and_reviewers(pull_request=pull_request),
+ )
+
+ # Phase 2: Parallel data processing - all depend on phase 1 but independent of each other
+ (
+ self.all_repository_approvers,
+ self.all_repository_reviewers,
+ self.all_pull_request_approvers,
+ self.all_pull_request_reviewers,
+ ) = await asyncio.gather(
+ self.get_all_repository_approvers(),
+ self.get_all_repository_reviewers(),
+ self.get_all_pull_request_approvers(),
+ self.get_all_pull_request_reviewers(),
)
- self.all_repository_approvers = await self.get_all_repository_approvers()
- self.all_repository_reviewers = await self.get_all_repository_reviewers()
- self.all_pull_request_approvers = await self.get_all_pull_request_approvers()
- self.all_pull_request_reviewers = await self.get_all_pull_request_reviewers()
return self
@@ -99,7 +117,6 @@ async def _get_file_content(self, content_path: str, pull_request: PullRequest)
return _path, content_path
- @functools.lru_cache
async def get_all_repository_approvers_and_reviewers(self, pull_request: PullRequest) -> dict[str, dict[str, Any]]:
# Dictionary mapping OWNERS file paths to their approvers and reviewers
_owners: dict[str, dict[str, Any]] = {}
@@ -169,7 +186,7 @@ async def get_all_repository_reviewers(self) -> list[str]:
async def get_all_pull_request_approvers(self) -> list[str]:
_approvers: list[str] = []
- changed_files = await self.owners_data_for_changed_files()
+ changed_files = await self.owners_data_for_changed_files
for list_of_approvers in changed_files.values():
for _approver in list_of_approvers.get("approvers", []):
@@ -182,7 +199,7 @@ async def get_all_pull_request_approvers(self) -> list[str]:
async def get_all_pull_request_reviewers(self) -> list[str]:
_reviewers: list[str] = []
- changed_files = await self.owners_data_for_changed_files()
+ changed_files = await self.owners_data_for_changed_files
for list_of_reviewers in changed_files.values():
for _reviewer in list_of_reviewers.get("reviewers", []):
@@ -190,16 +207,22 @@ async def get_all_pull_request_reviewers(self) -> list[str]:
_reviewers = list(set(_reviewers))
_reviewers.sort()
- self.logger.debug(f"Pull request reviewers are: {_reviewers}")
+ self.logger.debug(f"{self.log_prefix} Pull request reviewers are: {_reviewers}")
return _reviewers
+ @functools.cached_property
async def owners_data_for_changed_files(self) -> dict[str, dict[str, Any]]:
+ """Get OWNERS data for directories containing changed files.
+
+ Uses @functools.cached_property to cache results and avoid redundant computation
+ of folder matching logic across multiple calls during initialization.
+ """
self._ensure_initialized()
data: dict[str, dict[str, Any]] = {}
changed_folders = {Path(cf).parent for cf in self.changed_files}
- self.logger.debug(f"Changed folders: {changed_folders}")
+ self.logger.debug(f"{self.log_prefix} Changed folders: {changed_folders}")
changed_folder_match: list[Path] = []
@@ -219,7 +242,7 @@ async def owners_data_for_changed_files(self) -> dict[str, dict[str, Any]]:
f"{self.log_prefix} Matched changed folder: {changed_folder} with owners dir: {_owners_dir}"
)
if require_root_approvers is None:
- require_root_approvers = owners_data.get("root-approvers", True)
+ require_root_approvers = owners_data.get(ROOT_APPROVERS_KEY, True)
if require_root_approvers or require_root_approvers is None:
self.logger.debug(f"{self.log_prefix} require root_approvers")
@@ -231,43 +254,78 @@ async def owners_data_for_changed_files(self) -> dict[str, dict[str, Any]]:
if _folder == _changed_path or _changed_path in _folder.parents:
continue
else:
- self.logger.debug(f"Adding root approvers for {_folder}")
+ self.logger.debug(f"{self.log_prefix} Adding root approvers for {_folder}")
data["."] = self.all_repository_approvers_and_reviewers.get(".", {})
break
- self.logger.debug(f"Final owners data for changed files: {data}")
+ self.logger.debug(f"{self.log_prefix} Final owners data for changed files: {data}")
+
return data
async def assign_reviewers(self, pull_request: PullRequest) -> None:
self._ensure_initialized()
- self.logger.step(f"{self.log_prefix} Starting reviewer assignment based on OWNERS files") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('owners', 'pr_management', 'started')} "
+ f"Starting reviewer assignment based on OWNERS files",
+ )
self.logger.info(f"{self.log_prefix} Assign reviewers")
_to_add: list[str] = list(set(self.all_pull_request_reviewers))
self.logger.debug(f"{self.log_prefix} Reviewers to add: {', '.join(_to_add)}")
if _to_add:
- self.logger.step(f"{self.log_prefix} Assigning {len(_to_add)} reviewers to PR") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('owners', 'pr_management', 'processing')} "
+ f"Assigning {len(_to_add)} reviewers to PR",
+ )
else:
- self.logger.step(f"{self.log_prefix} No reviewers to assign") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('owners', 'pr_management', 'processing')} "
+ f"No reviewers to assign",
+ )
+ # Log completion - task_status reflects the result of our action (no reviewers to assign is acceptable)
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('owners', 'pr_management', 'completed')} "
+ f"No reviewers to assign (completed)",
+ )
return
+ assigned_count = 0
+ failed_count = 0
for reviewer in _to_add:
if reviewer != pull_request.user.login:
self.logger.debug(f"{self.log_prefix} Adding reviewer {reviewer}")
try:
await asyncio.to_thread(pull_request.create_review_request, [reviewer])
- self.logger.step(f"{self.log_prefix} Successfully assigned reviewer {reviewer}") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('owners', 'pr_management', 'processing')} "
+ f"Successfully assigned reviewer {reviewer}",
+ )
+ assigned_count += 1
except GithubException as ex:
- self.logger.step(f"{self.log_prefix} Failed to assign reviewer {reviewer}") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('owners', 'pr_management', 'failed')} "
+ f"Failed to assign reviewer {reviewer}",
+ )
self.logger.debug(f"{self.log_prefix} Failed to add reviewer {reviewer}. {ex}")
await asyncio.to_thread(
pull_request.create_issue_comment, f"{reviewer} can not be added as reviewer. {ex}"
)
-
- self.logger.step(f"{self.log_prefix} Reviewer assignment completed") # type: ignore
+ failed_count += 1
+
+ # Log completion - task_status reflects the result of our action
+ if failed_count > 0:
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('owners', 'pr_management', 'failed')} "
+ f"Assigned {assigned_count} reviewers to PR ({failed_count} failed)",
+ )
+ else:
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('owners', 'pr_management', 'completed')} "
+ f"Assigned {assigned_count} reviewers to PR",
+ )
async def is_user_valid_to_run_commands(self, pull_request: PullRequest, reviewed_user: str) -> bool:
self._ensure_initialized()
@@ -283,7 +341,7 @@ async def is_user_valid_to_run_commands(self, pull_request: PullRequest, reviewe
- {"\n - ".join(allowed_user_to_approve)}
"""
valid_users = await self.valid_users_to_run_commands
- self.logger.debug(f"Valid users to run commands: {valid_users}")
+ self.logger.debug(f"{self.log_prefix} Valid users to run commands: {valid_users}")
if reviewed_user not in valid_users:
for comment in [
@@ -319,23 +377,31 @@ async def valid_users_to_run_commands(self) -> set[str]:
async def get_all_repository_contributors(self) -> list[str]:
contributors = await self.repository_contributors
- return [val.login for val in contributors]
+ return await asyncio.to_thread(lambda: [val.login for val in contributors])
async def get_all_repository_collaborators(self) -> list[str]:
collaborators = await self.repository_collaborators
- return [val.login for val in collaborators]
+ return await asyncio.to_thread(lambda: [val.login for val in collaborators])
async def get_all_repository_maintainers(self) -> list[str]:
maintainers: list[str] = []
- for user in await self.repository_collaborators:
- permissions = user.permissions
- self.logger.debug(f"User {user.login} permissions: {permissions}")
+ # Fix #1: Convert PaginatedList to list in thread pool to avoid blocking during iteration
+ collaborators = await self.repository_collaborators
+ collaborators_list = await asyncio.to_thread(lambda: list(collaborators))
+
+ for user in collaborators_list:
+ # Fix #2: Wrap permissions access in thread pool (property makes blocking API call)
+ def get_user_permissions(u: NamedUser = user) -> Permissions:
+ return u.permissions
+
+ permissions = await asyncio.to_thread(get_user_permissions)
+ self.logger.debug(f"{self.log_prefix} User {user.login} permissions: {permissions}")
if permissions.admin or permissions.maintain:
maintainers.append(user.login)
- self.logger.debug(f"Maintainers: {maintainers}")
+ self.logger.debug(f"{self.log_prefix} Maintainers: {maintainers}")
return maintainers
@functools.cached_property
diff --git a/webhook_server/libs/pull_request_handler.py b/webhook_server/libs/handlers/pull_request_handler.py
similarity index 61%
rename from webhook_server/libs/pull_request_handler.py
rename to webhook_server/libs/handlers/pull_request_handler.py
index 34151e28f..d0d73dd78 100644
--- a/webhook_server/libs/pull_request_handler.py
+++ b/webhook_server/libs/handlers/pull_request_handler.py
@@ -1,15 +1,17 @@
from __future__ import annotations
import asyncio
-from typing import TYPE_CHECKING, Any, Coroutine
+from collections.abc import Coroutine
+from typing import TYPE_CHECKING, Any
+from github import GithubException
from github.PullRequest import PullRequest
from github.Repository import Repository
-from webhook_server.libs.check_run_handler import CheckRunHandler
-from webhook_server.libs.labels_handler import LabelsHandler
-from webhook_server.libs.owners_files_handler import OwnersFileHandler
-from webhook_server.libs.runner_handler import RunnerHandler
+from webhook_server.libs.handlers.check_run_handler import CheckRunHandler
+from webhook_server.libs.handlers.labels_handler import LabelsHandler
+from webhook_server.libs.handlers.owners_files_handler import OwnersFileHandler
+from webhook_server.libs.handlers.runner_handler import RunnerHandler
from webhook_server.utils.constants import (
APPROVED_BY_LABEL_PREFIX,
AUTOMERGE_LABEL_STR,
@@ -34,13 +36,14 @@
VERIFIED_LABEL_STR,
WIP_STR,
)
+from webhook_server.utils.helpers import format_task_fields
if TYPE_CHECKING:
from webhook_server.libs.github_api import GithubWebhook
class PullRequestHandler:
- def __init__(self, github_webhook: "GithubWebhook", owners_file_handler: OwnersFileHandler):
+ def __init__(self, github_webhook: GithubWebhook, owners_file_handler: OwnersFileHandler):
self.github_webhook = github_webhook
self.owners_file_handler = owners_file_handler
@@ -60,7 +63,10 @@ def __init__(self, github_webhook: "GithubWebhook", owners_file_handler: OwnersF
async def process_pull_request_webhook_data(self, pull_request: PullRequest) -> None:
hook_action: str = self.hook_data["action"]
- self.logger.step(f"{self.log_prefix} Starting pull request processing: action={hook_action}") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('pr_handler', 'pr_management', 'started')} "
+ f"Starting pull request processing: action={hook_action}",
+ )
self.logger.info(f"{self.log_prefix} hook_action is: {hook_action}")
self.logger.debug(f"{self.log_prefix} pull_request: {pull_request.title} ({pull_request.number})")
@@ -71,9 +77,18 @@ async def process_pull_request_webhook_data(self, pull_request: PullRequest) ->
if self.github_webhook.conventional_title and self.hook_data["changes"].get("title"):
self.logger.info(f"{self.log_prefix} PR title changed, running conventional title check")
await self.runner_handler.run_conventional_title_check(pull_request=pull_request)
+ # Log completion - task_status reflects the result of our action
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('pr_handler', 'pr_management', 'completed')} "
+ f"Starting pull request processing: action={hook_action} (completed)",
+ )
+ return
if hook_action in ("opened", "reopened", "ready_for_review"):
- self.logger.step(f"{self.log_prefix} Processing PR {hook_action} event: initializing new pull request") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('pr_handler', 'pr_management', 'processing')} "
+ f"Processing PR {hook_action} event: initializing new pull request",
+ )
tasks: list[Coroutine[Any, Any, Any]] = []
if hook_action in ("opened", "ready_for_review"):
@@ -91,9 +106,18 @@ async def process_pull_request_webhook_data(self, pull_request: PullRequest) ->
# Set auto merge only after all initialization of a new PR is done.
await self.set_pull_request_automerge(pull_request=pull_request)
+ # Log completion - task_status reflects the result of our action
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('pr_handler', 'pr_management', 'completed')} "
+ f"Starting pull request processing: action={hook_action} (completed)",
+ )
+ return
if hook_action == "synchronize":
- self.logger.step(f"{self.log_prefix} Processing PR synchronize event: handling new commits") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('pr_handler', 'pr_management', 'processing')} "
+ f"Processing PR synchronize event: handling new commits",
+ )
sync_tasks: list[Coroutine[Any, Any, Any]] = []
sync_tasks.append(self.process_opened_or_synchronize_pull_request(pull_request=pull_request))
@@ -104,16 +128,29 @@ async def process_pull_request_webhook_data(self, pull_request: PullRequest) ->
for result in results:
if isinstance(result, Exception):
self.logger.error(f"{self.log_prefix} Async task failed: {result}")
+ # Log completion - task_status reflects the result of our action
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('pr_handler', 'pr_management', 'completed')} "
+ f"Starting pull request processing: action={hook_action} (completed)",
+ )
+ return
if hook_action == "closed":
- self.logger.step(f"{self.log_prefix} Processing PR closed event: cleaning up resources") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('pr_handler', 'pr_management', 'processing')} "
+ f"Processing PR closed event: cleaning up resources",
+ )
await self.close_issue_for_merged_or_closed_pr(pull_request=pull_request, hook_action=hook_action)
await self.delete_remote_tag_for_merged_or_closed_pr(pull_request=pull_request)
if is_merged := pull_request_data.get("merged", False):
- self.logger.step(f"{self.log_prefix} PR was merged: processing post-merge tasks") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('pr_handler', 'pr_management', 'processing')} "
+ f"PR was merged: processing post-merge tasks",
+ )
self.logger.info(f"{self.log_prefix} PR is merged")
- for _label in pull_request.labels:
+ labels = await asyncio.to_thread(lambda: list(pull_request.labels))
+ for _label in labels:
_label_name = _label.name
if _label_name.startswith(CHERRY_PICK_LABEL_PREFIX):
await self.runner_handler.cherry_pick(
@@ -128,6 +165,12 @@ async def process_pull_request_webhook_data(self, pull_request: PullRequest) ->
)
await self.label_all_opened_pull_requests_merge_state_after_merged()
+ # Log completion - task_status reflects the result of our action
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('pr_handler', 'pr_management', 'completed')} "
+ f"Starting pull request processing: action={hook_action} (completed)",
+ )
+ return
if hook_action in ("labeled", "unlabeled"):
_check_for_merge: bool = False
@@ -136,13 +179,22 @@ async def process_pull_request_webhook_data(self, pull_request: PullRequest) ->
labeled = self.hook_data["label"]["name"]
labeled_lower = labeled.lower()
- self.logger.step(f"{self.log_prefix} Processing label {hook_action} event: {labeled}") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('pr_handler', 'pr_management', 'processing')} "
+ f"Processing label {hook_action} event: {labeled}",
+ )
if labeled_lower == CAN_BE_MERGED_STR:
+ # Log completion - task_status reflects the result of our action (skipping is acceptable)
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('pr_handler', 'pr_management', 'completed')} "
+ f"Starting pull request processing: action={hook_action} (skipped - can-be-merged label)",
+ )
return
self.logger.info(f"{self.log_prefix} PR {pull_request.number} {hook_action} with {labeled}")
- self.logger.debug(f"PR labels are {pull_request.labels}")
+ labels = await asyncio.to_thread(lambda: list(pull_request.labels))
+ self.logger.debug(f"{self.log_prefix} PR labels are {labels}")
_split_label = labeled.split(LABELS_SEPARATOR, 1)
@@ -161,11 +213,15 @@ async def process_pull_request_webhook_data(self, pull_request: PullRequest) ->
+ self.owners_file_handler.root_approvers
):
_check_for_merge = True
- self.logger.debug(f"PR approved by label action, will check for merge. user: {_user}")
+ self.logger.debug(
+ f"{self.log_prefix} PR approved by label action, will check for merge. user: {_user}"
+ )
if self.github_webhook.verified_job and labeled_lower == VERIFIED_LABEL_STR:
_check_for_merge = True
- self.logger.debug(f"PR verified label action, will check for merge. label: {labeled_lower}")
+ self.logger.debug(
+ f"{self.log_prefix} PR verified label action, will check for merge. label: {labeled_lower}"
+ )
if action_labeled:
await self.check_run_handler.set_verify_check_success()
@@ -174,10 +230,22 @@ async def process_pull_request_webhook_data(self, pull_request: PullRequest) ->
if labeled_lower in (WIP_STR, HOLD_LABEL_STR, AUTOMERGE_LABEL_STR):
_check_for_merge = True
- self.logger.debug(f"PR has {labeled_lower} label, will check for merge.")
+ self.logger.debug(f"{self.log_prefix} PR has {labeled_lower} label, will check for merge.")
if _check_for_merge:
await self.check_if_can_be_merged(pull_request=pull_request)
+ # Log completion - task_status reflects the result of our action
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('pr_handler', 'pr_management', 'completed')} "
+ f"Starting pull request processing: action={hook_action} (completed)",
+ )
+ return
+
+ # Log completion for any unhandled actions - task_status reflects the result of our action
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('pr_handler', 'pr_management', 'completed')} "
+ f"Starting pull request processing: action={hook_action} (no action handler - completed)",
+ )
async def set_wip_label_based_on_title(self, pull_request: PullRequest) -> None:
if pull_request.title.lower().startswith(f"{WIP_STR}:"):
@@ -198,15 +266,19 @@ def _prepare_welcome_comment(self) -> str:
is_auto_verified = self.github_webhook.parent_committer in self.github_webhook.auto_verified_and_merged_users
auto_verified_note = ""
if is_auto_verified:
- auto_verified_note = """
-
-> **Note**: You are an auto-verified user. Your PRs will be automatically verified and may be auto-merged when all requirements are met.
-"""
+ auto_verified_note = (
+ "\n"
+ "> **Note**: You are an auto-verified user. Your PRs will be automatically verified "
+ "and may be auto-merged when all requirements are met.\n"
+ )
# Check if issue creation is enabled
issue_creation_note = ""
if self.github_webhook.create_issue_for_new_pr:
- issue_creation_note = "* **Issue Creation**: A tracking issue is created for this PR and will be closed when the PR is merged or closed\n"
+ issue_creation_note = (
+ "* **Issue Creation**: A tracking issue is created for this PR "
+ "and will be closed when the PR is merged or closed\n"
+ )
else:
issue_creation_note = "* **Issue Creation**: Disabled for this repository\n"
@@ -218,9 +290,13 @@ def _prepare_welcome_comment(self) -> str:
This pull request will be automatically processed with the following features:{auto_verified_note}
### 🔄 Automatic Actions
-* **Reviewer Assignment**: Reviewers are automatically assigned based on the OWNERS file in the repository root
-* **Size Labeling**: PR size labels (XS, S, M, L, XL, XXL) are automatically applied based on changes
-{issue_creation_note}* **Pre-commit Checks**: [pre-commit](https://pre-commit.ci/) runs automatically if `.pre-commit-config.yaml` exists
+* **Reviewer Assignment**: Reviewers are automatically assigned based on the "
+ "OWNERS file in the repository root\n"
+ "* **Size Labeling**: PR size labels (XS, S, M, L, XL, XXL) are "
+ "automatically applied based on changes\n"
+ f"{issue_creation_note}"
+ "* **Pre-commit Checks**: [pre-commit](https://pre-commit.ci/) runs "
+ "automatically if `.pre-commit-config.yaml` exists\n"
* **Branch Labeling**: Branch-specific labels are applied to track the target branch
* **Auto-verification**: Auto-verified users have their PRs automatically marked as verified
@@ -344,18 +420,33 @@ async def label_all_opened_pull_requests_merge_state_after_merged(self) -> None:
self.logger.info(f"{self.log_prefix} Sleep for {time_sleep} seconds before getting all opened PRs")
await asyncio.sleep(time_sleep)
- for pull_request in self.repository.get_pulls(state="open"):
+ pulls = await asyncio.to_thread(lambda: list(self.repository.get_pulls(state="open")))
+ for pull_request in pulls:
self.logger.info(f"{self.log_prefix} check label pull request after merge")
await self.label_pull_request_by_merge_state(pull_request=pull_request)
async def delete_remote_tag_for_merged_or_closed_pr(self, pull_request: PullRequest) -> None:
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('tag_deletion', 'pr_management', 'processing')} "
+ f"Deleting remote tag for PR #{pull_request.number}",
+ )
self.logger.debug(f"{self.log_prefix} Checking if need to delete remote tag for {pull_request.number}")
if not self.github_webhook.build_and_push_container:
self.logger.info(f"{self.log_prefix} repository do not have container configured")
+ # Log completion - task_status reflects the result of our action (skipping is acceptable)
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('tag_deletion', 'pr_management', 'completed')} "
+ f"Deleting remote tag for PR #{pull_request.number} (skipped - container not configured)",
+ )
return
repository_full_tag = self.github_webhook.container_repository_and_tag(pull_request=pull_request)
if not repository_full_tag:
+ # Log completion - task_status reflects the result of our action (no tag to delete)
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('tag_deletion', 'pr_management', 'completed')} "
+ f"Deleting remote tag for PR #{pull_request.number} (no tag configured)",
+ )
return
pr_tag = repository_full_tag.split(":")[-1]
@@ -367,9 +458,157 @@ async def delete_remote_tag_for_merged_or_closed_pr(self, pull_request: PullRequ
f"{self.log_prefix} No registry host found in "
f"{self.github_webhook.container_repository}; skipping tag deletion"
)
+ # Log completion - task_status reflects the result of our action (skipping is acceptable)
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('tag_deletion', 'pr_management', 'completed')} "
+ f"Deleting remote tag for PR #{pull_request.number} (skipped - no registry host)",
+ )
return
registry_url = registry_info[0]
+
+ # Check if this is GitHub Container Registry (GHCR)
+ if registry_url == "ghcr.io":
+ # Use GitHub Packages API for GHCR
+ await self._delete_ghcr_tag_via_github_api(
+ pull_request=pull_request, repository_full_tag=repository_full_tag, pr_tag=pr_tag
+ )
+ else:
+ # Use regctl for other registries (Quay, Docker Hub, etc.)
+ await self._delete_registry_tag_via_regctl(
+ pull_request=pull_request,
+ repository_full_tag=repository_full_tag,
+ pr_tag=pr_tag,
+ registry_url=registry_url,
+ )
+
+ async def _delete_ghcr_tag_via_github_api(
+ self, pull_request: PullRequest, repository_full_tag: str, pr_tag: str
+ ) -> None:
+ """Delete GHCR tag using GitHub Packages REST API."""
+ if not self.github_webhook.github_api or not self.github_webhook.token:
+ # Log failure - task_status reflects the result of our action
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('tag_deletion', 'pr_management', 'failed')} "
+ f"Failed to delete tag: {repository_full_tag} (GitHub API not available)",
+ )
+ self.logger.error(f"{self.log_prefix} GitHub API or token not available for tag deletion")
+ return
+
+ # Extract organization and package name from container repository
+ # Format: ghcr.io/org/package-name -> org, package-name
+ # Format: ghcr.io/org/services/api-server -> org, services/api-server
+ registry_info = self.github_webhook.container_repository.split("/")
+ if len(registry_info) < 3:
+ # Log failure - task_status reflects the result of our action
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('tag_deletion', 'pr_management', 'failed')} "
+ f"Failed to delete tag: {repository_full_tag} (invalid repository format)",
+ )
+ self.logger.error(
+ f"{self.log_prefix} Invalid container repository format: {self.github_webhook.container_repository}"
+ )
+ return
+
+ owner_name = registry_info[1]
+ # Join all segments after the owner to support nested paths
+ package_name = "/".join(registry_info[2:])
+
+ try:
+ package_api_base: str | None = None
+ versions: list[dict[str, Any]] | None = None
+
+ # GHCR packages can live under organisations *and* personal scopes - try both.
+ for scope in ("orgs", "users"):
+ candidate_base = f"/{scope}/{owner_name}/packages/container/{package_name}"
+ try:
+ _, versions = await asyncio.to_thread(
+ self.github_webhook.github_api.requester.requestJsonAndCheck,
+ "GET",
+ f"{candidate_base}/versions",
+ )
+ package_api_base = candidate_base
+ break
+ except GithubException as ex:
+ if ex.status == 404:
+ continue
+ raise
+
+ if not versions or not package_api_base:
+ # Log completion - task_status reflects the result of our action (package not found is acceptable)
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('tag_deletion', 'pr_management', 'completed')} "
+ f"Deleting remote tag for PR #{pull_request.number} (package not found)",
+ )
+ self.logger.warning(
+ f"{self.log_prefix} Package {package_name} not found for owner {owner_name} on GHCR"
+ )
+ return
+ # Find version with matching tag
+ version_to_delete_id: int | None = None
+ for version in versions:
+ # Check metadata.tags for the tag we're looking for
+ metadata = version.get("metadata", {})
+ container_metadata = metadata.get("container", {})
+ version_tags = container_metadata.get("tags", [])
+ if pr_tag in version_tags:
+ version_to_delete_id = version["id"]
+ break
+
+ if not version_to_delete_id:
+ # Log completion - task_status reflects the result of our action (tag not found is acceptable)
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('tag_deletion', 'pr_management', 'completed')} "
+ f"Deleting remote tag for PR #{pull_request.number} (tag not found in package)",
+ )
+ self.logger.warning(f"{self.log_prefix} Tag {pr_tag} not found in package {package_name} versions")
+ return
+
+ # Delete the package version
+ # DELETE /{scope}/{owner}/packages/{package_type}/{package_name}/versions/{package_version_id}
+ delete_url = f"{package_api_base}/versions/{version_to_delete_id}"
+ try:
+ await asyncio.to_thread(
+ self.github_webhook.github_api.requester.requestJsonAndCheck, "DELETE", delete_url
+ )
+ except GithubException as ex:
+ if ex.status == 404:
+ # Version already deleted or doesn't exist - treat as success
+ self.logger.warning(
+ f"{self.log_prefix} Package version {version_to_delete_id} not found "
+ "(may have been already deleted)"
+ )
+ else:
+ raise
+
+ await asyncio.to_thread(
+ pull_request.create_issue_comment, f"Successfully removed PR tag: {repository_full_tag}."
+ )
+ # Log completion - task_status reflects the result of our action
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('tag_deletion', 'pr_management', 'completed')} "
+ f"Deleted remote tag: {repository_full_tag}",
+ )
+
+ except GithubException:
+ # Log failure - task_status reflects the result of our action
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('tag_deletion', 'pr_management', 'failed')} "
+ f"Failed to delete tag: {repository_full_tag}",
+ )
+ self.logger.exception(f"{self.log_prefix} Failed to delete GHCR tag: {repository_full_tag}")
+ except Exception:
+ # Log failure - task_status reflects the result of our action
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('tag_deletion', 'pr_management', 'failed')} "
+ f"Failed to delete tag: {repository_full_tag}",
+ )
+ self.logger.exception(f"{self.log_prefix} Failed to delete GHCR tag: {repository_full_tag}")
+
+ async def _delete_registry_tag_via_regctl(
+ self, pull_request: PullRequest, repository_full_tag: str, pr_tag: str, registry_url: str
+ ) -> None:
+ """Delete registry tag using regctl (for non-GHCR registries like Quay, Docker Hub)."""
reg_login_cmd = (
f"regctl registry login {registry_url} "
f"-u {self.github_webhook.container_repository_username} "
@@ -386,24 +625,46 @@ async def delete_remote_tag_for_merged_or_closed_pr(self, pull_request: PullRequ
if rc and out:
tag_del_cmd = f"regctl tag delete {repository_full_tag}"
- rc, _, _ = await self.runner_handler.run_podman_command(command=tag_del_cmd)
+ rc, del_out, del_err = await self.runner_handler.run_podman_command(command=tag_del_cmd)
if rc:
await asyncio.to_thread(
pull_request.create_issue_comment, f"Successfully removed PR tag: {repository_full_tag}."
)
+ # Log completion - task_status reflects the result of our action
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('tag_deletion', 'pr_management', 'completed')} "
+ f"Deleted remote tag: {repository_full_tag}",
+ )
else:
+ # Log failure - task_status reflects the result of our action
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('tag_deletion', 'pr_management', 'failed')} "
+ f"Failed to delete tag: {repository_full_tag}",
+ )
self.logger.error(
- f"{self.log_prefix} Failed to delete tag: {repository_full_tag}. OUT:{out}. ERR:{err}"
+ f"{self.log_prefix} Failed to delete tag: {repository_full_tag}. "
+ f"OUT:{del_out}. ERR:{del_err}"
)
else:
+ # Log completion - task_status reflects the result of our action (tag not found is acceptable)
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('tag_deletion', 'pr_management', 'completed')} "
+ f"Deleting remote tag for PR #{pull_request.number} (tag not found in registry)",
+ )
self.logger.warning(
- f"{self.log_prefix} {pr_tag} tag not found in registry {self.github_webhook.container_repository}. "
+ f"{self.log_prefix} {pr_tag} tag not found in registry "
+ f"{self.github_webhook.container_repository}. "
f"OUT:{out}. ERR:{err}"
)
finally:
await self.runner_handler.run_podman_command(command="regctl registry logout")
else:
+ # Log failure - task_status reflects the result of our action
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('tag_deletion', 'pr_management', 'failed')} "
+ f"Failed to delete tag: {repository_full_tag} (registry login failed)",
+ )
await asyncio.to_thread(
pull_request.create_issue_comment,
f"Failed to delete tag: {repository_full_tag}. Please delete it manually.",
@@ -423,10 +684,16 @@ async def close_issue_for_merged_or_closed_pr(self, pull_request: PullRequest, h
break
async def process_opened_or_synchronize_pull_request(self, pull_request: PullRequest) -> None:
- self.logger.step(f"{self.log_prefix} Starting PR processing workflow") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('pr_handler', 'pr_management', 'started')} "
+ f"Starting PR processing workflow",
+ )
# Stage 1: Initial setup and check queue tasks
- self.logger.step(f"{self.log_prefix} Stage: Initial setup and check queuing") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('pr_handler', 'pr_management', 'processing')} "
+ f"Stage: Initial setup and check queuing",
+ )
setup_tasks: list[Coroutine[Any, Any, Any]] = []
setup_tasks.append(self.owners_file_handler.assign_reviewers(pull_request=pull_request))
@@ -449,17 +716,24 @@ async def process_opened_or_synchronize_pull_request(self, pull_request: PullReq
if self.github_webhook.conventional_title:
setup_tasks.append(self.check_run_handler.set_conventional_title_queued())
- self.logger.step(f"{self.log_prefix} Executing setup tasks") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('pr_handler', 'pr_management', 'processing')} Executing setup tasks"
+ )
setup_results = await asyncio.gather(*setup_tasks, return_exceptions=True)
for result in setup_results:
if isinstance(result, Exception):
self.logger.error(f"{self.log_prefix} Setup task failed: {result}")
- self.logger.step(f"{self.log_prefix} Setup tasks completed") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('pr_handler', 'pr_management', 'completed')} Setup tasks completed"
+ )
# Stage 2: CI/CD execution tasks
- self.logger.step(f"{self.log_prefix} Stage: CI/CD execution") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('pr_handler', 'pr_management', 'processing')} "
+ f"Stage: CI/CD execution",
+ )
ci_tasks: list[Coroutine[Any, Any, Any]] = []
ci_tasks.append(self.runner_handler.run_tox(pull_request=pull_request))
@@ -470,14 +744,20 @@ async def process_opened_or_synchronize_pull_request(self, pull_request: PullReq
if self.github_webhook.conventional_title:
ci_tasks.append(self.runner_handler.run_conventional_title_check(pull_request=pull_request))
- self.logger.step(f"{self.log_prefix} Executing CI/CD tasks") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('pr_handler', 'pr_management', 'processing')} "
+ f"Executing CI/CD tasks",
+ )
ci_results = await asyncio.gather(*ci_tasks, return_exceptions=True)
for result in ci_results:
if isinstance(result, Exception):
self.logger.error(f"{self.log_prefix} CI/CD task failed: {result}")
- self.logger.step(f"{self.log_prefix} PR processing workflow completed") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('pr_handler', 'pr_management', 'completed')} "
+ f"PR processing workflow completed",
+ )
async def create_issue_for_new_pull_request(self, pull_request: PullRequest) -> None:
if not self.github_webhook.create_issue_for_new_pr:
@@ -524,7 +804,8 @@ async def set_pull_request_automerge(self, pull_request: PullRequest) -> None:
try:
if not pull_request.raw_data.get("auto_merge"):
self.logger.info(
- f"{self.log_prefix} will be merged automatically. owner: {self.github_webhook.parent_committer} "
+ f"{self.log_prefix} will be merged automatically. "
+ f"owner: {self.github_webhook.parent_committer} "
f"is part of auto merge enabled rules"
)
@@ -537,7 +818,8 @@ async def set_pull_request_automerge(self, pull_request: PullRequest) -> None:
async def remove_labels_when_pull_request_sync(self, pull_request: PullRequest) -> None:
tasks: list[Coroutine[Any, Any, Any]] = []
- for _label in pull_request.labels:
+ labels = await asyncio.to_thread(lambda: list(pull_request.labels))
+ for _label in labels:
_label_name = _label.name
if (
_label_name.startswith(APPROVED_BY_LABEL_PREFIX)
@@ -559,7 +841,7 @@ async def remove_labels_when_pull_request_sync(self, pull_request: PullRequest)
self.logger.error(f"{self.log_prefix} Async task failed: {result}")
async def label_pull_request_by_merge_state(self, pull_request: PullRequest) -> None:
- merge_state = pull_request.mergeable_state
+ merge_state = await asyncio.to_thread(lambda: pull_request.mergeable_state)
self.logger.debug(f"{self.log_prefix} Mergeable state is {merge_state}")
if merge_state == "unknown":
return
@@ -593,8 +875,9 @@ async def _process_verified_for_update_or_new_pull_request(self, pull_request: P
if self.github_webhook.parent_committer in self.github_webhook.auto_verified_and_merged_users:
self.logger.info(
- f"{self.log_prefix} Committer {self.github_webhook.parent_committer} is part of {self.github_webhook.auto_verified_and_merged_users}"
- ", Setting verified label"
+ f"{self.log_prefix} Committer {self.github_webhook.parent_committer} "
+ f"is part of {self.github_webhook.auto_verified_and_merged_users}, "
+ f"Setting verified label"
)
await self.labels_handler._add_label(pull_request=pull_request, label=VERIFIED_LABEL_STR)
await self.check_run_handler.set_verify_check_success()
@@ -607,13 +890,13 @@ async def _process_verified_for_update_or_new_pull_request(self, pull_request: P
async def add_pull_request_owner_as_assingee(self, pull_request: PullRequest) -> None:
try:
self.logger.info(f"{self.log_prefix} Adding PR owner as assignee")
- pull_request.add_to_assignees(pull_request.user.login)
+ await asyncio.to_thread(pull_request.add_to_assignees, pull_request.user.login)
except Exception as exp:
self.logger.debug(f"{self.log_prefix} Exception while adding PR owner as assignee: {exp}")
if self.owners_file_handler.root_approvers:
self.logger.debug(f"{self.log_prefix} Falling back to first approver as assignee")
- pull_request.add_to_assignees(self.owners_file_handler.root_approvers[0])
+ await asyncio.to_thread(pull_request.add_to_assignees, self.owners_file_handler.root_approvers[0])
async def check_if_can_be_merged(self, pull_request: PullRequest) -> None:
"""
@@ -627,8 +910,12 @@ async def check_if_can_be_merged(self, pull_request: PullRequest) -> None:
PR status is not 'dirty'.
PR has no changed requests from approvers.
"""
- self.logger.step(f"{self.log_prefix} Starting merge eligibility check") # type: ignore
- if self.skip_if_pull_request_already_merged(pull_request=pull_request):
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} "
+ f"{format_task_fields('pr_handler', 'pr_management', 'started')} "
+ f"Starting merge eligibility check"
+ )
+ if await self.skip_if_pull_request_already_merged(pull_request=pull_request):
self.logger.debug(f"{self.log_prefix} Pull request already merged")
return
@@ -647,7 +934,7 @@ async def check_if_can_be_merged(self, pull_request: PullRequest) -> None:
_labels = await self.labels_handler.pull_request_labels_names(pull_request=pull_request)
self.logger.debug(f"{self.log_prefix} check if can be merged. PR labels are: {_labels}")
- is_pr_mergable = pull_request.mergeable
+ is_pr_mergable = await asyncio.to_thread(lambda: pull_request.mergeable)
self.logger.debug(f"{self.log_prefix} PR mergeable is {is_pr_mergable}")
if not is_pr_mergable:
failure_output += f"PR is not mergeable: {is_pr_mergable}\n"
@@ -662,10 +949,10 @@ async def check_if_can_be_merged(self, pull_request: PullRequest) -> None:
failure_output += required_check_in_progress_failure_output
self.logger.debug(f"{self.log_prefix} required_check_in_progress_failure_output: {failure_output}")
- labels_failure_output = self.labels_handler.wip_or_hold_lables_exists(labels=_labels)
+ labels_failure_output = self.labels_handler.wip_or_hold_labels_exists(labels=_labels)
if labels_failure_output:
failure_output += labels_failure_output
- self.logger.debug(f"{self.log_prefix} wip_or_hold_lables_exists: {failure_output}")
+ self.logger.debug(f"{self.log_prefix} wip_or_hold_labels_exists: {failure_output}")
required_check_failed_failure_output = await self.check_run_handler.required_check_failed_or_no_status(
pull_request=pull_request,
@@ -689,16 +976,27 @@ async def check_if_can_be_merged(self, pull_request: PullRequest) -> None:
if not failure_output:
await self.labels_handler._add_label(pull_request=pull_request, label=CAN_BE_MERGED_STR)
await self.check_run_handler.set_merge_check_success()
-
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('pr_handler', 'pr_management', 'completed')} "
+ f"Merge eligibility check completed successfully",
+ )
self.logger.info(f"{self.log_prefix} Pull request can be merged")
return
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('pr_handler', 'pr_management', 'failed')} "
+ f"Merge eligibility check failed",
+ )
self.logger.debug(f"{self.log_prefix} cannot be merged: {failure_output}")
output["text"] = failure_output
await self.labels_handler._remove_label(pull_request=pull_request, label=CAN_BE_MERGED_STR)
await self.check_run_handler.set_merge_check_failure(output=output)
except Exception as ex:
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('pr_handler', 'pr_management', 'failed')} "
+ f"Merge eligibility check failed with exception",
+ )
self.logger.error(
f"{self.log_prefix} Failed to check if can be merged, set check run to {FAILURE_STR} {ex}"
)
@@ -709,7 +1007,7 @@ async def check_if_can_be_merged(self, pull_request: PullRequest) -> None:
async def _check_if_pr_approved(self, labels: list[str]) -> str:
self.logger.info(f"{self.log_prefix} Check if pull request is approved by pull request labels.")
- self.logger.debug(f"labels are {labels}")
+ self.logger.debug(f"{self.log_prefix} labels are {labels}")
error: str = ""
approved_by = []
@@ -720,11 +1018,11 @@ async def _check_if_pr_approved(self, labels: list[str]) -> str:
+ self.owners_file_handler.root_approvers.copy()
+ self.owners_file_handler.root_reviewers.copy()
)
- self.logger.debug(f"all_reviewers: {all_reviewers}")
+ self.logger.debug(f"{self.log_prefix} all_reviewers: {all_reviewers}")
all_reviewers_without_pr_owner = {
_reviewer for _reviewer in all_reviewers if _reviewer != self.github_webhook.parent_committer
}
- self.logger.debug(f"all_reviewers_without_pr_owner: {all_reviewers_without_pr_owner}")
+ self.logger.debug(f"{self.log_prefix} all_reviewers_without_pr_owner: {all_reviewers_without_pr_owner}")
all_reviewers_without_pr_owner_and_lgtmed = all_reviewers_without_pr_owner.copy()
@@ -734,17 +1032,17 @@ async def _check_if_pr_approved(self, labels: list[str]) -> str:
if LGTM_BY_LABEL_PREFIX.lower() in _label.lower() and reviewer in all_reviewers_without_pr_owner:
lgtm_count += 1
all_reviewers_without_pr_owner_and_lgtmed.remove(reviewer)
- self.logger.debug(f"lgtm_count: {lgtm_count}")
+ self.logger.debug(f"{self.log_prefix} lgtm_count: {lgtm_count}")
for _label in labels:
if APPROVED_BY_LABEL_PREFIX.lower() in _label.lower():
approved_by.append(_label.split(LABELS_SEPARATOR)[-1])
- self.logger.debug(f"approved_by: {approved_by}")
+ self.logger.debug(f"{self.log_prefix} approved_by: {approved_by}")
missing_approvers = list(set(self.owners_file_handler.all_pull_request_approvers.copy()))
- self.logger.debug(f"missing_approvers: {missing_approvers}")
- owners_data_changed_files = await self.owners_file_handler.owners_data_for_changed_files()
- self.logger.debug(f"owners_data_changed_files: {owners_data_changed_files}")
+ self.logger.debug(f"{self.log_prefix} missing_approvers: {missing_approvers}")
+ owners_data_changed_files = await self.owners_file_handler.owners_data_for_changed_files
+ self.logger.debug(f"{self.log_prefix} owners_data_changed_files: {owners_data_changed_files}")
# If any of root approvers is in approved_by list, the pull request is approved
for _approver in approved_by:
@@ -758,7 +1056,8 @@ async def _check_if_pr_approved(self, labels: list[str]) -> str:
for required_pr_approver in required_pr_approvers:
if required_pr_approver in approved_by:
- # Once we found approver in approved_by list, we remove all approvers from missing_approvers list for this owners file
+ # Once we found approver in approved_by list, we remove all approvers "
+ # from missing_approvers list for this owners file
for _approver in required_pr_approvers:
if _approver in missing_approvers:
missing_approvers.remove(_approver)
@@ -766,7 +1065,7 @@ async def _check_if_pr_approved(self, labels: list[str]) -> str:
break
missing_approvers = list(set(missing_approvers))
- self.logger.debug(f"missing_approvers after check: {missing_approvers}")
+ self.logger.debug(f"{self.log_prefix} missing_approvers after check: {missing_approvers}")
if missing_approvers:
error += f"Missing approved from approvers: {', '.join(missing_approvers)}\n"
@@ -774,12 +1073,16 @@ async def _check_if_pr_approved(self, labels: list[str]) -> str:
if lgtm_count < self.github_webhook.minimum_lgtm:
if lgtm_count == len(all_reviewers_without_pr_owner):
self.logger.debug(
- f"{self.log_prefix} minimum_lgtm is {self.github_webhook.minimum_lgtm}, but number of reviewers is {len(all_reviewers_without_pr_owner)}. PR approved."
+ f"{self.log_prefix} minimum_lgtm is {self.github_webhook.minimum_lgtm}, "
+ f"but number of reviewers is {len(all_reviewers_without_pr_owner)}. "
+ f"PR approved."
)
else:
+ reviewers_str = ", ".join(all_reviewers_without_pr_owner)
error += (
"Missing lgtm from reviewers. "
- f"Minimum {self.github_webhook.minimum_lgtm} required, ({lgtm_count} given). Reviewers: {', '.join(all_reviewers_without_pr_owner)}.\n"
+ f"Minimum {self.github_webhook.minimum_lgtm} required, "
+ f"({lgtm_count} given). Reviewers: {reviewers_str}.\n"
)
return error
@@ -793,21 +1096,21 @@ def _check_labels_for_can_be_merged(self, labels: list[str]) -> str:
change_request_user = _label.split(LABELS_SEPARATOR)[-1]
if change_request_user in self.owners_file_handler.all_pull_request_approvers:
failure_output += "PR has changed requests from approvers\n"
- self.logger.debug(f"Found changed request by {change_request_user}")
+ self.logger.debug(f"{self.log_prefix} Found changed request by {change_request_user}")
missing_required_labels = []
for _req_label in self.github_webhook.can_be_merged_required_labels:
if _req_label not in labels:
missing_required_labels.append(_req_label)
- self.logger.debug(f"Missing required label {_req_label}")
+ self.logger.debug(f"{self.log_prefix} Missing required label {_req_label}")
if missing_required_labels:
failure_output += f"Missing required labels: {', '.join(missing_required_labels)}\n"
return failure_output
- def skip_if_pull_request_already_merged(self, pull_request: PullRequest) -> bool:
- if pull_request and pull_request.is_merged():
+ async def skip_if_pull_request_already_merged(self, pull_request: PullRequest) -> bool:
+ if pull_request and await asyncio.to_thread(lambda: pull_request.is_merged()):
self.logger.info(f"{self.log_prefix}: PR is merged, not processing")
return True
diff --git a/webhook_server/libs/pull_request_review_handler.py b/webhook_server/libs/handlers/pull_request_review_handler.py
similarity index 93%
rename from webhook_server/libs/pull_request_review_handler.py
rename to webhook_server/libs/handlers/pull_request_review_handler.py
index e9617c395..619118665 100644
--- a/webhook_server/libs/pull_request_review_handler.py
+++ b/webhook_server/libs/handlers/pull_request_review_handler.py
@@ -2,8 +2,8 @@
from github.PullRequest import PullRequest
-from webhook_server.libs.labels_handler import LabelsHandler
-from webhook_server.libs.owners_files_handler import OwnersFileHandler
+from webhook_server.libs.handlers.labels_handler import LabelsHandler
+from webhook_server.libs.handlers.owners_files_handler import OwnersFileHandler
from webhook_server.utils.constants import ADD_STR, APPROVE_STR
if TYPE_CHECKING:
diff --git a/webhook_server/libs/handlers/push_handler.py b/webhook_server/libs/handlers/push_handler.py
new file mode 100644
index 000000000..ae3071935
--- /dev/null
+++ b/webhook_server/libs/handlers/push_handler.py
@@ -0,0 +1,174 @@
+import asyncio
+import re
+from typing import TYPE_CHECKING
+from uuid import uuid4
+
+from github.Repository import Repository
+
+from webhook_server.libs.handlers.check_run_handler import CheckRunHandler
+from webhook_server.libs.handlers.runner_handler import RunnerHandler
+from webhook_server.utils.helpers import format_task_fields, run_command
+from webhook_server.utils.notification_utils import send_slack_message
+
+if TYPE_CHECKING:
+ from webhook_server.libs.github_api import GithubWebhook
+
+
+class PushHandler:
+ def __init__(self, github_webhook: "GithubWebhook"):
+ self.github_webhook = github_webhook
+
+ self.hook_data = self.github_webhook.hook_data
+ self.logger = self.github_webhook.logger
+ self.log_prefix: str = self.github_webhook.log_prefix
+ self.repository: Repository = self.github_webhook.repository
+ self.check_run_handler = CheckRunHandler(github_webhook=self.github_webhook)
+ self.runner_handler = RunnerHandler(github_webhook=self.github_webhook)
+
+ async def process_push_webhook_data(self) -> None:
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('push_processing', 'webhook_event', 'started')} "
+ f"Starting push webhook processing", # pragma: allowlist secret
+ )
+ tag = re.search(r"^refs/tags/(.+)$", self.hook_data["ref"])
+ if tag:
+ tag_name = tag.group(1)
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('push_processing', 'webhook_event', 'processing')} "
+ f"Processing tag push: {tag_name}",
+ )
+ self.logger.info(f"{self.log_prefix} Processing push for tag: {tag.group(1)}")
+ self.logger.debug(f"{self.log_prefix} Tag: {tag_name}")
+ if self.github_webhook.pypi:
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('push_processing', 'webhook_event', 'started')} "
+ f"Starting PyPI upload for tag: {tag_name}",
+ )
+ self.logger.info(f"{self.log_prefix} Processing upload to pypi for tag: {tag_name}")
+ try:
+ await self.upload_to_pypi(tag_name=tag_name)
+ except Exception:
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('push_processing', 'webhook_event', 'failed')} "
+ f"PyPI upload failed with exception",
+ )
+ self.logger.exception(f"{self.log_prefix} PyPI upload failed")
+
+ if self.github_webhook.build_and_push_container and self.github_webhook.container_release:
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('push_processing', 'webhook_event', 'started')} "
+ f"Starting container build and push for tag: {tag_name}",
+ )
+ self.logger.info(f"{self.log_prefix} Processing build and push container for tag: {tag_name}")
+ try:
+ await self.runner_handler.run_build_container(push=True, set_check=False, tag=tag_name)
+ # Note: run_build_container logs completion/failure internally
+ except Exception as ex:
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('push_processing', 'webhook_event', 'failed')} "
+ f"Container build and push failed with exception",
+ )
+ self.logger.exception(f"{self.log_prefix} Container build and push failed: {ex}")
+ else:
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('push_processing', 'webhook_event', 'processing')} "
+ f"Non-tag push detected, skipping processing",
+ )
+
+ async def upload_to_pypi(self, tag_name: str) -> None:
+ async def _issue_on_error(_error: str) -> None:
+ # Sanitize title: replace newlines, remove backticks, strip whitespace, truncate
+ sanitized_title = _error.replace("\n", " ").replace("`", "").replace("\r", "").strip()
+ # Truncate to safe length (GitHub issue title limit is ~256 chars, use 250 for safety)
+ if len(sanitized_title) > 250:
+ sanitized_title = sanitized_title[:247] + "..."
+ await asyncio.to_thread(
+ self.repository.create_issue,
+ title=sanitized_title,
+ body=f"""
+Publish to PYPI failed: `{_error}`
+""",
+ )
+
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('push_processing', 'webhook_event', 'started')} "
+ f"Starting PyPI upload process for tag: {tag_name}",
+ )
+ clone_repo_dir = f"{self.github_webhook.clone_repo_dir}-{uuid4()}"
+ uv_cmd_dir = f"--directory {clone_repo_dir}"
+ self.logger.info(f"{self.log_prefix} Start uploading to pypi")
+ self.logger.debug(f"{self.log_prefix} Clone repo dir: {clone_repo_dir}")
+ _dist_dir: str = f"{clone_repo_dir}/pypi-dist"
+
+ async with self.runner_handler._prepare_cloned_repo_dir(
+ checkout=tag_name, clone_repo_dir=clone_repo_dir
+ ) as _res:
+ if not _res[0]:
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('push_processing', 'webhook_event', 'failed')} "
+ f"PyPI upload failed: repository preparation failed",
+ )
+ _error = self.check_run_handler.get_check_run_text(out=_res[1], err=_res[2])
+ await _issue_on_error(_error=_error)
+ return
+
+ rc, out, err = await run_command(
+ command=f"uv {uv_cmd_dir} build --sdist --out-dir {_dist_dir}", log_prefix=self.log_prefix
+ )
+ if not rc:
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('push_processing', 'webhook_event', 'failed')} "
+ f"PyPI upload failed: build command failed",
+ )
+ _error = self.check_run_handler.get_check_run_text(out=out, err=err)
+ await _issue_on_error(_error=_error)
+ return
+
+ rc, tar_gz_file, err = await run_command(command=f"ls {_dist_dir}", log_prefix=self.log_prefix)
+ if not rc:
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('push_processing', 'webhook_event', 'failed')} "
+ f"PyPI upload failed: listing dist directory failed",
+ )
+ _error = self.check_run_handler.get_check_run_text(out=tar_gz_file, err=err)
+ await _issue_on_error(_error=_error)
+ return
+
+ tar_gz_file = tar_gz_file.strip()
+
+ pypi_token = self.github_webhook.pypi["token"]
+ commands: list[str] = [
+ f"uvx {uv_cmd_dir} twine check {_dist_dir}/{tar_gz_file}",
+ f"uvx {uv_cmd_dir} twine upload --username __token__ "
+ f"--password {pypi_token} "
+ f"{_dist_dir}/{tar_gz_file} --skip-existing",
+ ]
+
+ for cmd in commands:
+ rc, out, err = await run_command(command=cmd, log_prefix=self.log_prefix, redact_secrets=[pypi_token])
+ if not rc:
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('push_processing', 'webhook_event', 'failed')} "
+ f"PyPI upload failed: command execution failed",
+ )
+ _error = self.check_run_handler.get_check_run_text(out=out, err=err)
+ await _issue_on_error(_error=_error)
+ return
+
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('push_processing', 'webhook_event', 'completed')} "
+ f"PyPI upload completed successfully for tag: {tag_name}",
+ )
+ self.logger.info(f"{self.log_prefix} Publish to pypi finished")
+ if self.github_webhook.slack_webhook_url:
+ message: str = f"""
+```
+{self.github_webhook.repository_name} Version {tag_name} published to PYPI.
+```
+"""
+ send_slack_message(
+ message=message,
+ webhook_url=self.github_webhook.slack_webhook_url,
+ logger=self.logger,
+ log_prefix=self.log_prefix,
+ )
diff --git a/webhook_server/libs/runner_handler.py b/webhook_server/libs/handlers/runner_handler.py
similarity index 56%
rename from webhook_server/libs/runner_handler.py
rename to webhook_server/libs/handlers/runner_handler.py
index 9b88d0c6e..95dd48aa4 100644
--- a/webhook_server/libs/runner_handler.py
+++ b/webhook_server/libs/handlers/runner_handler.py
@@ -2,7 +2,8 @@
import contextlib
import re
import shutil
-from typing import TYPE_CHECKING, Any, AsyncGenerator
+from collections.abc import AsyncGenerator
+from typing import TYPE_CHECKING, Any
from uuid import uuid4
import shortuuid
@@ -10,8 +11,8 @@
from github.PullRequest import PullRequest
from github.Repository import Repository
-from webhook_server.libs.check_run_handler import CheckRunHandler
-from webhook_server.libs.owners_files_handler import OwnersFileHandler
+from webhook_server.libs.handlers.check_run_handler import CheckRunHandler
+from webhook_server.libs.handlers.owners_files_handler import OwnersFileHandler
from webhook_server.utils.constants import (
BUILD_CONTAINER_STR,
CHERRY_PICKED_LABEL_PREFIX,
@@ -21,7 +22,8 @@
PYTHON_MODULE_INSTALL_STR,
TOX_STR,
)
-from webhook_server.utils.helpers import run_command
+from webhook_server.utils.helpers import _redact_secrets, format_task_fields, run_command
+from webhook_server.utils.notification_utils import send_slack_message
if TYPE_CHECKING:
from webhook_server.libs.github_api import GithubWebhook
@@ -40,6 +42,11 @@ def __init__(self, github_webhook: "GithubWebhook", owners_file_handler: OwnersF
github_webhook=self.github_webhook, owners_file_handler=self.owners_file_handler
)
+ @property
+ def mask_sensitive(self) -> bool:
+ """Get mask_sensitive configuration value."""
+ return self.github_webhook.config.get_value("mask-sensitive-data", return_on_none=True)
+
@contextlib.asynccontextmanager
async def _prepare_cloned_repo_dir(
self,
@@ -56,10 +63,13 @@ async def _prepare_cloned_repo_dir(
try:
# Clone the repository
+ github_token = self.github_webhook.token
+ clone_url_with_token = self.repository.clone_url.replace("https://", f"https://{github_token}@")
rc, out, err = await run_command(
- command=f"git clone {self.repository.clone_url.replace('https://', f'https://{self.github_webhook.token}@')} "
- f"{clone_repo_dir}",
+ command=(f"git clone {clone_url_with_token} {clone_repo_dir}"),
log_prefix=self.log_prefix,
+ redact_secrets=[github_token],
+ mask_sensitive=self.mask_sensitive,
)
if not rc:
result = (rc, out, err)
@@ -67,7 +77,9 @@ async def _prepare_cloned_repo_dir(
if success:
rc, out, err = await run_command(
- command=f"{git_cmd} config user.name '{self.repository.owner.login}'", log_prefix=self.log_prefix
+ command=f"{git_cmd} config user.name '{self.repository.owner.login}'",
+ log_prefix=self.log_prefix,
+ mask_sensitive=self.mask_sensitive,
)
if not rc:
result = (rc, out, err)
@@ -75,7 +87,9 @@ async def _prepare_cloned_repo_dir(
if success:
rc, out, err = await run_command(
- f"{git_cmd} config user.email '{self.repository.owner.email}'", log_prefix=self.log_prefix
+ command=f"{git_cmd} config user.email '{self.repository.owner.email}'",
+ log_prefix=self.log_prefix,
+ mask_sensitive=self.mask_sensitive,
)
if not rc:
result = (rc, out, err)
@@ -83,30 +97,42 @@ async def _prepare_cloned_repo_dir(
if success:
rc, out, err = await run_command(
- command=f"{git_cmd} config --local --add remote.origin.fetch +refs/pull/*/head:refs/remotes/origin/pr/*",
+ command=(
+ f"{git_cmd} config --local --add remote.origin.fetch +refs/pull/*/head:refs/remotes/origin/pr/*"
+ ),
log_prefix=self.log_prefix,
+ mask_sensitive=self.mask_sensitive,
)
if not rc:
result = (rc, out, err)
success = False
if success:
- rc, out, err = await run_command(command=f"{git_cmd} remote update", log_prefix=self.log_prefix)
+ rc, out, err = await run_command(
+ command=f"{git_cmd} remote update",
+ log_prefix=self.log_prefix,
+ mask_sensitive=self.mask_sensitive,
+ )
if not rc:
result = (rc, out, err)
success = False
# Checkout to requested branch/tag
if checkout and success:
- rc, out, err = await run_command(f"{git_cmd} checkout {checkout}", log_prefix=self.log_prefix)
+ rc, out, err = await run_command(
+ command=f"{git_cmd} checkout {checkout}",
+ log_prefix=self.log_prefix,
+ mask_sensitive=self.mask_sensitive,
+ )
if not rc:
result = (rc, out, err)
success = False
if success and pull_request:
rc, out, err = await run_command(
- f"{git_cmd} merge origin/{pull_request.base.ref} -m 'Merge {pull_request.base.ref}'",
+ command=f"{git_cmd} merge origin/{pull_request.base.ref} -m 'Merge {pull_request.base.ref}'",
log_prefix=self.log_prefix,
+ mask_sensitive=self.mask_sensitive,
)
if not rc:
result = (rc, out, err)
@@ -119,6 +145,7 @@ async def _prepare_cloned_repo_dir(
rc, out, err = await run_command(
command=f"{git_cmd} checkout {pull_request.base.ref}",
log_prefix=self.log_prefix,
+ mask_sensitive=self.mask_sensitive,
)
if not rc:
result = (rc, out, err)
@@ -126,7 +153,9 @@ async def _prepare_cloned_repo_dir(
elif tag_name:
rc, out, err = await run_command(
- command=f"{git_cmd} checkout {tag_name}", log_prefix=self.log_prefix
+ command=f"{git_cmd} checkout {tag_name}",
+ log_prefix=self.log_prefix,
+ mask_sensitive=self.mask_sensitive,
)
if not rc:
result = (rc, out, err)
@@ -138,6 +167,7 @@ async def _prepare_cloned_repo_dir(
rc, out, err = await run_command(
command=f"{git_cmd} checkout origin/pr/{_pull_request.number}",
log_prefix=self.log_prefix,
+ mask_sensitive=self.mask_sensitive,
)
if not rc:
result = (rc, out, err)
@@ -145,8 +175,12 @@ async def _prepare_cloned_repo_dir(
if pull_request and success:
rc, out, err = await run_command(
- f"{git_cmd} merge origin/{pull_request.base.ref} -m 'Merge {pull_request.base.ref}'",
+ command=(
+ f"{git_cmd} merge origin/{pull_request.base.ref} "
+ f"-m 'Merge {pull_request.base.ref}'"
+ ),
log_prefix=self.log_prefix,
+ mask_sensitive=self.mask_sensitive,
)
if not rc:
result = (rc, out, err)
@@ -154,7 +188,7 @@ async def _prepare_cloned_repo_dir(
finally:
yield result
self.logger.debug(f"{self.log_prefix} Deleting {clone_repo_dir}")
- shutil.rmtree(clone_repo_dir)
+ shutil.rmtree(clone_repo_dir, ignore_errors=True)
def is_podman_bug(self, err: str) -> bool:
_err = "Error: current system boot ID differs from cached boot ID; an unhandled reboot has occurred"
@@ -165,15 +199,24 @@ def fix_podman_bug(self) -> None:
shutil.rmtree("/tmp/storage-run-1000/containers", ignore_errors=True)
shutil.rmtree("/tmp/storage-run-1000/libpod/tmp", ignore_errors=True)
- async def run_podman_command(self, command: str) -> tuple[bool, str, str]:
- rc, out, err = await run_command(command=command, log_prefix=self.log_prefix)
+ async def run_podman_command(
+ self, command: str, redact_secrets: list[str] | None = None, mask_sensitive: bool = True
+ ) -> tuple[bool, str, str]:
+ rc, out, err = await run_command(
+ command=command, log_prefix=self.log_prefix, redact_secrets=redact_secrets, mask_sensitive=mask_sensitive
+ )
if rc:
return rc, out, err
if self.is_podman_bug(err=err):
self.fix_podman_bug()
- return await run_command(command=command, log_prefix=self.log_prefix)
+ return await run_command(
+ command=command,
+ log_prefix=self.log_prefix,
+ redact_secrets=redact_secrets,
+ mask_sensitive=mask_sensitive,
+ )
return rc, out, err
@@ -182,7 +225,9 @@ async def run_tox(self, pull_request: PullRequest) -> None:
self.logger.debug(f"{self.log_prefix} Tox not configured for this repository")
return
- self.logger.step(f"{self.log_prefix} Starting tox tests execution") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'started')} Starting tox tests execution"
+ )
if await self.check_run_handler.is_check_run_in_progress(check_run=TOX_STR):
self.logger.debug(f"{self.log_prefix} Check run is in progress, re-running {TOX_STR}.")
@@ -198,11 +243,17 @@ async def run_tox(self, pull_request: PullRequest) -> None:
tests = _tox_tests.replace(" ", "")
cmd += f" -e {tests}"
- self.logger.step(f"{self.log_prefix} Setting tox check status to in-progress") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} "
+ f"Setting tox check status to in-progress",
+ )
await self.check_run_handler.set_run_tox_check_in_progress()
self.logger.debug(f"{self.log_prefix} Tox command to run: {cmd}")
- self.logger.step(f"{self.log_prefix} Preparing repository clone for tox execution") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} "
+ f"Preparing repository clone for tox execution",
+ )
async with self._prepare_cloned_repo_dir(clone_repo_dir=clone_repo_dir, pull_request=pull_request) as _res:
output: dict[str, Any] = {
"title": "Tox",
@@ -210,20 +261,33 @@ async def run_tox(self, pull_request: PullRequest) -> None:
"text": None,
}
if not _res[0]:
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'failed')} "
+ f"Repository preparation failed for tox",
+ )
self.logger.error(f"{self.log_prefix} Repository preparation failed for tox")
output["text"] = self.check_run_handler.get_check_run_text(out=_res[1], err=_res[2])
return await self.check_run_handler.set_run_tox_check_failure(output=output)
- self.logger.step(f"{self.log_prefix} Executing tox command") # type: ignore
- rc, out, err = await run_command(command=cmd, log_prefix=self.log_prefix)
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} Executing tox command"
+ )
+ rc, out, err = await run_command(
+ command=cmd, log_prefix=self.log_prefix, mask_sensitive=self.mask_sensitive
+ )
output["text"] = self.check_run_handler.get_check_run_text(err=err, out=out)
if rc:
- self.logger.step(f"{self.log_prefix} Tox tests completed successfully") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'completed')} "
+ f"Tox tests completed successfully",
+ )
return await self.check_run_handler.set_run_tox_check_success(output=output)
else:
- self.logger.step(f"{self.log_prefix} Tox tests failed") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'failed')} Tox tests failed"
+ )
return await self.check_run_handler.set_run_tox_check_failure(output=output)
async def run_pre_commit(self, pull_request: PullRequest) -> None:
@@ -231,7 +295,10 @@ async def run_pre_commit(self, pull_request: PullRequest) -> None:
self.logger.debug(f"{self.log_prefix} Pre-commit not configured for this repository")
return
- self.logger.step(f"{self.log_prefix} Starting pre-commit checks execution") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'started')} "
+ f"Starting pre-commit checks execution",
+ )
if await self.check_run_handler.is_check_run_in_progress(check_run=PRE_COMMIT_STR):
self.logger.debug(f"{self.log_prefix} Check run is in progress, re-running {PRE_COMMIT_STR}.")
@@ -239,10 +306,16 @@ async def run_pre_commit(self, pull_request: PullRequest) -> None:
clone_repo_dir = f"{self.github_webhook.clone_repo_dir}-{uuid4()}"
cmd = f" uvx --directory {clone_repo_dir} {PREK_STR} run --all-files"
- self.logger.step(f"{self.log_prefix} Setting pre-commit check status to in-progress") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} "
+ f"Setting pre-commit check status to in-progress",
+ )
await self.check_run_handler.set_run_pre_commit_check_in_progress()
- self.logger.step(f"{self.log_prefix} Preparing repository clone for pre-commit execution") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} "
+ f"Preparing repository clone for pre-commit execution",
+ )
async with self._prepare_cloned_repo_dir(pull_request=pull_request, clone_repo_dir=clone_repo_dir) as _res:
output: dict[str, Any] = {
"title": "Pre-Commit",
@@ -250,20 +323,34 @@ async def run_pre_commit(self, pull_request: PullRequest) -> None:
"text": None,
}
if not _res[0]:
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'failed')} "
+ f"Repository preparation failed for pre-commit",
+ )
self.logger.error(f"{self.log_prefix} Repository preparation failed for pre-commit")
output["text"] = self.check_run_handler.get_check_run_text(out=_res[1], err=_res[2])
return await self.check_run_handler.set_run_pre_commit_check_failure(output=output)
- self.logger.step(f"{self.log_prefix} Executing pre-commit command") # type: ignore
- rc, out, err = await run_command(command=cmd, log_prefix=self.log_prefix)
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} "
+ f"Executing pre-commit command",
+ )
+ rc, out, err = await run_command(
+ command=cmd, log_prefix=self.log_prefix, mask_sensitive=self.mask_sensitive
+ )
output["text"] = self.check_run_handler.get_check_run_text(err=err, out=out)
if rc:
- self.logger.step(f"{self.log_prefix} Pre-commit checks completed successfully") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'completed')} "
+ f"Pre-commit checks completed successfully",
+ )
return await self.check_run_handler.set_run_pre_commit_check_success(output=output)
else:
- self.logger.step(f"{self.log_prefix} Pre-commit checks failed") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'failed')} Pre-commit checks failed"
+ )
return await self.check_run_handler.set_run_pre_commit_check_failure(output=output)
async def run_build_container(
@@ -279,7 +366,9 @@ async def run_build_container(
if not self.github_webhook.build_and_push_container:
return
- self.logger.step(f"{self.log_prefix} Starting container build process") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'started')} Starting container build process",
+ )
if (
self.owners_file_handler
@@ -297,14 +386,21 @@ async def run_build_container(
if await self.check_run_handler.is_check_run_in_progress(check_run=BUILD_CONTAINER_STR) and not is_merged:
self.logger.info(f"{self.log_prefix} Check run is in progress, re-running {BUILD_CONTAINER_STR}.")
- self.logger.step(f"{self.log_prefix} Setting container build check status to in-progress") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} "
+ f"Setting container build check status to in-progress",
+ )
await self.check_run_handler.set_container_build_in_progress()
_container_repository_and_tag = self.github_webhook.container_repository_and_tag(
pull_request=pull_request, is_merged=is_merged, tag=tag
)
no_cache: str = " --no-cache" if is_merged else ""
- build_cmd: str = f"--network=host {no_cache} -f {clone_repo_dir}/{self.github_webhook.dockerfile} {clone_repo_dir} -t {_container_repository_and_tag}"
+ build_cmd: str = (
+ f"--network=host {no_cache} -f "
+ f"{clone_repo_dir}/{self.github_webhook.dockerfile} "
+ f"{clone_repo_dir} -t {_container_repository_and_tag}"
+ )
if self.github_webhook.container_build_args:
build_args = " ".join(f"--build-arg {arg}" for arg in self.github_webhook.container_build_args)
@@ -318,7 +414,10 @@ async def run_build_container(
podman_build_cmd: str = f"podman build {build_cmd}"
self.logger.debug(f"{self.log_prefix} Podman build command to run: {podman_build_cmd}")
- self.logger.step(f"{self.log_prefix} Preparing repository clone for container build") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} "
+ f"Preparing repository clone for container build",
+ )
async with self._prepare_cloned_repo_dir(
pull_request=pull_request,
is_merged=is_merged,
@@ -331,31 +430,63 @@ async def run_build_container(
"text": None,
}
if not _res[0]:
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'failed')} "
+ f"Repository preparation failed for container build",
+ )
output["text"] = self.check_run_handler.get_check_run_text(out=_res[1], err=_res[2])
if pull_request and set_check:
return await self.check_run_handler.set_container_build_failure(output=output)
- self.logger.step(f"{self.log_prefix} Executing container build command") # type: ignore
- build_rc, build_out, build_err = await self.run_podman_command(command=podman_build_cmd)
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} "
+ f"Executing container build command",
+ )
+ build_rc, build_out, build_err = await self.run_podman_command(
+ command=podman_build_cmd, mask_sensitive=self.mask_sensitive
+ )
output["text"] = self.check_run_handler.get_check_run_text(err=build_err, out=build_out)
if build_rc:
- self.logger.step(f"{self.log_prefix} Container build completed successfully") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'completed')} "
+ f"Container build completed successfully",
+ )
self.logger.info(f"{self.log_prefix} Done building {_container_repository_and_tag}")
if pull_request and set_check:
return await self.check_run_handler.set_container_build_success(output=output)
else:
- self.logger.step(f"{self.log_prefix} Container build failed") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'failed')} Container build failed"
+ )
self.logger.error(f"{self.log_prefix} Failed to build {_container_repository_and_tag}")
if pull_request and set_check:
return await self.check_run_handler.set_container_build_failure(output=output)
if push and build_rc:
- self.logger.step(f"{self.log_prefix} Starting container push to registry") # type: ignore
- cmd = f"podman push --creds {self.github_webhook.container_repository_username}:{self.github_webhook.container_repository_password} {_container_repository_and_tag}"
- push_rc, _, _ = await self.run_podman_command(command=cmd)
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'started')} "
+ f"Starting container push to registry",
+ )
+ cmd = (
+ f"podman push --creds "
+ f"{self.github_webhook.container_repository_username}:"
+ f"{self.github_webhook.container_repository_password} "
+ f"{_container_repository_and_tag}"
+ )
+ push_rc, _, _ = await self.run_podman_command(
+ command=cmd,
+ redact_secrets=[
+ self.github_webhook.container_repository_username,
+ self.github_webhook.container_repository_password,
+ ],
+ mask_sensitive=self.mask_sensitive,
+ )
if push_rc:
- self.logger.step(f"{self.log_prefix} Container push completed successfully") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'completed')} "
+ f"Container push completed successfully",
+ )
push_msg: str = f"New container for {_container_repository_and_tag} published"
if pull_request:
await asyncio.to_thread(pull_request.create_issue_comment, push_msg)
@@ -366,13 +497,19 @@ async def run_build_container(
{self.github_webhook.repository_full_name} {push_msg}.
```
"""
- self.github_webhook.send_slack_message(
- message=message, webhook_url=self.github_webhook.slack_webhook_url
+ send_slack_message(
+ message=message,
+ webhook_url=self.github_webhook.slack_webhook_url,
+ logger=self.logger,
+ log_prefix=self.log_prefix,
)
self.logger.info(f"{self.log_prefix} Done push {_container_repository_and_tag}")
else:
err_msg: str = f"Failed to build and push {_container_repository_and_tag}"
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'failed')} Container push failed",
+ )
if pull_request:
await asyncio.to_thread(pull_request.create_issue_comment, err_msg)
@@ -382,24 +519,37 @@ async def run_build_container(
{self.github_webhook.repository_full_name} {err_msg}.
```
"""
- self.github_webhook.send_slack_message(
- message=message, webhook_url=self.github_webhook.slack_webhook_url
+ send_slack_message(
+ message=message,
+ webhook_url=self.github_webhook.slack_webhook_url,
+ logger=self.logger,
+ log_prefix=self.log_prefix,
)
async def run_install_python_module(self, pull_request: PullRequest) -> None:
if not self.github_webhook.pypi:
return
- self.logger.step(f"{self.log_prefix} Starting Python module installation") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} "
+ f"{format_task_fields('runner', 'ci_check', 'started')} "
+ f"Starting Python module installation"
+ )
if await self.check_run_handler.is_check_run_in_progress(check_run=PYTHON_MODULE_INSTALL_STR):
self.logger.info(f"{self.log_prefix} Check run is in progress, re-running {PYTHON_MODULE_INSTALL_STR}.")
clone_repo_dir = f"{self.github_webhook.clone_repo_dir}-{uuid4()}"
self.logger.info(f"{self.log_prefix} Installing python module")
- self.logger.step(f"{self.log_prefix} Setting Python module install check status to in-progress") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} "
+ f"Setting Python module install check status to in-progress",
+ )
await self.check_run_handler.set_python_module_install_in_progress()
- self.logger.step(f"{self.log_prefix} Preparing repository clone for Python module installation") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} "
+ f"Preparing repository clone for Python module installation",
+ )
async with self._prepare_cloned_repo_dir(
pull_request=pull_request,
clone_repo_dir=clone_repo_dir,
@@ -410,29 +560,46 @@ async def run_install_python_module(self, pull_request: PullRequest) -> None:
"text": None,
}
if not _res[0]:
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'failed')} "
+ f"Repository preparation failed for Python module installation",
+ )
output["text"] = self.check_run_handler.get_check_run_text(out=_res[1], err=_res[2])
return await self.check_run_handler.set_python_module_install_failure(output=output)
- self.logger.step(f"{self.log_prefix} Executing Python module installation command") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} "
+ f"Executing Python module installation command",
+ )
rc, out, err = await run_command(
command=f"uvx pip wheel --no-cache-dir -w {clone_repo_dir}/dist {clone_repo_dir}",
log_prefix=self.log_prefix,
+ mask_sensitive=self.mask_sensitive,
)
output["text"] = self.check_run_handler.get_check_run_text(err=err, out=out)
if rc:
- self.logger.step(f"{self.log_prefix} Python module installation completed successfully") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'completed')} "
+ f"Python module installation completed successfully",
+ )
return await self.check_run_handler.set_python_module_install_success(output=output)
- self.logger.step(f"{self.log_prefix} Python module installation failed") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} "
+ f"{format_task_fields('runner', 'ci_check', 'failed')} "
+ f"Python module installation failed"
+ )
return await self.check_run_handler.set_python_module_install_failure(output=output)
async def run_conventional_title_check(self, pull_request: PullRequest) -> None:
if not self.github_webhook.conventional_title:
return
- self.logger.step(f"{self.log_prefix} Starting conventional title check") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'started')} Starting conventional title check"
+ )
output: dict[str, str] = {
"title": "Conventional Title",
@@ -443,17 +610,27 @@ async def run_conventional_title_check(self, pull_request: PullRequest) -> None:
if await self.check_run_handler.is_check_run_in_progress(check_run=CONVENTIONAL_TITLE_STR):
self.logger.info(f"{self.log_prefix} Check run is in progress, re-running {CONVENTIONAL_TITLE_STR}.")
- self.logger.step(f"{self.log_prefix} Setting conventional title check status to in-progress") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} "
+ f"Setting conventional title check status to in-progress",
+ )
await self.check_run_handler.set_conventional_title_in_progress()
allowed_names = self.github_webhook.conventional_title.split(",")
title = pull_request.title
self.logger.debug(f"{self.log_prefix} Conventional title check for title: {title}, allowed: {allowed_names}")
if any([re.search(rf"{_name}(.*):", title) for _name in allowed_names]):
- self.logger.step(f"{self.log_prefix} Conventional title check completed successfully") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'completed')} "
+ f"Conventional title check completed successfully",
+ )
await self.check_run_handler.set_conventional_title_success(output=output)
else:
- self.logger.step(f"{self.log_prefix} Conventional title check failed") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} "
+ f"{format_task_fields('runner', 'ci_check', 'failed')} "
+ f"Conventional title check failed"
+ )
output["summary"] = "Failed"
output["text"] = f"Pull request title must starts with allowed title: {': ,'.join(allowed_names)}"
await self.check_run_handler.set_conventional_title_failure(output=output)
@@ -463,34 +640,50 @@ async def is_branch_exists(self, branch: str) -> Branch:
async def cherry_pick(self, pull_request: PullRequest, target_branch: str, reviewed_user: str = "") -> None:
requested_by = reviewed_user or "by target-branch label"
- self.logger.step(f"{self.log_prefix} Starting cherry-pick process to {target_branch}") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} "
+ f"{format_task_fields('runner', 'ci_check', 'started')} "
+ f"Starting cherry-pick process to {target_branch}"
+ )
self.logger.info(f"{self.log_prefix} Cherry-pick requested by user: {requested_by}")
new_branch_name = f"{CHERRY_PICKED_LABEL_PREFIX}-{pull_request.head.ref}-{shortuuid.uuid()[:5]}"
if not await self.is_branch_exists(branch=target_branch):
err_msg = f"cherry-pick failed: {target_branch} does not exists"
- self.logger.step(f"{self.log_prefix} Cherry-pick failed: target branch does not exist") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} "
+ f"{format_task_fields('runner', 'ci_check', 'failed')} "
+ f"Cherry-pick failed: target branch does not exist"
+ )
self.logger.error(err_msg)
await asyncio.to_thread(pull_request.create_issue_comment, err_msg)
else:
- self.logger.step(f"{self.log_prefix} Setting cherry-pick check status to in-progress") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} "
+ f"{format_task_fields('runner', 'ci_check', 'processing')} "
+ f"Setting cherry-pick check status to in-progress"
+ )
await self.check_run_handler.set_cherry_pick_in_progress()
commit_hash = pull_request.merge_commit_sha
commit_msg_striped = pull_request.title.replace("'", "")
pull_request_url = pull_request.html_url
clone_repo_dir = f"{self.github_webhook.clone_repo_dir}-{uuid4()}"
git_cmd = f"git --work-tree={clone_repo_dir} --git-dir={clone_repo_dir}/.git"
- hub_cmd = f"GITHUB_TOKEN={self.github_webhook.token} hub --work-tree={clone_repo_dir} --git-dir={clone_repo_dir}/.git"
+ github_token = self.github_webhook.token
+ hub_cmd = f"GITHUB_TOKEN={github_token} hub --work-tree={clone_repo_dir} --git-dir={clone_repo_dir}/.git"
commands: list[str] = [
f"{git_cmd} checkout {target_branch}",
f"{git_cmd} pull origin {target_branch}",
f"{git_cmd} checkout -b {new_branch_name} origin/{target_branch}",
f"{git_cmd} cherry-pick {commit_hash}",
f"{git_cmd} push origin {new_branch_name}",
- f"bash -c \"{hub_cmd} pull-request -b {target_branch} -h {new_branch_name} -l {CHERRY_PICKED_LABEL_PREFIX} -m '{CHERRY_PICKED_LABEL_PREFIX}: [{target_branch}] {commit_msg_striped}' -m 'cherry-pick {pull_request_url} into {target_branch}' -m 'requested-by {requested_by}'\"",
+ f'bash -c "{hub_cmd} pull-request -b {target_branch} '
+ f"-h {new_branch_name} -l {CHERRY_PICKED_LABEL_PREFIX} "
+ f"-m '{CHERRY_PICKED_LABEL_PREFIX}: [{target_branch}] "
+ f"{commit_msg_striped}' -m 'cherry-pick {pull_request_url} "
+ f"into {target_branch}' -m 'requested-by {requested_by}'\"",
]
- self.logger.debug(f"{self.log_prefix} Cherry pick commands to run: {commands}")
rc, out, err = None, "", ""
async with self._prepare_cloned_repo_dir(pull_request=pull_request, clone_repo_dir=clone_repo_dir) as _res:
@@ -500,17 +693,36 @@ async def cherry_pick(self, pull_request: PullRequest, target_branch: str, revie
"text": None,
}
if not _res[0]:
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'failed')} "
+ f"Repository preparation failed for cherry-pick",
+ )
output["text"] = self.check_run_handler.get_check_run_text(out=_res[1], err=_res[2])
await self.check_run_handler.set_cherry_pick_failure(output=output)
- self.logger.step(f"{self.log_prefix} Executing cherry-pick commands") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} "
+ f"{format_task_fields('runner', 'ci_check', 'processing')} "
+ f"Executing cherry-pick commands"
+ )
for cmd in commands:
- rc, out, err = await run_command(command=cmd, log_prefix=self.log_prefix)
+ rc, out, err = await run_command(
+ command=cmd,
+ log_prefix=self.log_prefix,
+ redact_secrets=[github_token],
+ mask_sensitive=self.mask_sensitive,
+ )
if not rc:
- self.logger.step(f"{self.log_prefix} Cherry-pick command failed") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} "
+ f"{format_task_fields('runner', 'ci_check', 'failed')} "
+ f"Cherry-pick command failed"
+ )
output["text"] = self.check_run_handler.get_check_run_text(err=err, out=out)
await self.check_run_handler.set_cherry_pick_failure(output=output)
- self.logger.error(f"{self.log_prefix} Cherry pick failed: {out} --- {err}")
+ redacted_out = _redact_secrets(out, [github_token], mask_sensitive=self.mask_sensitive)
+ redacted_err = _redact_secrets(err, [github_token], mask_sensitive=self.mask_sensitive)
+ self.logger.error(f"{self.log_prefix} Cherry pick failed: {redacted_out} --- {redacted_err}")
local_branch_name = f"{pull_request.head.ref}-{target_branch}"
await asyncio.to_thread(
pull_request.create_issue_comment,
@@ -530,7 +742,10 @@ async def cherry_pick(self, pull_request: PullRequest, target_branch: str, revie
output["text"] = self.check_run_handler.get_check_run_text(err=err, out=out)
- self.logger.step(f"{self.log_prefix} Cherry-pick completed successfully") # type: ignore
+ self.logger.step( # type: ignore[attr-defined]
+ f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'completed')} "
+ f"Cherry-pick completed successfully",
+ )
await self.check_run_handler.set_cherry_pick_success(output=output)
await asyncio.to_thread(
pull_request.create_issue_comment, f"Cherry-picked PR {pull_request.title} into {target_branch}"
diff --git a/webhook_server/libs/log_parser.py b/webhook_server/libs/log_parser.py
index 21c2b1f3f..b05610d24 100644
--- a/webhook_server/libs/log_parser.py
+++ b/webhook_server/libs/log_parser.py
@@ -3,9 +3,10 @@
import asyncio
import datetime
import re
+from collections.abc import AsyncGenerator
from dataclasses import dataclass
from pathlib import Path
-from typing import Any, AsyncGenerator
+from typing import Any
from simple_logger.logger import get_logger
@@ -23,6 +24,10 @@ class LogEntry:
repository: str | None = None
pr_number: int | None = None
github_user: str | None = None
+ task_id: str | None = None
+ task_type: str | None = None
+ task_status: str | None = None
+ token_spend: int | None = None
def to_dict(self) -> dict[str, Any]:
"""Convert LogEntry to dictionary for JSON serialization."""
@@ -36,6 +41,10 @@ def to_dict(self) -> dict[str, Any]:
"repository": self.repository,
"pr_number": self.pr_number,
"github_user": self.github_user,
+ "task_id": self.task_id,
+ "task_type": self.task_type,
+ "task_status": self.task_status,
+ "token_spend": self.token_spend,
}
@@ -57,30 +66,51 @@ def __init__(self) -> None:
# With PR: "{colored_repo} [{event}][{delivery_id}][{user}][PR {number}]: {message}"
# Without PR: "{colored_repo} [{event}][{delivery_id}][{user}]: {message}"
# Full log format: "timestamp logger level colored_repo [event][delivery_id][user][PR number]: message"
- # Example: "2025-07-31T10:30:00.123000 GithubWebhook INFO repo-name [pull_request][abc123][user][PR 123]: Processing webhook"
+ # Example: "2025-07-31T10:30:00.123000 GithubWebhook INFO repo-name
+ # [pull_request][abc123][user][PR 123]: Processing webhook"
+ # Supports:
+ # - Optional fractional seconds
+ # - Optional timezone (Z or ±HH:MM format, e.g., +00:00, -05:00)
+ # - Flexible whitespace between fields
+ # - Logger names with dots/hyphens
LOG_PATTERN = re.compile(
- r"^(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+) (\w+) (?:\x1b\[[\d;]*m)?(\w+)(?:\x1b\[[\d;]*m)? (.+)$"
+ r"^(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(?:\.\d+)?(?:Z|[+-]\d{2}:\d{2})?)\s+"
+ r"([\w.-]+)\s+(?:\x1b\[[\d;]*m)?([\w.-]+)(?:\x1b\[[\d;]*m)?\s+(.+)$"
)
# Pattern to extract GitHub context from prepare_log_prefix format
# Matches: colored_repo [event][delivery_id][user][PR number]: message
GITHUB_CONTEXT_PATTERN = re.compile(
- r"(?:\x1b\[[0-9;]*m)?([^\x1b\[\s]+)(?:\x1b\[[0-9;]*m)? \[([^\]]+)\]\[([^\]]+)\]\[([^\]]+)\](?:\[PR (\d+)\])?: (.+)"
+ r"(?:\x1b\[[0-9;]*m)?([^\x1b\[\s]+)(?:\x1b\[[0-9;]*m)? "
+ r"\[([^\]]+)\]\[([^\]]+)\]\[([^\]]+)\](?:\[PR (\d+)\])?: (.+)"
)
ANSI_ESCAPE_PATTERN = re.compile(r"\x1b\[[0-9;]*m")
+ # Precompiled patterns for task field extraction (performance optimization)
+ # Handle escaped brackets: (?:\\.|[^\]])+ matches escaped chars or non-] chars
+ TASK_ID_PATTERN = re.compile(r"\[task_id=((?:\\.|[^\]])+)\]")
+ TASK_TYPE_PATTERN = re.compile(r"\[task_type=((?:\\.|[^\]])+)\]")
+ TASK_STATUS_PATTERN = re.compile(r"\[task_status=((?:\\.|[^\]])+)\]")
+ # Pattern for token spend: handles both original and masked formats
+ # Original: "Token spend: 35 API calls"
+ # Masked: "token ***** 35 API calls" (when "token" is redacted by secret masking)
+ TOKEN_SPEND_PATTERN = re.compile(r"(?:Token spend|token\s+\*+)\s*:?\s*(\d+)\s+API calls")
+
def is_workflow_step(self, entry: LogEntry) -> bool:
"""
- Check if a log entry is a workflow step (logger.step call).
+ Check if a log entry is a workflow milestone step.
+
+ Only entries with task_id AND task_status are considered workflow milestones.
+ This filters out internal/initialization steps and only shows meaningful business events.
Args:
entry: LogEntry to check
Returns:
- True if this is a workflow step entry
+ True if this is a workflow milestone entry (has task_id and task_status)
"""
- return entry.level.upper() == "STEP"
+ return bool(entry.task_id and entry.task_status)
def extract_workflow_steps(self, entries: list[LogEntry], hook_id: str) -> list[LogEntry]:
"""
@@ -116,24 +146,39 @@ def parse_log_entry(self, log_line: str) -> LogEntry | None:
timestamp_str, logger_name, level, message = match.groups()
# Parse ISO timestamp format: "2025-07-31T10:30:00.123000"
+ # Handle 'Z' timezone suffix which fromisoformat doesn't accept
try:
- timestamp = datetime.datetime.fromisoformat(timestamp_str)
+ normalized_timestamp = timestamp_str.replace("Z", "+00:00")
+ timestamp = datetime.datetime.fromisoformat(normalized_timestamp)
+ # Ensure timestamp is timezone-aware (assume UTC if naive)
+ if timestamp.tzinfo is None:
+ timestamp = timestamp.replace(tzinfo=datetime.UTC)
except ValueError:
return None
# Extract GitHub webhook context from prepare_log_prefix format
repository, event_type, hook_id, github_user, pr_number, cleaned_message = self._extract_github_context(message)
+ # Extract task correlation fields from message and strip them from the message
+ task_id, task_type, task_status, final_message = self._extract_task_fields(cleaned_message)
+
+ # Extract token spend from message
+ token_spend = self.extract_token_spend(final_message)
+
return LogEntry(
timestamp=timestamp,
level=level,
logger_name=logger_name,
- message=cleaned_message,
+ message=final_message,
hook_id=hook_id,
event_type=event_type,
repository=repository,
pr_number=pr_number,
github_user=github_user,
+ task_id=task_id,
+ task_type=task_type,
+ task_status=task_status,
+ token_spend=token_spend,
)
def _extract_github_context(
@@ -170,6 +215,69 @@ def _extract_github_context(
cleaned_message = self.ANSI_ESCAPE_PATTERN.sub("", message)
return None, None, None, None, None, cleaned_message
+ def _extract_task_fields(self, message: str) -> tuple[str | None, str | None, str | None, str]:
+ """Extract task correlation fields from log message.
+
+ Extracts task_id, task_type, and task_status from patterns like:
+ [task_id=check_tox] [task_type=ci_check] [task_status=started]
+
+ The task tokens are removed from the returned message to avoid duplication
+ and improve free-text search, as these values are stored in dedicated fields.
+
+ Args:
+ message: Log message to extract from
+
+ Returns:
+ Tuple of (task_id, task_type, task_status, cleaned_message)
+ """
+ task_id = None
+ task_type = None
+ task_status = None
+ cleaned_message = message
+
+ # Extract task_id using precompiled pattern
+ if task_id_match := self.TASK_ID_PATTERN.search(cleaned_message):
+ task_id = self._unescape_task_value(task_id_match.group(1))
+ cleaned_message = self.TASK_ID_PATTERN.sub("", cleaned_message, count=1).strip()
+
+ # Extract task_type using precompiled pattern
+ if task_type_match := self.TASK_TYPE_PATTERN.search(cleaned_message):
+ task_type = self._unescape_task_value(task_type_match.group(1))
+ cleaned_message = self.TASK_TYPE_PATTERN.sub("", cleaned_message, count=1).strip()
+
+ # Extract task_status using precompiled pattern
+ if task_status_match := self.TASK_STATUS_PATTERN.search(cleaned_message):
+ task_status = self._unescape_task_value(task_status_match.group(1))
+ cleaned_message = self.TASK_STATUS_PATTERN.sub("", cleaned_message, count=1).strip()
+
+ return task_id, task_type, task_status, cleaned_message
+
+ @staticmethod
+ def _unescape_task_value(value: str) -> str:
+ """Unescape brackets in task field values."""
+ return value.replace("\\]", "]").replace("\\[", "[")
+
+ def extract_token_spend(self, message: str) -> int | None:
+ """Extract token spend from log message.
+
+ Parses messages like:
+ - "Token spend: 35 API calls (initial: 2831, final: 2796, remaining: 2796)"
+ - "token ***** 35 API calls (initial: 2831, final: 2796, remaining: 2796)" (when masked)
+
+ Args:
+ message: Log message to extract from
+
+ Returns:
+ Token spend as integer, or None if not found
+ """
+ match = self.TOKEN_SPEND_PATTERN.search(message)
+ if match:
+ try:
+ return int(match.group(1))
+ except ValueError:
+ return None
+ return None
+
def parse_log_file(self, file_path: Path) -> list[LogEntry]:
"""
Parse an entire log file and return list of LogEntry objects.
@@ -185,8 +293,8 @@ def parse_log_file(self, file_path: Path) -> list[LogEntry]:
failed_lines = 0
try:
- with open(file_path, "r", encoding="utf-8") as f:
- for line_num, line in enumerate(f, 1):
+ with open(file_path, encoding="utf-8") as f:
+ for _line_num, line in enumerate(f, 1):
total_lines += 1
entry = self.parse_log_entry(line)
if entry:
@@ -216,7 +324,7 @@ async def tail_log_file(self, file_path: Path, follow: bool = True) -> AsyncGene
if not file_path.exists():
return
- with open(file_path, "r", encoding="utf-8") as f:
+ with open(file_path, encoding="utf-8") as f:
# Move to end of file
f.seek(0, 2)
@@ -249,7 +357,7 @@ async def monitor_log_directory(self, log_dir: Path, pattern: str = "*.log") ->
# Find all existing log files including rotated ones
log_files: list[Path] = []
- log_files.extend(log_dir.glob("*.log"))
+ log_files.extend(log_dir.glob(pattern))
# Only monitor current log file, not rotated ones for real-time
current_log_files = [
f for f in log_files if not any(f.name.endswith(ext) for ext in [".1", ".2", ".3", ".4", ".5"])
diff --git a/webhook_server/libs/push_handler.py b/webhook_server/libs/push_handler.py
deleted file mode 100644
index 24f544905..000000000
--- a/webhook_server/libs/push_handler.py
+++ /dev/null
@@ -1,105 +0,0 @@
-import re
-from typing import TYPE_CHECKING
-from uuid import uuid4
-
-from github.Repository import Repository
-
-from webhook_server.libs.check_run_handler import CheckRunHandler
-from webhook_server.libs.runner_handler import RunnerHandler
-from webhook_server.utils.helpers import run_command
-
-if TYPE_CHECKING:
- from webhook_server.libs.github_api import GithubWebhook
-
-
-class PushHandler:
- def __init__(self, github_webhook: "GithubWebhook"):
- self.github_webhook = github_webhook
-
- self.hook_data = self.github_webhook.hook_data
- self.logger = self.github_webhook.logger
- self.log_prefix: str = self.github_webhook.log_prefix
- self.repository: Repository = self.github_webhook.repository
- self.check_run_handler = CheckRunHandler(github_webhook=self.github_webhook)
- self.runner_handler = RunnerHandler(github_webhook=self.github_webhook)
-
- async def process_push_webhook_data(self) -> None:
- self.logger.step(f"{self.log_prefix} Starting push webhook processing") # type: ignore
- tag = re.search(r"refs/tags/?(.*)", self.hook_data["ref"])
- if tag:
- tag_name = tag.group(1)
- self.logger.step(f"{self.log_prefix} Processing tag push: {tag_name}") # type: ignore
- self.logger.info(f"{self.log_prefix} Processing push for tag: {tag.group(1)}")
- self.logger.debug(f"{self.log_prefix} Tag: {tag_name}")
- if self.github_webhook.pypi:
- self.logger.step(f"{self.log_prefix} Starting PyPI upload for tag: {tag_name}") # type: ignore
- self.logger.info(f"{self.log_prefix} Processing upload to pypi for tag: {tag_name}")
- await self.upload_to_pypi(tag_name=tag_name)
-
- if self.github_webhook.build_and_push_container and self.github_webhook.container_release:
- self.logger.step(f"{self.log_prefix} Starting container build and push for tag: {tag_name}") # type: ignore
- self.logger.info(f"{self.log_prefix} Processing build and push container for tag: {tag_name}")
- await self.runner_handler.run_build_container(push=True, set_check=False, tag=tag_name)
- else:
- self.logger.step(f"{self.log_prefix} Non-tag push detected, skipping processing") # type: ignore
-
- async def upload_to_pypi(self, tag_name: str) -> None:
- def _issue_on_error(_error: str) -> None:
- self.repository.create_issue(
- title=_error,
- body=f"""
-Publish to PYPI failed: `{_error}`
-""",
- )
-
- self.logger.step(f"{self.log_prefix} Starting PyPI upload process for tag: {tag_name}") # type: ignore
- clone_repo_dir = f"{self.github_webhook.clone_repo_dir}-{uuid4()}"
- uv_cmd_dir = f"--directory {clone_repo_dir}"
- self.logger.info(f"{self.log_prefix} Start uploading to pypi")
- self.logger.debug(f"{self.log_prefix} Clone repo dir: {clone_repo_dir}")
- _dist_dir: str = f"{clone_repo_dir}/pypi-dist"
-
- async with self.runner_handler._prepare_cloned_repo_dir(
- checkout=tag_name, clone_repo_dir=clone_repo_dir
- ) as _res:
- if not _res[0]:
- _error = self.check_run_handler.get_check_run_text(out=_res[1], err=_res[2])
- return _issue_on_error(_error=_error)
-
- rc, out, err = await run_command(
- command=f"uv {uv_cmd_dir} build --sdist --out-dir {_dist_dir}", log_prefix=self.log_prefix
- )
- if not rc:
- _error = self.check_run_handler.get_check_run_text(out=out, err=err)
- return _issue_on_error(_error=_error)
-
- rc, tar_gz_file, err = await run_command(command=f"ls {_dist_dir}", log_prefix=self.log_prefix)
- if not rc:
- _error = self.check_run_handler.get_check_run_text(out=tar_gz_file, err=err)
- return _issue_on_error(_error=_error)
-
- tar_gz_file = tar_gz_file.strip()
-
- commands: list[str] = [
- f"uvx {uv_cmd_dir} twine check {_dist_dir}/{tar_gz_file}",
- f"uvx {uv_cmd_dir} twine upload --username __token__ --password {self.github_webhook.pypi['token']} {_dist_dir}/{tar_gz_file} --skip-existing",
- ]
- self.logger.debug(f"Commands to run: {commands}")
-
- for cmd in commands:
- rc, out, err = await run_command(command=cmd, log_prefix=self.log_prefix)
- if not rc:
- _error = self.check_run_handler.get_check_run_text(out=out, err=err)
- return _issue_on_error(_error=_error)
-
- self.logger.step(f"{self.log_prefix} PyPI upload completed successfully for tag: {tag_name}") # type: ignore
- self.logger.info(f"{self.log_prefix} Publish to pypi finished")
- if self.github_webhook.slack_webhook_url:
- message: str = f"""
-```
-{self.github_webhook.repository_name} Version {tag_name} published to PYPI.
-```
-"""
- self.github_webhook.send_slack_message(
- message=message, webhook_url=self.github_webhook.slack_webhook_url
- )
diff --git a/webhook_server/tests/conftest.py b/webhook_server/tests/conftest.py
index d81b3cea9..f2d30dc46 100644
--- a/webhook_server/tests/conftest.py
+++ b/webhook_server/tests/conftest.py
@@ -1,10 +1,14 @@
+import logging as python_logging
import os
+from dataclasses import dataclass
+from datetime import datetime, timedelta
import pytest
import yaml
from starlette.datastructures import Headers
-from webhook_server.libs.owners_files_handler import OwnersFileHandler
+from webhook_server.libs.handlers.owners_files_handler import OwnersFileHandler
+from webhook_server.libs.log_parser import LogEntry
os.environ["WEBHOOK_SERVER_DATA_DIR"] = "webhook_server/tests/manifests"
os.environ["ENABLE_LOG_SERVER"] = "true"
@@ -87,9 +91,9 @@ def get_contents(self, path: str, ref: str):
return ContentFile(folder5_owners_data)
+@dataclass
class Label:
- def __init__(self, name: str):
- self.name = name
+ name: str
class PullRequest:
@@ -123,8 +127,6 @@ def github_webhook(mocker, request):
mocker.patch(f"{base_import_path}.GithubWebhook.add_api_users_to_auto_verified_and_merged_users", return_value=None)
# Use standard Python logger for caplog compatibility
- import logging as python_logging
-
test_logger = python_logging.getLogger("GithubWebhook")
test_logger.setLevel(python_logging.DEBUG)
@@ -154,10 +156,6 @@ def owners_file_handler(github_webhook):
@pytest.fixture
def sample_log_entries():
"""Pre-generated sample log entries for performance tests."""
- from datetime import datetime, timedelta
-
- from webhook_server.libs.log_parser import LogEntry
-
entries = []
base_time = datetime(2025, 7, 31, 10, 0, 0)
@@ -182,7 +180,6 @@ def sample_log_entries():
@pytest.fixture(autouse=True)
def optimize_test_environment():
"""Auto-applied fixture to optimize test environment."""
- import logging as python_logging
# Disable unnecessary logging during tests
python_logging.getLogger("httpx").setLevel(python_logging.WARNING)
diff --git a/webhook_server/tests/test_add_reviewer_action.py b/webhook_server/tests/test_add_reviewer_action.py
index 6ee951203..7a0e033b5 100644
--- a/webhook_server/tests/test_add_reviewer_action.py
+++ b/webhook_server/tests/test_add_reviewer_action.py
@@ -2,7 +2,7 @@
import pytest
-from webhook_server.libs.issue_comment_handler import IssueCommentHandler
+from webhook_server.libs.handlers.issue_comment_handler import IssueCommentHandler
class User:
diff --git a/webhook_server/tests/test_app.py b/webhook_server/tests/test_app.py
index 0ca18d036..bb8f7f9f6 100644
--- a/webhook_server/tests/test_app.py
+++ b/webhook_server/tests/test_app.py
@@ -10,6 +10,7 @@
import pytest
from fastapi.testclient import TestClient
+from webhook_server import app as app_module
from webhook_server.app import FASTAPI_APP
from webhook_server.libs.exceptions import RepositoryNotFoundInConfigError
from webhook_server.utils.app_utils import (
@@ -120,14 +121,19 @@ def test_process_webhook_missing_repository(self, client: TestClient, webhook_se
response = client.post("/webhook_server", content=payload_json, headers=headers)
assert response.status_code == 400
- assert "Missing repository information" in response.json()["detail"]
+ assert "Missing repository in payload" in response.json()["detail"]
@patch.dict(os.environ, {"WEBHOOK_SERVER_DATA_DIR": "webhook_server/tests/manifests"})
@patch("webhook_server.app.GithubWebhook")
def test_process_webhook_repository_not_found(
self, mock_github_webhook: Mock, client: TestClient, valid_webhook_payload: dict[str, Any], webhook_secret: str
) -> None:
- """Test webhook processing when repository is not found in config."""
+ """Test webhook processing when repository is not found in config.
+
+ Note: RepositoryNotFoundInConfigError is now handled in background task,
+ so the HTTP response is 200 OK. The error is logged but doesn't affect
+ the webhook response to prevent GitHub webhook timeouts.
+ """
# Mock GithubWebhook to raise RepositoryNotFoundError
mock_github_webhook.side_effect = RepositoryNotFoundInConfigError("Repository not found in configuration")
@@ -143,8 +149,9 @@ def test_process_webhook_repository_not_found(
response = client.post("/webhook_server", content=payload_json, headers=headers)
- assert response.status_code == 404
- assert "Repository not found in configuration" in response.json()["detail"]
+ # Returns 200 OK immediately - error is handled in background
+ assert response.status_code == 200
+ assert response.json()["message"] == "Webhook queued for processing"
@patch.dict(os.environ, {"WEBHOOK_SERVER_DATA_DIR": "webhook_server/tests/manifests"})
def test_process_webhook_signature_verification_failure(
@@ -171,7 +178,12 @@ def test_process_webhook_signature_verification_failure(
def test_process_webhook_connection_error(
self, mock_github_webhook: Mock, client: TestClient, valid_webhook_payload: dict[str, Any], webhook_secret: str
) -> None:
- """Test webhook processing when connection error occurs."""
+ """Test webhook processing when connection error occurs.
+
+ Note: Connection errors are now handled in background task,
+ so the HTTP response is 200 OK. The error is logged but doesn't affect
+ the webhook response to prevent GitHub webhook timeouts.
+ """
mock_github_webhook.side_effect = ConnectionError("API connection failed")
payload_json = json.dumps(valid_webhook_payload)
@@ -186,15 +198,21 @@ def test_process_webhook_connection_error(
response = client.post("/webhook_server", content=payload_json, headers=headers)
- assert response.status_code == 503
- assert "API Connection Error" in response.json()["detail"]
+ # Returns 200 OK immediately - error is handled in background
+ assert response.status_code == 200
+ assert response.json()["message"] == "Webhook queued for processing"
@patch.dict(os.environ, {"WEBHOOK_SERVER_DATA_DIR": "webhook_server/tests/manifests"})
@patch("webhook_server.app.GithubWebhook")
def test_process_webhook_unexpected_error(
self, mock_github_webhook: Mock, client: TestClient, valid_webhook_payload: dict[str, Any], webhook_secret: str
) -> None:
- """Test webhook processing when unexpected error occurs."""
+ """Test webhook processing when unexpected error occurs.
+
+ Note: Unexpected errors are now handled in background task,
+ so the HTTP response is 200 OK. The error is logged but doesn't affect
+ the webhook response to prevent GitHub webhook timeouts.
+ """
mock_github_webhook.side_effect = Exception("Unexpected error")
payload_json = json.dumps(valid_webhook_payload)
@@ -209,8 +227,9 @@ def test_process_webhook_unexpected_error(
response = client.post("/webhook_server", content=payload_json, headers=headers)
- assert response.status_code == 500
- assert "Internal Server Error" in response.json()["detail"]
+ # Returns 200 OK immediately - error is handled in background
+ assert response.status_code == 200
+ assert response.json()["message"] == "Webhook queued for processing"
@patch("webhook_server.app.get_github_allowlist")
@patch("webhook_server.app.get_cloudflare_allowlist")
@@ -236,8 +255,6 @@ async def test_get_github_allowlist_success(self, mock_get: Mock) -> None:
mock_response.json.return_value = {"hooks": ["192.30.252.0/22", "185.199.108.0/22"]}
mock_response.raise_for_status.return_value = None
# Use AsyncMock for the client
- from unittest.mock import AsyncMock
-
async_client = AsyncMock()
async_client.get.return_value = mock_response
@@ -248,8 +265,6 @@ async def test_get_github_allowlist_success(self, mock_get: Mock) -> None:
@patch("httpx.AsyncClient.get")
async def test_get_github_allowlist_error(self, mock_get: Mock) -> None:
"""Test GitHub allowlist fetching with error."""
- from unittest.mock import AsyncMock
-
async_client = AsyncMock()
async_client.get.side_effect = httpx.RequestError("Network error")
@@ -264,8 +279,6 @@ async def test_get_cloudflare_allowlist_success(self, mock_get: Mock) -> None:
"result": {"ipv4_cidrs": ["103.21.244.0/22"], "ipv6_cidrs": ["2400:cb00::/32"]}
}
mock_response.raise_for_status.return_value = None
- from unittest.mock import AsyncMock
-
async_client = AsyncMock()
async_client.get.return_value = mock_response
@@ -384,18 +397,14 @@ def test_process_webhook_no_webhook_secret(
@patch("httpx.AsyncClient.get")
async def test_get_github_allowlist_unexpected_error(self, mock_get: Mock) -> None:
"""Test GitHub allowlist fetching with unexpected error."""
- from unittest.mock import AsyncMock
-
async_client = AsyncMock()
async_client.get.side_effect = Exception("Unexpected error")
- with pytest.raises(Exception):
+ with pytest.raises(Exception, match="Unexpected error"):
await get_github_allowlist(async_client)
async def test_get_cloudflare_allowlist_request_error(self) -> None:
"""Test Cloudflare allowlist fetching with request error."""
- from unittest.mock import AsyncMock
-
async_client = AsyncMock()
async_client.get.side_effect = httpx.RequestError("Network error")
@@ -405,21 +414,15 @@ async def test_get_cloudflare_allowlist_request_error(self) -> None:
@patch("httpx.AsyncClient.get")
async def test_get_cloudflare_allowlist_unexpected_error(self, mock_get: Mock) -> None:
"""Test Cloudflare allowlist fetching with unexpected error."""
- from unittest.mock import AsyncMock
-
async_client = AsyncMock()
async_client.get.side_effect = Exception("Unexpected error")
- with pytest.raises(Exception):
+ with pytest.raises(Exception, match="Unexpected error"):
await get_cloudflare_allowlist(async_client)
@patch("httpx.AsyncClient.get")
async def test_get_cloudflare_allowlist_http_error(self, mock_get: Mock) -> None:
"""Test Cloudflare allowlist fetching with HTTP error."""
- from unittest.mock import AsyncMock
-
- import httpx
-
async_client = AsyncMock()
mock_response = Mock()
req = httpx.Request("GET", "https://api.cloudflare.com/client/v4/ips")
@@ -434,10 +437,6 @@ async def test_get_cloudflare_allowlist_http_error(self, mock_get: Mock) -> None
@patch("httpx.AsyncClient.get")
async def test_get_github_allowlist_http_error(self, mock_get: Mock) -> None:
"""Test GitHub allowlist fetching with HTTP error."""
- from unittest.mock import AsyncMock
-
- import httpx
-
async_client = AsyncMock()
mock_response = Mock()
req = httpx.Request("GET", "https://api.github.com/meta")
@@ -457,10 +456,6 @@ async def test_lifespan_success(
self, mock_urllib3: Mock, mock_config: Mock, mock_cf_allowlist: Mock, mock_gh_allowlist: Mock
) -> None:
"""Test successful lifespan function execution."""
- from unittest.mock import AsyncMock
- from unittest.mock import patch as patcher
-
- from webhook_server import app as app_module
# Mock config
mock_config_instance = Mock()
@@ -475,7 +470,7 @@ async def test_lifespan_success(
mock_cf_allowlist.return_value = ["103.21.244.0/22"]
# Mock HTTP client
mock_client = AsyncMock()
- with patcher("httpx.AsyncClient", return_value=mock_client):
+ with patch("httpx.AsyncClient", return_value=mock_client):
async with app_module.lifespan(FASTAPI_APP):
pass
mock_client.aclose.assert_called_once()
@@ -488,7 +483,6 @@ async def test_lifespan_with_ssl_warnings_disabled(
self, mock_urllib3: Mock, mock_config: Mock, mock_cf_allowlist: Mock, mock_gh_allowlist: Mock
) -> None:
"""Test lifespan function with SSL warnings disabled."""
- from webhook_server import app as app_module
# Mock config with SSL warnings disabled
mock_config_instance = Mock()
@@ -515,8 +509,12 @@ async def test_lifespan_with_ssl_warnings_disabled(
async def test_lifespan_with_invalid_cidr(
self, mock_config: Mock, mock_cf_allowlist: Mock, mock_gh_allowlist: Mock
) -> None:
- """Test lifespan function with invalid CIDR addresses."""
- from webhook_server import app as app_module
+ """Test lifespan function with invalid CIDR addresses.
+
+ Note: Invalid CIDR addresses are filtered out, so if IP verification
+ is enabled but no valid networks are loaded, the server will fail-close
+ with RuntimeError for security.
+ """
# Mock config
mock_config_instance = Mock()
@@ -527,7 +525,7 @@ async def test_lifespan_with_invalid_cidr(
}
mock_config.return_value = mock_config_instance
- # Mock allowlist responses with invalid CIDR
+ # Mock allowlist responses with invalid CIDR (will be filtered out)
mock_gh_allowlist.return_value = ["invalid-cidr"]
mock_cf_allowlist.return_value = ["also-invalid"]
@@ -535,10 +533,10 @@ async def test_lifespan_with_invalid_cidr(
mock_client = AsyncMock()
with patch.object(app_module, "_lifespan_http_client", mock_client):
- async with app_module.lifespan(FASTAPI_APP):
- pass
-
- # Should handle invalid CIDR gracefully
+ # Should raise RuntimeError because IP verification is enabled but no valid networks loaded
+ with pytest.raises(RuntimeError, match="IP verification enabled but no allowlist loaded"):
+ async with app_module.lifespan(FASTAPI_APP):
+ pass
@patch("webhook_server.app.get_github_allowlist")
@patch("webhook_server.app.get_cloudflare_allowlist")
@@ -546,8 +544,12 @@ async def test_lifespan_with_invalid_cidr(
async def test_lifespan_with_allowlist_errors(
self, mock_config: Mock, mock_cf_allowlist: Mock, mock_gh_allowlist: Mock
) -> None:
- """Test lifespan function when allowlist fetching fails."""
- from webhook_server import app as app_module
+ """Test lifespan function when allowlist fetching fails.
+
+ Note: If IP verification is enabled but allowlist fetching fails,
+ the server will fail-close with RuntimeError for security (fail-close
+ behavior prevents insecure state).
+ """
# Mock config
mock_config_instance = Mock()
@@ -563,15 +565,13 @@ async def test_lifespan_with_allowlist_errors(
# Mock HTTP client
mock_client = AsyncMock()
with patch.object(app_module, "_lifespan_http_client", mock_client):
- # Should not raise, just log warnings
- async with app_module.lifespan(FASTAPI_APP):
- pass
- # Should handle both allowlist failures gracefully
- # (You could add log assertion here if desired)
+ # Should raise RuntimeError because IP verification is enabled but no networks loaded
+ with pytest.raises(RuntimeError, match="IP verification enabled but no allowlist loaded"):
+ async with app_module.lifespan(FASTAPI_APP):
+ pass
def test_static_files_path_construction(self) -> None:
"""Test that the static files path is constructed correctly."""
- from webhook_server import app as app_module
# The static_files_path should point to webhook_server/web/static
expected_suffix = os.path.join("webhook_server", "web", "static")
@@ -593,7 +593,6 @@ def test_static_files_path_construction(self) -> None:
@patch("webhook_server.app.os.path.isdir")
def test_static_files_validation_logic(self, mock_isdir: Mock, mock_exists: Mock) -> None:
"""Test static files validation logic without lifespan."""
- from webhook_server import app as app_module
# Test case 1: Directory exists and is valid
mock_exists.return_value = True
diff --git a/webhook_server/tests/test_app_utils.py b/webhook_server/tests/test_app_utils.py
new file mode 100644
index 000000000..7a169d594
--- /dev/null
+++ b/webhook_server/tests/test_app_utils.py
@@ -0,0 +1,92 @@
+"""Tests for webhook_server.utils.app_utils module."""
+
+import datetime
+import hashlib
+import hmac
+
+import pytest
+from fastapi import HTTPException
+
+from webhook_server.utils.app_utils import parse_datetime_string, verify_signature
+
+
+class TestVerifySignature:
+ """Test suite for verify_signature function."""
+
+ def test_verify_signature_missing_header(self) -> None:
+ """Test verify_signature raises HTTPException when signature_header is None."""
+ payload_body = b"test payload"
+ secret_token = "test_secret" # pragma: allowlist secret
+
+ with pytest.raises(HTTPException) as exc_info:
+ verify_signature(payload_body, secret_token, signature_header=None)
+
+ assert exc_info.value.status_code == 403
+ assert "x-hub-signature-256 header is missing" in exc_info.value.detail
+
+ def test_verify_signature_valid(self) -> None:
+ """Test verify_signature with valid signature."""
+ payload_body = b"test payload"
+ secret_token = "test_secret" # pragma: allowlist secret
+
+ hash_object = hmac.new(secret_token.encode("utf-8"), msg=payload_body, digestmod=hashlib.sha256)
+ expected_signature = "sha256=" + hash_object.hexdigest()
+
+ # Should not raise exception
+ verify_signature(payload_body, secret_token, signature_header=expected_signature)
+
+ def test_verify_signature_invalid(self) -> None:
+ """Test verify_signature with invalid signature."""
+ payload_body = b"test payload"
+ secret_token = "test_secret" # pragma: allowlist secret
+ invalid_signature = "sha256=invalid_signature"
+
+ with pytest.raises(HTTPException) as exc_info:
+ verify_signature(payload_body, secret_token, signature_header=invalid_signature)
+
+ assert exc_info.value.status_code == 403
+ assert "Request signatures didn't match" in exc_info.value.detail
+
+
+class TestParseDatetimeString:
+ """Test suite for parse_datetime_string function."""
+
+ def test_parse_datetime_string_none(self) -> None:
+ """Test parse_datetime_string with None input."""
+ result = parse_datetime_string(None, "test_field")
+ assert result is None
+
+ def test_parse_datetime_string_valid_iso(self) -> None:
+ """Test parse_datetime_string with valid ISO format."""
+ datetime_str = "2024-01-01T12:00:00Z"
+ result = parse_datetime_string(datetime_str, "test_field")
+ assert isinstance(result, datetime.datetime)
+ assert result.year == 2024
+ assert result.month == 1
+ assert result.day == 1
+
+ def test_parse_datetime_string_valid_with_timezone(self) -> None:
+ """Test parse_datetime_string with valid ISO format with timezone."""
+ datetime_str = "2024-01-01T12:00:00+00:00"
+ result = parse_datetime_string(datetime_str, "test_field")
+ assert isinstance(result, datetime.datetime)
+
+ def test_parse_datetime_string_invalid_format(self) -> None:
+ """Test parse_datetime_string with invalid format raises HTTPException."""
+ datetime_str = "invalid-datetime-format"
+ field_name = "test_field"
+
+ with pytest.raises(HTTPException) as exc_info:
+ parse_datetime_string(datetime_str, field_name)
+
+ assert exc_info.value.status_code == 400
+ assert f"Invalid {field_name} format" in exc_info.value.detail
+ assert datetime_str in exc_info.value.detail
+ assert "Expected ISO 8601 format" in exc_info.value.detail
+
+ def test_parse_datetime_string_empty_string(self) -> None:
+ """Test parse_datetime_string with empty string returns None."""
+ datetime_str = ""
+ result = parse_datetime_string(datetime_str, "test_field")
+ # Empty string is falsy, so it returns None (same as None input)
+ assert result is None
diff --git a/webhook_server/tests/test_branch_protection.py b/webhook_server/tests/test_branch_protection.py
index f88de8b1f..a1ec77e06 100644
--- a/webhook_server/tests/test_branch_protection.py
+++ b/webhook_server/tests/test_branch_protection.py
@@ -2,6 +2,7 @@
from typing import Any
import pytest
+
from webhook_server.libs.config import Config
from webhook_server.utils.github_repository_settings import (
DEFAULT_BRANCH_PROTECTION,
diff --git a/webhook_server/tests/test_check_run_handler.py b/webhook_server/tests/test_check_run_handler.py
index aa485f697..2401f56ad 100644
--- a/webhook_server/tests/test_check_run_handler.py
+++ b/webhook_server/tests/test_check_run_handler.py
@@ -2,7 +2,7 @@
import pytest
-from webhook_server.libs.check_run_handler import CheckRunHandler
+from webhook_server.libs.handlers.check_run_handler import CheckRunHandler
from webhook_server.utils.constants import (
BUILD_CONTAINER_STR,
CAN_BE_MERGED_STR,
@@ -74,6 +74,8 @@ async def test_process_pull_request_check_run_webhook_data_not_completed(
result = await check_run_handler.process_pull_request_check_run_webhook_data()
assert result is False
+ # Verify completion log was called (skipping is acceptable)
+ assert check_run_handler.logger.step.called # type: ignore[attr-defined]
@pytest.mark.asyncio
async def test_process_pull_request_check_run_webhook_data_can_be_merged(
@@ -87,6 +89,23 @@ async def test_process_pull_request_check_run_webhook_data_can_be_merged(
result = await check_run_handler.process_pull_request_check_run_webhook_data()
assert result is False
+ # Verify completion log was called
+ assert check_run_handler.logger.step.called # type: ignore[attr-defined]
+
+ @pytest.mark.asyncio
+ async def test_process_pull_request_check_run_webhook_data_completed_normal(
+ self, check_run_handler: CheckRunHandler
+ ) -> None:
+ """Test processing check run webhook data when action is completed (normal check run)."""
+ check_run_handler.hook_data = {
+ "action": "completed",
+ "check_run": {"name": "test-check", "status": "completed", "conclusion": "success"},
+ }
+
+ result = await check_run_handler.process_pull_request_check_run_webhook_data()
+ assert result is True
+ # Verify completion log was called
+ assert check_run_handler.logger.step.called # type: ignore[attr-defined]
@pytest.mark.asyncio
async def test_set_verify_check_queued(self, check_run_handler: CheckRunHandler) -> None:
diff --git a/webhook_server/tests/test_comment_utils.py b/webhook_server/tests/test_comment_utils.py
new file mode 100644
index 000000000..780433fd5
--- /dev/null
+++ b/webhook_server/tests/test_comment_utils.py
@@ -0,0 +1,250 @@
+"""Tests for comment_utils module."""
+
+from webhook_server.utils.comment_utils import comment_with_details
+
+
+class TestCommentWithDetails:
+ """Test suite for comment_with_details function."""
+
+ def test_basic_comment_formatting(self) -> None:
+ """Test basic comment with simple title and body."""
+ result = comment_with_details("Summary", "Details go here")
+
+ assert "Summary" in result
+ assert "Details go here" in result
+ assert "Nested" in result
+ assert "Nested content" in result
+
Real-time log monitoring and filtering for webhook events
-Real-time log monitoring and filtering for webhook events